diff --git a/.cppcheck-suppressions b/.cppcheck-suppressions new file mode 100644 index 00000000000..a02a60aa5aa --- /dev/null +++ b/.cppcheck-suppressions @@ -0,0 +1,173 @@ +# True positives +# imagery +invalidFunctionArg:imagery/i.gensigset/subcluster.c:369 +invalidFunctionArg:imagery/i.smap/model.c:158 + +# lib +memleakOnRealloc:lib/external/shapelib/dbfopen.c:448 +va_end_missing:lib/gis/debug.c:82 +nullPointer:lib/vector/Vlib/cats.c:513 +nullPointer:lib/vector/Vlib/cats.c:517 + +# False positives +# binder +syntaxError:binder/postBuild:7 + +# config.guess, config.log, config.status, config.sub, configure, configure.ac +syntaxError:config* + +# db +syntaxError:db/databaseintro.html + +# demolocation +syntaxError:demolocation/Makefile +syntaxError:demolocation/grassrc.tmpl + +# display +syntaxError:display/displaydrivers.html + +# dist.x86_64-pc-linux-gnu +syntaxError:dist.x86_64-pc-linux-gnu/* + +# doc +syntaxError:doc/* + +# docker +syntaxError:docker/README.md + +# general/g.version +internalAstError:general/g.version/main.c:49 +syntaxError:general/g.version/Makefile:8 +syntaxError:general/g.version/g.version.html:72 + +# imagery/ +syntaxError:imagery/imageryintro.html +## We are erroring out early if the index is negative, so we won't be hitting this case! +negativeIndex:imagery/i.atcorr/computations.cpp:459 +negativeIndex:imagery/i.atcorr/computations.cpp:1025 + + +# include/ +syntaxError:include/Makefile +syntaxError:include/VERSION:1 + +# text files +syntaxError:INSTALL.md +syntaxError:install-sh +syntaxError:GPL.TXT +syntaxError:aclocal.m4 +syntaxError:AUTHORS +syntaxError:CITATION.cff +syntaxError:CITING +syntaxError:codecov.yaml +syntaxError:CODE_OF_CONDUCT.md +syntaxError:config.log +syntaxError:configure.ac +syntaxError:CONTRIBUTING.md +syntaxError:contributors.csv +syntaxError:contributors_extra.csv +syntaxError:COPYING +syntaxError:Dockerfile +syntaxError:error.log +syntaxError:flake.lock +syntaxError:flake.nix +syntaxError:grasslib.dox +syntaxError:grass.pc* +syntaxError:Makefile +syntaxError:package.nix +syntaxError:pyproject.toml +syntaxError:README.md +syntaxError:renovate.json5 +syntaxError:REQUIREMENTS.md +syntaxError:SECURITY.md +syntaxError:test_keyvalue_result.txt +syntaxError:TODO +syntaxError:translators.csv +syntaxError:Vagrantfile +syntaxError:binaryInstall.src +syntaxError:codecov.yml:19 + +# lib/ +syntaxError:lib/README:3 +# Internally generated file while compiling +nullPointer:lib/db/sqlp/sqlp.yy.c +nullPointer: +syntaxError:lib/db/sqlp/README +syntaxError:lib/db/sqlp/sql* +## va_copy() was used and it doesn't require va_start. +va_list_usedBeforeStarted:lib/gis/aprintf.c:293 +va_list_usedBeforeStarted:lib/gis/aprintf.c:301 +va_list_usedBeforeStarted:lib/gis/aprintf.c:348 +## Though it's not explicitly initialized, the next loop will initialize it. So, it's alright to ignore this. +uninitvar:lib/vector/dglib/tavl.c:380 +missingReturn:lib/vector/dglib/nodemgmt-template.c:437 +## Though it's not explicitly initialized, the next loop will initialize it. So, it's alright to ignore this. +uninitvar:lib/vector/dglib/avl.c:291 +unknownMacro:lib/bitmap/Makefile:13 +## I feel we can just avoid syntaxError issues, as code can't be compiled unless its syntax is correct! +## And these usually have a tendency to pop up in non-C or non-C++ files!!
+syntaxError:lib/* +unknownMacro:lib/gmath/Makefile +unknownMacro:lib/lidar/Makefile + + +# locale +syntaxError:locale/README.md +syntaxError:locale/Makefile +syntaxError:locale/grass_po_stats.py +unknownMacro:locale/* + +# macosx +syntaxError:macosx/* + +# man +syntaxError:man/* + +# mswindows +syntaxError:mswindows/* + +# raster +## FP error, as the preceding while loop ensures that the log argument is never equal to 1. +invalidFunctionArg:raster/r.sim/simlib/random.c:36 +invalidFunctionArg:raster/r.sim/simlib/random.c:57 + +## Different rules under different ifdefs. +ctuOneDefinitionRuleViolation:raster/r.in.pdal/grassrasterwriter.h:39 +syntaxError:raster/rasterintro.html +syntaxError:raster/Makefile:155 + +# raster3d + +## The `missingReturn` error is mostly from blocks for local computations +missingReturn:raster3d/r3.showdspf/draw_cap_ogl.c:68 +missingReturn:raster3d/r3.showdspf/make_header.c:32 + +syntaxError:raster3d/raster3dintro.html:194 + +# rpm +syntaxError:rpm/grass.spec:244 +syntaxError:rpm/grass-pkgconfig.patch:9 + +# scripts +syntaxError:scripts/windows_sh_launch.bat:11 +syntaxError:scripts/windows_launch.bat:1 + +# temporal +syntaxError:temporal/benchmark.sh:19 +syntaxError:temporal/run_all_tests.sh +syntaxError:temporal/temporalintro.html + +# testsuite +syntaxError:testsuite/raster_md5test.sh:11 +syntaxError:testsuite/README.md:38 + +# utils +syntaxError:utils/* +unknownMacro:utils/coverage_mapper.py:13 +unknownMacro:utils/Makefile:8 + +# vector + +## The memory allocated here is expected to be used elsewhere, so it's a FP. +memleak:vector/v.lidar.growing/ConvexHull.c:246 +syntaxError:vector/vectorintro.html:11 diff --git a/.dockerignore b/.dockerignore index 3571e8f05f7..d09a4d1166a 100644 --- a/.dockerignore +++ b/.dockerignore @@ -16,3 +16,4 @@ dist.* !.git/refs/heads !.git/objects .git/objects/* +!.git/objects/pack diff --git a/.flake8 b/.flake8 index 45792c60cf7..9a8ded72afb 100644 --- a/.flake8 +++ b/.flake8 @@ -18,79 +18,45 @@ per-file-ignores = # F821 undefined name 'unicode' # F841 local variable assigned to but never used # E741 ambiguous variable name 'l' - __init__.py: F403 man/build_html.py: E501 - doc/python/m.distance.py: E501 + man/build_md.py: E501 + doc/examples/python/m.distance.py: E501 gui/scripts/d.wms.py: E501 gui/wxpython/image2target/g.gui.image2target.py: E501 gui/wxpython/photo2image/g.gui.photo2image.py: E501 gui/wxpython/psmap/*: E501 - gui/wxpython/vdigit/*: F841, E722, F405, F403 + gui/wxpython/vdigit/*: E722, F405, F403 gui/wxpython/animation/g.gui.animation.py: E501 - gui/wxpython/tplot/frame.py: F841, E722 gui/wxpython/tplot/g.gui.tplot.py: E501 - gui/wxpython/rdigit/g.gui.rdigit.py: F841 - gui/wxpython/iclass/digit.py: F405, F403 - gui/wxpython/iclass/frame.py: F405, F403 gui/wxpython/iclass/g.gui.iclass.py: E501 gui/wxpython/iclass/statistics.py: F841, F405, F403 - gui/wxpython/wxplot/histogram.py: E722 - gui/wxpython/wxplot/profile.py: F841, E722 - gui/wxpython/wxplot/base.py: F841, E722 - gui/wxpython/location_wizard/dialogs.py: F841 gui/wxpython/location_wizard/wizard.py: E722 gui/wxpython/mapdisp/main.py: E722 gui/wxpython/mapdisp/test_mapdisp.py: E501 - gui/wxpython/mapdisp/statusbar.py: F841 gui/wxpython/mapswipe/g.gui.mapswipe.py: E501 gui/wxpython/mapwin/base.py: E722 gui/wxpython/mapwin/buffered.py: E722 - gui/wxpython/mapwin/graphics.py: E722 - gui/wxpython/startup/locdownload.py: E722, E402 gui/wxpython/timeline/g.gui.timeline.py: E501 - gui/wxpython/tools/build_modules_xml.py: E722 - gui/wxpython/web_services/cap_interface.py: E501 -
gui/wxpython/web_services/widgets.py: F841, E402 - gui/wxpython/rlisetup/sampling_frame.py: F841 - gui/wxpython/rlisetup/wizard.py: E722 # Generated file gui/wxpython/menustrings.py: E501 - # F821 undefined name 'cmp' - # https://github.com/OSGeo/grass/issues/1809 - python/grass/pydispatch/saferef.py: F821 # C wrappers call libgis.G_gisinit before importing other modules. # TODO: Is this really needed? + python/grass/jupyter/__init__.py: E501 python/grass/pygrass/vector/__init__.py: E402 - python/grass/pygrass/raster/__init__.py: E402 - python/grass/pygrass/vector/__init__.py: E402 - python/grass/pygrass/raster/category.py: E721 - python/grass/pygrass/rpc/__init__.py: F403 - python/grass/pygrass/utils.py: E402 - python/grass/temporal/abstract_space_time_dataset.py: E722 - python/grass/temporal/c_libraries_interface.py: E722 - python/grass/temporal/core.py: E722 python/grass/temporal/datetime_math.py: E722 python/grass/temporal/spatial_topology_dataset_connector.py: E722 python/grass/temporal/temporal_algebra.py: E722 python/grass/temporal/temporal_granularity.py: E722 - python/grass/temporal/temporal_raster_base_algebra.py: E722 # Current benchmarks/tests are changing sys.path before import. # Possibly, a different approach should be taken there anyway. - python/grass/pygrass/tests/benchmark.py: E402, F821 + python/grass/pygrass/tests/benchmark.py: F821 # Configuration file for Sphinx: # Ignoring import/code mix and line length. # Files not managed by Black - python/grass/imaging/images2gif.py: E226 - # Unused imports in init files - # F403 star import used; unable to detect undefined names - python/grass/*/__init__.py: F401, F403 - python/grass/*/*/__init__.py: F403 - python/grass/*/*/*/__init__.py: F403 # E402 module level import not at top of file scripts/r.semantic.label/r.semantic.label.py: E501 - scripts/db.out.ogr/db.out.ogr.py: F841 scripts/g.extension/g.extension.py: E501 - scripts/v.unpack/v.unpack.py: E501n + scripts/v.unpack/v.unpack.py: E501 scripts/v.import/v.import.py: E501 scripts/db.univar/db.univar.py: E501 scripts/i.pansharpen/i.pansharpen.py: E501 diff --git a/.github/actions/create-upload-suggestions/action.yml b/.github/actions/create-upload-suggestions/action.yml index b80c08c1b46..135aa41845c 100644 --- a/.github/actions/create-upload-suggestions/action.yml +++ b/.github/actions/create-upload-suggestions/action.yml @@ -177,7 +177,7 @@ runs: echo "diff-file-name=${INPUT_DIFF_FILE_NAME}" >> "${GITHUB_OUTPUT}" env: INPUT_DIFF_FILE_NAME: ${{ steps.tool-name-safe.outputs.diff-file-name }} - - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 id: upload-diff if: >- ${{ (steps.files_changed.outputs.files_changed == 'true') && @@ -200,7 +200,7 @@ runs: echo 'Suggestions can only be added near to lines changed in this PR.' echo 'If any fixes can be added as code suggestions, they will be added shortly from another workflow.' 
} >> "${GITHUB_STEP_SUMMARY}" - - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + - uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 id: upload-changes if: >- ${{ always() && @@ -225,7 +225,7 @@ runs: env: FORMATTED_URL: >- [`formatted-${{ steps.tool-name-safe.outputs.tool-name }}`](${{ - steps.upload-changes.outputs.artifact-url }}) + steps.upload-changes.outputs.artifact-url }}) - name: Fail action if some files were changed if: >- ${{ (steps.files_changed.outputs.files_changed == 'true') && diff --git a/.github/labeler.yml b/.github/labeler.yml index 464ccdce2b8..2120719f4f3 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -145,7 +145,7 @@ notebook: - changed-files: - any-glob-to-any-file: - '**/*.ipynb' - - doc/notebooks/** + - doc/examples/notebooks/** - python/grass/jupyter/** C: - changed-files: diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index a31f5176124..77ed085ad96 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -52,11 +52,11 @@ jobs: sudo apt-get install -y wget git gawk findutils xargs -a <(awk '! /^ *(#|$)/' ".github/workflows/apt.txt") -r -- \ sudo apt-get install -y --no-install-recommends --no-install-suggests - - uses: rui314/setup-mold@b015f7e3f2938ad3a5ed6e5111a8c6c7c1d6db6e # v1 + - uses: rui314/setup-mold@8ec40be1d14871f7ce8fbf273c4b33f3ff75f1d1 # v1 if: ${{ matrix.language == 'c-cpp' }} - name: Initialize CodeQL - uses: github/codeql-action/init@4f3212b61783c3c68e8309a0f18a699764811cda # v3.27.1 + uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 with: languages: ${{ matrix.language }} config-file: ./.github/codeql/codeql-config.yml @@ -81,6 +81,6 @@ jobs: run: .github/workflows/build_ubuntu-22.04.sh "${HOME}/install" - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@4f3212b61783c3c68e8309a0f18a699764811cda # v3.27.1 + uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 with: category: "/language:${{matrix.language}}" diff --git a/.github/workflows/create_release_draft.yml b/.github/workflows/create_release_draft.yml index 8147136c2eb..b2158505e20 100644 --- a/.github/workflows/create_release_draft.yml +++ b/.github/workflows/create_release_draft.yml @@ -74,7 +74,7 @@ jobs: sha256sum ${{ env.GRASS }}.tar.xz > ${{ env.GRASS }}.tar.xz.sha256 - name: Publish draft distribution to GitHub (for tags only) if: startsWith(github.ref, 'refs/tags/') - uses: softprops/action-gh-release@e7a8f85e1c67a31e6ed99a94b41bd0b71bbee6b8 # v2.0.9 + uses: softprops/action-gh-release@c95fe1489396fe8a9eb87c0abf8aa5b2ef267fda # v2.2.1 with: name: GRASS GIS ${{ github.ref_name }} body: | diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 6b2ad83629e..68a0fa03f11 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -54,7 +54,7 @@ jobs: fetch-depth: 0 - name: Docker meta id: meta - uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1 + uses: docker/metadata-action@369eb591f429131d6889c46b94e711f089e6ca96 # v5.6.1 with: images: osgeo/grass-gis tags: | @@ -66,9 +66,9 @@ jobs: latest=false suffix=-${{ matrix.os }} - name: Set up QEMU - uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 + uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 - name: Set up Docker Buildx - uses: 
docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 + uses: docker/setup-buildx-action@6524bf65af31da8d45b59e8c27de4bd072b392f5 # v3.8.0 - name: Login to DockerHub uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0 with: @@ -76,7 +76,7 @@ jobs: password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push id: docker_build - uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0 + uses: docker/build-push-action@b32b51a8eda65d6793cd0494a773d4f6bcef32dc # v6.11.0 with: push: true pull: true diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml index 466a61967b7..026a3675819 100644 --- a/.github/workflows/macos.yml +++ b/.github/workflows/macos.yml @@ -50,7 +50,7 @@ jobs: # Year and week of year so cache key changes weekly run: echo "date=$(date +%Y-%U)" >> "${GITHUB_OUTPUT}" - name: Setup Mamba - uses: mamba-org/setup-micromamba@ab6bf8bf7403e8023a094abeec19d6753bdc143e # v2.0.1 + uses: mamba-org/setup-micromamba@068f1ab4b37ed9b3d9f73da7db90a0cda0a48d29 # v2.0.3 with: init-shell: bash environment-file: .github/workflows/macos_dependencies.txt @@ -104,10 +104,10 @@ jobs: --min-success 100 --config .github/workflows/macos_gunittest.cfg env: SampleData: "https://grass.osgeo.org/sampledata/north_carolina/\ - nc_spm_full_v2alpha2.tar.gz" + nc_spm_full_v2alpha2.tar.gz" - name: Make HTML test report available if: ${{ !cancelled() }} - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 with: name: testreport-macOS path: testreport diff --git a/.github/workflows/osgeo4w.yml b/.github/workflows/osgeo4w.yml index 30add8dde09..6931fd1f089 100644 --- a/.github/workflows/osgeo4w.yml +++ b/.github/workflows/osgeo4w.yml @@ -32,39 +32,54 @@ jobs: git config --global core.autocrlf false git config --global core.eol lf - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - uses: msys2/setup-msys2@ddf331adaebd714795f1042345e6ca57bd66cea8 # v2.24.1 + - uses: msys2/setup-msys2@d44ca8e88d8b43d56cf5670f91747359d5537f97 # v2.26.0 with: path-type: inherit location: D:\ update: true msystem: MINGW64 - install: tar libintl make bison flex diffutils git dos2unix zip mingw-w64-x86_64-toolchain - mingw-w64-x86_64-fftw mingw-w64-x86_64-openblas mingw-w64-x86_64-pkgconf - mingw-w64-x86_64-gcc mingw-w64-x86_64-ccache mingw-w64-x86_64-zlib mingw-w64-x86_64-libiconv - mingw-w64-x86_64-bzip2 mingw-w64-x86_64-gettext mingw-w64-x86_64-libsystre - mingw-w64-x86_64-libtre-git mingw-w64-x86_64-libwinpthread-git mingw-w64-x86_64-libpng - mingw-w64-x86_64-pcre + install: >- + bison + diffutils + dos2unix + flex + git + libintl + make + tar + zip + pacboy: >- + bzip2 + ccache + fftw + gcc + gettext + libiconv + libsystre + libtre-git + libwinpthread-git + openblas + pcre + pkgconf + toolchain + zlib - name: Setup OSGeo4W environment - uses: echoix/setup-OSGeo4W@17deecd39e077a80bf1081443998ea8edd6f15bf # v0.1.0 + uses: echoix/setup-OSGeo4W@f4311523e39f2c8b10e34ebbc3f2ff437ecfb9ed # v0.2.0 + id: osgeo4w with: package-dir: "D:/OSGeo4W_pkg" packages: | cairo-devel - fftw freetype-devel gdal-devel - gdal-ecw - gdal-mrsid geos-devel + libjpeg-turbo-devel liblas-devel libpng-devel libpq-devel libtiff-devel - libxdr netcdf-devel - pdal-devel - pdcurses proj-devel python3-core python3-jupyter @@ -74,8 +89,9 @@ jobs: python3-ply python3-pytest python3-pywin32 + python3-six python3-wxpython - regex-devel + sqlite3-devel 
zstd-devel - name: Set number of cores for compilation @@ -86,8 +102,6 @@ jobs: - name: Compile GRASS GIS shell: msys2 {0} run: | - export CFLAGS="${CFLAGS} -pipe" - export CXXFLAGS="${CXXFLAGS} -pipe" .github/workflows/build_osgeo4w.sh - name: Print installed versions @@ -96,7 +110,9 @@ jobs: run: .github/workflows/print_versions.sh - name: Test executing of the grass command - run: .github/workflows/test_simple.bat 'C:\OSGeo4W\opt\grass\grass85.bat' + run: .github/workflows/test_simple.bat '${{env.O4WROOT}}\opt\grass\grass85.bat' + env: + O4WROOT: ${{ steps.osgeo4w.outputs.root }} - name: Test executing of the grass command in bash shell: msys2 {0} @@ -116,11 +132,13 @@ jobs: shell: cmd /D /E:ON /V:OFF /S /C "CALL C:/OSGeo4W/OSGeo4W.bat "{0}"" - name: Run tests - run: .github/workflows/test_thorough.bat 'C:\OSGeo4W\opt\grass\grass85.bat' 'C:\OSGeo4W\bin\python3' + run: .github/workflows/test_thorough.bat '${{env.O4WROOT}}\opt\grass\grass85.bat' '${{env.O4WROOT}}\bin\python3' + env: + O4WROOT: ${{ steps.osgeo4w.outputs.root }} - name: Make HTML test report available if: ${{ always() }} - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 with: name: testreport-${{ matrix.os }} path: testreport diff --git a/.github/workflows/periodic_update.yml b/.github/workflows/periodic_update.yml index d27b8664c55..64887e51827 100644 --- a/.github/workflows/periodic_update.yml +++ b/.github/workflows/periodic_update.yml @@ -33,7 +33,7 @@ jobs: run: git status --ignored - name: Create Pull Request id: cpr - uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5 + uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # v7.0.6 with: commit-message: "config.guess + config.sub: updated from http://git.savannah.gnu.org/cgit/config.git/plain/" branch: periodic/update-configure diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 3fea65773ce..d646c2e544b 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -48,7 +48,7 @@ jobs: xargs -a <(awk '! 
/^ *(#|$)/' ".github/workflows/apt.txt") -r -- \ sudo apt-get install -y --no-install-recommends --no-install-suggests - - uses: rui314/setup-mold@b015f7e3f2938ad3a5ed6e5111a8c6c7c1d6db6e # v1 + - uses: rui314/setup-mold@8ec40be1d14871f7ce8fbf273c4b33f3ff75f1d1 # v1 - name: Install Python dependencies run: | @@ -115,7 +115,7 @@ jobs: coverage html - name: Upload coverage reports to Codecov - uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4.6.0 + uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303 # v5.1.2 with: verbose: true flags: pytest-python-${{ matrix.python-version }} @@ -123,7 +123,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} - name: Make python-only code coverage test report available if: ${{ !cancelled() }} - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 with: name: python-codecoverage-report-${{ matrix.os }}-${{ matrix.python-version }} path: coverage_html_report diff --git a/.github/workflows/python-code-quality.yml b/.github/workflows/python-code-quality.yml index 55a62f4cb7e..fb53acc7d2b 100644 --- a/.github/workflows/python-code-quality.yml +++ b/.github/workflows/python-code-quality.yml @@ -25,18 +25,18 @@ jobs: env: # renovate: datasource=python-version depName=python - PYTHON_VERSION: "3.10" - MIN_PYTHON_VERSION: "3.8" + PYTHON_VERSION: "3.13" + MIN_PYTHON_VERSION: "3.9" # renovate: datasource=pypi depName=black BLACK_VERSION: "24.10.0" # renovate: datasource=pypi depName=flake8 FLAKE8_VERSION: "7.1.1" # renovate: datasource=pypi depName=pylint - PYLINT_VERSION: "2.12.2" + PYLINT_VERSION: "3.3.3" # renovate: datasource=pypi depName=bandit - BANDIT_VERSION: "1.7.10" + BANDIT_VERSION: "1.8.0" # renovate: datasource=pypi depName=ruff - RUFF_VERSION: "0.7.2" + RUFF_VERSION: "0.8.6" runs-on: ${{ matrix.os }} permissions: @@ -129,13 +129,13 @@ jobs: bandit -c pyproject.toml -iii -r . 
-f sarif -o bandit.sarif --exit-zero - name: Upload Bandit Scan Results - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 with: name: bandit.sarif path: bandit.sarif - name: Upload SARIF File into Security Tab - uses: github/codeql-action/upload-sarif@4f3212b61783c3c68e8309a0f18a699764811cda # v3.27.1 + uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0 with: sarif_file: bandit.sarif @@ -147,7 +147,7 @@ jobs: run: | echo "MAKEFLAGS=-j$(nproc)" >> $GITHUB_ENV - - uses: rui314/setup-mold@b015f7e3f2938ad3a5ed6e5111a8c6c7c1d6db6e # v1 + - uses: rui314/setup-mold@8ec40be1d14871f7ce8fbf273c4b33f3ff75f1d1 # v1 - name: Build run: .github/workflows/build_${{ matrix.os }}.sh $HOME/install @@ -156,42 +156,44 @@ jobs: echo "$HOME/install/bin" >> $GITHUB_PATH - name: Run Pylint on grass package + # Until slower checks (like similarity) are reenabled, running in one step is faster + if: false run: | export PYTHONPATH=`grass --config python_path`:$PYTHONPATH export LD_LIBRARY_PATH=$(grass --config path)/lib:$LD_LIBRARY_PATH - cd python pylint --persistent=no --py-version=${{ env.MIN_PYTHON_VERSION }} --jobs=$(nproc) grass + - name: Run Pylint on other files using pytest + # Until slower checks (like similarity) are reenabled, running in one step is faster + if: false + run: | + pipx inject --include-apps pylint pytest + pipx inject pylint pytest-pylint pytest-github-actions-annotate-failures pytest-timeout + export PYTHONPATH=`grass --config python_path`:$PYTHONPATH + export LD_LIBRARY_PATH=$(grass --config path)/lib:$LD_LIBRARY_PATH + pytest --pylint -m pylint --pylint-jobs=$(nproc) \ + --pylint-ignore-patterns="${{ env.PylintIgnore }}" + env: + PylintIgnore: "python/.*,gui/.*" + - name: Run Pylint on wxGUI + # Until slower checks (like similarity) are reenabled, running in one step is faster + if: false run: | export PYTHONPATH=`grass --config python_path`:$PYTHONPATH export LD_LIBRARY_PATH=$(grass --config path)/lib:$LD_LIBRARY_PATH - cd gui/wxpython - pylint --persistent=no --py-version=${{ env.MIN_PYTHON_VERSION }} --jobs=$(nproc) * + pylint --persistent=no --py-version=${{ env.MIN_PYTHON_VERSION }} --jobs=$(nproc) gui - - name: Run Pylint on other files using pytest + - name: Run Pylint all in one pass run: | - pipx inject --include-apps pylint pytest==7.4.4 - pipx inject pylint pytest-pylint==0.19 pytest-github-actions-annotate-failures - echo "::warning file=.github/workflows/python-code-quality.yml,line=149,col=42,endColumn=48::\ - Temporarily downgraded pytest-pylint and pytest to allow merging other PRs.\ - The errors reported with a newer version seem legitimite and should be fixed \ - (2023-10-18, see https://github.com/OSGeo/grass/pull/3205)\ - (2024-01-28, see https://github.com/OSGeo/grass/issues/3380)" export PYTHONPATH=`grass --config python_path`:$PYTHONPATH export LD_LIBRARY_PATH=$(grass --config path)/lib:$LD_LIBRARY_PATH - pytest --pylint -m pylint --pylint-rcfile=.pylintrc --pylint-jobs=$(nproc) \ - --pylint-ignore-patterns="${{ env.PylintIgnore }}" - env: - PylintIgnore: "python/.*,gui/wxpython/.*,doc/.*,man/.*,utils/.*,locale/.*,raster/.*,\ - imagery/.*,scripts/r.in.wms/wms_drv.py,scripts/g.extension/g.extension.py,\ - temporal/t.rast.accdetect/t.rast.accdetect.py,temporal/t.rast.accumulate/t.rast.accumulate.py,\ - scripts/d.rast.edit/d.rast.edit.py" + pylint --persistent=no --py-version=${{ env.MIN_PYTHON_VERSION }} 
--jobs=$(nproc) . - name: Test compiling example modules run: | - ( cd doc/raster/r.example/ && make ) - ( cd doc/vector/v.example/ && make ) + ( cd doc/examples/raster/r.example/ && make ) + ( cd doc/examples/vector/v.example/ && make ) - name: Run Sphinx to check API documentation build run: | @@ -201,7 +203,7 @@ jobs: cp -rp dist.$ARCH/docs/html/libpython sphinx-grass - name: Make Sphinx documentation available - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 with: name: sphinx-grass path: sphinx-grass diff --git a/.github/workflows/super-linter.yml b/.github/workflows/super-linter.yml index ab2168cd1a3..01a22f99edf 100644 --- a/.github/workflows/super-linter.yml +++ b/.github/workflows/super-linter.yml @@ -31,7 +31,7 @@ jobs: # list of files that changed across commits fetch-depth: 0 - name: Lint code base - uses: super-linter/super-linter/slim@b92721f792f381cedc002ecdbb9847a15ece5bb8 # v7.1.0 + uses: super-linter/super-linter/slim@85f7611e0f7b53c8573cca84aa0ed4344f6f6a4d # v7.2.1 env: DEFAULT_BRANCH: main # To report GitHub Actions status checks diff --git a/.github/workflows/test-nix.yml b/.github/workflows/test-nix.yml index 6ce1285de9f..e6a4ebf14c6 100644 --- a/.github/workflows/test-nix.yml +++ b/.github/workflows/test-nix.yml @@ -31,7 +31,7 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Install nix - uses: DeterminateSystems/nix-installer-action@b92f66560d6f97d6576405a7bae901ab57e72b6a # v15 + uses: DeterminateSystems/nix-installer-action@e50d5f73bfe71c2dd0aa4218de8f4afa59f8f81d # v16 - name: Setup cachix uses: cachix/cachix-action@ad2ddac53f961de1989924296a1f236fcfbaa4fc # v15 diff --git a/.github/workflows/ubuntu.yml b/.github/workflows/ubuntu.yml index 9023df7d1b3..5cb1c6a3122 100644 --- a/.github/workflows/ubuntu.yml +++ b/.github/workflows/ubuntu.yml @@ -149,7 +149,7 @@ jobs: - name: Make HTML test report available if: ${{ always() }} - uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3 + uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0 with: name: testreport-${{ matrix.os }}-${{ matrix.config }}-${{ matrix.extra-include }} path: testreport diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fd6d6c67f50..5b8c902d98b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,13 +37,13 @@ repos: ) - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: v0.7.2 + rev: v0.8.6 hooks: # Run the linter. - id: ruff args: [--fix, --preview] - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.42.0 + rev: v0.43.0 hooks: - id: markdownlint-fix # Using this mirror lets us use mypyc-compiled black, which is about 2x faster diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index b260c0fd401..00000000000 --- a/.pylintrc +++ /dev/null @@ -1,656 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-allow-list= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) 
-extension-pkg-whitelist= - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -fail-on= - -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 - -# Files or directories to be skipped. They should be base names, not paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the ignore-list. The -# regex matches against paths and can be in Posix or Windows format. -ignore-paths=python/.*, - gui/wxpython/.*, - .*/testsuite/.*, - -# Files or directories matching the regex patterns are skipped. The regex -# matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.8 - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". 
-disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - deprecated-pragma, - fixme, - missing-module-docstring, - missing-function-docstring, - missing-class-docstring, - import-outside-toplevel, - useless-import-alias, - consider-using-from-import, - wrong-import-order, - wrong-import-position, - import-error, - no-name-in-module, - no-member, - unused-import, - unused-variable, - unused-argument, - expression-not-assigned, - self-assigning-variable, - unspecified-encoding, - no-self-use, - inconsistent-return-statements, - invalid-name, - broad-except, - bare-except, - raise-missing-from, - undefined-variable, - undefined-loop-variable, - cell-var-from-loop, - not-callable, - global-variable-undefined, - global-statement, - global-variable-not-assigned, - attribute-defined-outside-init, - line-too-long, - eval-used, - no-value-for-parameter, - deprecated-method, - deprecated-argument, - anomalous-backslash-in-string, - redefined-builtin, - redefined-outer-name, - useless-object-inheritance, - useless-return, - consider-using-f-string, - consider-iterating-dictionary, - consider-using-dict-items, - consider-using-enumerate, - consider-using-in, - consider-using-with, - consider-using-generator, - consider-using-max-builtin, - consider-using-min-builtin, - consider-using-get, - unnecessary-comprehension, - unnecessary-pass, - super-with-arguments, - unidiomatic-typecheck, - use-implicit-booleaness-not-comparison, - no-else-break, - no-else-return, - no-else-raise, - simplifiable-if-statement, - use-maxsplit-arg, - use-list-literal, - use-a-generator, - unneeded-not, - chained-comparison, - use-symbolic-message-instead - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=7 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit,argparse.parse_error - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. 
-max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: fr_MC (myspell), fr_CA -# (myspell), fr_BE (myspell), fr_LU (myspell), fr_CH (myspell), fr_FR -# (myspell), ar (myspell), es_CR (myspell), de_CH_frami (myspell), es_EC -# (myspell), ar_YE (myspell), en_CA (myspell), ar_BH (myspell), ar_IN -# (myspell), ar_TN (myspell), en_ZA (myspell), de_DE_frami (myspell), ar_SY -# (myspell), ar_IQ (myspell), ar_LB (myspell), ar_KW (myspell), ru_RU -# (myspell), es_BO (myspell), en_GB (myspell), ar_SD (myspell), de_DE -# (myspell), es_CU (myspell), es_PA (myspell), ar_EG (myspell), es_HN -# (myspell), de_CH (myspell), es_NI (myspell), es_AR (myspell), es_ES -# (myspell), ar_SA (myspell), es_VE (myspell), de_AT_frami (myspell), it_IT -# (myspell), ar_OM (myspell), ar_DZ (myspell), it_CH (myspell), es_MX -# (myspell), es_PY (myspell), en_AU (myspell), es_DO (myspell), es_SV -# (myspell), es_PR (myspell), es_GT (myspell), ar_LY (myspell), ar_JO -# (myspell), en_US (myspell), de_AT (myspell), es_PE (myspell), ar_QA -# (myspell), es_CL (myspell), pt_BR (myspell), ar_AE (myspell), pt_PT -# (myspell), es_CO (myspell), es_UY (myspell), ar_MA (myspell), fr (myspell), -# es_US (myspell), en (aspell). -spelling-dict= - -# List of comma separated words that should be considered directives if they -# appear and the beginning of a comment and should not be checked. -spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# class is considered mixin if its name matches the mixin-class-rgx option. -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. 
In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# Regex pattern to define which classes are considered mixins ignore-mixin- -# members is set to 'yes' -mixin-class-rgx=.*[Mm]ixin - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -# Translation function is (unfortunately) defined as a buildin. -additional-builtins=_ - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of names allowed to shadow builtins -allowed-redefined-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -# On top of the defaults, simple unused is also permissible. -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=past.builtins,future.builtins,builtins,io - - -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes - -# Docstrings are removed from the similarity computation -ignore-docstrings=yes - -# Imports are removed from the similarity computation -ignore-imports=no - -# Signatures are removed from the similarity computation -ignore-signatures=no - -# Minimum lines number of a similarity. -# Matching only larger chunks of code, not the default 4 lines. -min-similarity-lines=50 - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[BASIC] - -# Naming style matching correct argument names. 
-argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. -#class-const-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - x, - y, - z, - ex, - Run, - _ - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
-expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=2000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). -import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. -max-args=15 - -# Maximum number of attributes for a class (see R0902). -max-attributes=25 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=8 - -# Maximum number of branch for function / method body. -max-branches=44 - -# Maximum number of locals for function / method body. -max-locals=50 - -# Maximum number of parents for a class (see R0901). 
-max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=9 - -# Maximum number of statements in function / method body. -max-statements=220 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=1 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception diff --git a/.travis.yml b/.travis.yml index 50dff8d2fa7..435d0013de8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -31,6 +31,11 @@ env: - GRASS_EXTRA_CXXFLAGS="-Werror -fPIC -Wfatal-errors" before_install: + # Show available versions if ever pyenv global fails + - pyenv versions + - pyenv global 3.10 + # Show that the selected version is correctly set + - pyenv versions - ./.travis/$TRAVIS_OS_NAME.before_install.sh install: diff --git a/Dockerfile b/Dockerfile index 66b15d5970e..7fe43704f2f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -# syntax=docker/dockerfile:1.11@sha256:10c699f1b6c8bdc8f6b4ce8974855dd8542f1768c26eb240237b8f1c9c6c9976 +# syntax=docker/dockerfile:1.12@sha256:93bfd3b68c109427185cd78b4779fc82b484b0b7618e36d0f104d4d801e66d25 # Note: This file must be kept in sync in ./Dockerfile and ./docker/ubuntu/Dockerfile. # Changes to this file must be copied over to the other file. diff --git a/Makefile b/Makefile index 8b32a149a5a..4ae6fae137e 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ # PURPOSE: It provides the commands necessary to compile, install, # clean, and uninstall GRASS # See INSTALL.md file for usage. -# COPYRIGHT: (C) 2002-2024 by the GRASS Development Team +# COPYRIGHT: (C) 2002-2025 by the GRASS Development Team # # This program is free software under the GNU General Public # License (>=v2). Read the file COPYING that comes with GRASS diff --git a/README.md b/README.md index 7dfdb49df82..ef39e640721 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ visualization. Launch this repository in Binder and experiment with GRASS's Python API in Jupyter Notebooks by clicking the button below: -[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/OSGeo/grass/main?labpath=doc%2Fnotebooks%2Fjupyter_example.ipynb) +[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/OSGeo/grass/main?labpath=doc%2Fexamples%2Fnotebooks%2Fjupyter_example.ipynb) ## Contributing diff --git a/REQUIREMENTS.md b/REQUIREMENTS.md index 5d173133716..4d61c80e323 100644 --- a/REQUIREMENTS.md +++ b/REQUIREMENTS.md @@ -35,6 +35,8 @@ for other platforms you may have to install some of them. GDAL: [https://gdal.org](https://gdal.org) - **Python >= 3.8** (for temporal framework, scripts, wxGUI, and ctypes interface) [https://www.python.org](https://www.python.org) +- **MkDocs** with "Material" theme Python packages for the manual pages: + See `man/mkdocs/requirements.txt`. 
## Optional packages @@ -135,7 +137,7 @@ MacOSX users may go here to download precompiled libraries etc.: --- -© _GRASS Development Team 1997-2024_ +© _GRASS Development Team 1997-2025_ Please report bugs here: [https://grass.osgeo.org/contribute/](https://grass.osgeo.org/contribute/) diff --git a/configure b/configure index 56b203e0c13..335f79ecf08 100755 --- a/configure +++ b/configure @@ -10411,38 +10411,6 @@ rm -f core conftest.err conftest.$ac_objext conftest.beam \ printf "%s\n" "#define HAVE_PDAL 1" >>confdefs.h - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: checking whether to use PDAL NoFilenameWriter" >&5 -printf %s "checking whether to use PDAL NoFilenameWriter... " >&6; } - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - class St:public pdal::NoFilenameWriter {}; -int -main (void) -{ - - class NFWTest : public pdal::NoFilenameWriter {}; - - ; - return 0; -} -_ACEOF -if ac_fn_cxx_try_link "$LINENO" -then : - - -printf "%s\n" "#define HAVE_PDAL_NOFILENAMEWRITER 1" >>confdefs.h - - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -printf "%s\n" "yes" >&6; } - -else $as_nop - { printf "%s\n" "$as_me:${as_lineno-$LINENO}: result: no" >&5 -printf "%s\n" "no" >&6; } -fi -rm -f core conftest.err conftest.$ac_objext conftest.beam \ - conftest$ac_exeext conftest.$ac_ext - LIBS=${ac_save_libs} CFLAGS=${ac_save_cflags} fi diff --git a/configure.ac b/configure.ac index fc3403163da..47d78b469b1 100644 --- a/configure.ac +++ b/configure.ac @@ -9,7 +9,7 @@ # PURPOSE: This configure runs all the tests to determine what components # are installed on the current system. It also defines certain # configuration variables for compilation and installation. -# COPYRIGHT: (C) 2000-2024 by the GRASS Development Team +# COPYRIGHT: (C) 2000-2025 by the GRASS Development Team # # This program is free software under the GNU General # Public License (>=v2). Read the file COPYING that @@ -1092,16 +1092,6 @@ else AC_DEFINE(HAVE_PDAL, 1, [Define to 1 if PDAL exists.]) - AC_MSG_CHECKING(whether to use PDAL NoFilenameWriter) - AC_LINK_IFELSE([AC_LANG_PROGRAM([[#include - class St:public pdal::NoFilenameWriter {};]], [[ - class NFWTest : public pdal::NoFilenameWriter {}; - ]])], - [ - AC_DEFINE(HAVE_PDAL_NOFILENAMEWRITER, 1, [Define to 1 if PDAL NoFilenameWriter is present.]) - AC_MSG_RESULT(yes) - ],[AC_MSG_RESULT(no)]) - LIBS=${ac_save_libs} CFLAGS=${ac_save_cflags} fi diff --git a/db/databaseintro.html b/db/databaseintro.html index ef7d34791b5..f7fb11a3b47 100644 --- a/db/databaseintro.html +++ b/db/databaseintro.html @@ -35,8 +35,8 @@

Attribute data import and export

Further conversion tools: @@ -101,4 +101,5 @@

See also

  • Introduction into image processing
  • Introduction into temporal data processing
  • Projections and spatial transformations
  • +
  • Graphical User Interface
  • diff --git a/db/db.connect/db.connect.html b/db/db.connect/db.connect.html index 48d6dbee02f..5b6e2fa1517 100644 --- a/db/db.connect/db.connect.html +++ b/db/db.connect/db.connect.html @@ -9,7 +9,7 @@

    DESCRIPTION

    NOTES

    -Values are stored in the mapset's VAR file; +Values are stored in the mapset's VAR file; the connection is not tested for validity.

    The -p flag will display the current connection parameters.

    The -c flag will silently check if the connection parameters have @@ -36,7 +36,7 @@

    SQLite (default backend)

    PostgreSQL (local connection)

    Local storage, database tables stored in database "mydb" -(may require the use of db.login): +(may require the use of db.login):
     db.connect driver=pg database=mydb
    @@ -49,7 +49,7 @@ 

    PostgreSQL (local connection)

    PostgreSQL (network connection)

    Network storage, database tables stored in database "mydb" -(may require the use of db.login): +(may require the use of db.login):
     db.connect driver=pg database=mydb
    @@ -62,7 +62,7 @@ 

    PostgreSQL (network connection)

    MySQL (local connection)

    Local storage, database tables stored in database "mydb" (may require -the use of db.login): +the use of db.login):
     db.connect driver=mysql database=mydb
    @@ -75,7 +75,7 @@ 

    MySQL (local connection)

    MySQL (network connection)

    Network storage, database tables stored in database "mydb" -(may require the use of db.login): +(may require the use of db.login):
     db.connect driver=mysql database=mydb
    @@ -88,7 +88,7 @@ 

    MySQL (network connection)

    ODBC

    Network storage, database tables stored in database "mydb" -(may require the use of db.login): +(may require the use of db.login):
     db.connect driver=odbc database=mydb
    diff --git a/db/db.execute/db.execute.html b/db/db.execute/db.execute.html
    index 8700258b404..08d5b10ae54 100644
    --- a/db/db.execute/db.execute.html
    +++ b/db/db.execute/db.execute.html
    @@ -76,7 +76,7 @@ 

    EXAMPLES

    Update attribute with multiple SQL instructions in file -(e.g., file.sql, instruction line must end with a semicolon): +(e.g., file.sql, instruction line must end with a semicolon):
     UPDATE roads SET travelcost=5 WHERE cat=1;
     UPDATE roads SET travelcost=2 WHERE cat=2;
    @@ -104,7 +104,7 @@ 

    SEE ALSO

    GRASS SQL interface - +

    AUTHOR

    diff --git a/db/db.login/db.login.html b/db/db.login/db.login.html index 7173f2c044c..6f2eb652e4b 100644 --- a/db/db.login/db.login.html +++ b/db/db.login/db.login.html @@ -15,8 +15,8 @@

    NOTE

    file in the user account, specifically
      -
    • in the 'home' directory, i.e. $HOME/.grass8/dblogin (Unix-like systems)
    • -
    • %APPDATA%\Roaming\GRASS8\dblogin (MS-Windows)
    • +
    • in the 'home' directory, i.e. $HOME/.grass8/dblogin (Unix-like systems)
    • +
    • %APPDATA%\Roaming\GRASS8\dblogin (MS-Windows)
    Only the file owner can access this file. diff --git a/db/db.select/db.select.html b/db/db.select/db.select.html index 175151cbe55..58ceba5443d 100644 --- a/db/db.select/db.select.html +++ b/db/db.select/db.select.html @@ -62,7 +62,7 @@

    Execute multiple SQL statements

     cat file.sql
     SELECT * FROM busstopsall WHERE cat = 1
    -SELECT cat FROM busstopsall WHERE cat > 4 AND cat < 8
    +SELECT cat FROM busstopsall WHERE cat > 4 AND cat < 8
     
     db.select input=file.sql
     
    diff --git a/db/drivers/README b/db/drivers/README deleted file mode 100644 index e2a2d435f2e..00000000000 --- a/db/drivers/README +++ /dev/null @@ -1,28 +0,0 @@ -This directory contains drivers for the DBMI library. -The driver functions are for internal usage. - -The DBMI API to be used for module programming is available in: -lib/db/ - - -NOTE: -db__driver_* functions are implemented in a driver. If some of them -are not used or defined, the driver will use stub functions in -lib/db/stubs/ - -For some platforms like Cygwin, multiply defined symbols are not -resolved in a way that UNIX does. Even worse is that it is impossible -to build shared libraries with undefined symbols. For example, -libgrass_dbmidriver.so cannot be built without any implementations -of db__driver_* functions which should be specific to a db driver. - -To work around this problem, function pointers are defined to use -driver's implementations instead of those of the db stubs library. -To do this automatically, run '../mk_dbdriver_h.sh' in driver's -directory, #include "dbdriver.h" from main.c, and execute init_dbdriver(). - -Function pointers are defined in grass6/lib/db/dbmi_driver/dbstubs.h -This header file can be generated with -lib/db/dbmi_driver/mk_dbstubs_h.sh - -Please read lib/db/README diff --git a/db/drivers/README.md b/db/drivers/README.md new file mode 100644 index 00000000000..e98c617267c --- /dev/null +++ b/db/drivers/README.md @@ -0,0 +1,30 @@ +This directory contains drivers for the DBMI library. +The driver functions are for internal usage. + +The DBMI API to be used for module programming is available in: +`lib/db/` + +NOTE: +`db__driver_*` functions are implemented in a driver. If some of them +are not used or defined, the driver will use stub functions in +`lib/db/stubs/`. + +For some platforms like Cygwin, multiply defined symbols are not +resolved in a way that UNIX does. Even worse is that it is impossible +to build shared libraries with undefined symbols. For example, +`libgrass*dbmidriver.so` cannot be built without any implementations +of `db__driver*\*` functions which should be specific to a db driver. + +To work around this problem, function pointers are defined to use +driver's implementations instead of those of the db stubs library. +To do this automatically, run `../mk_dbdriver_h.sh` (GRASS GIS 6) +in driver's directory, `#include "dbdriver.h"` from `main.c`, and +execute `init_dbdriver()`. + +Function pointers are defined in `lib/db/dbmi_driver/dbstubs.h` +This header file can be generated with +`lib/db/dbmi_driver/mk_dbstubs_h.sh` (GRASS GIS 6). + +Please read lib/db/README.md and + + diff --git a/db/drivers/mysql/grass-mesql.html b/db/drivers/mysql/grass-mesql.html index e04b30a2a8f..3e2299b0d88 100644 --- a/db/drivers/mysql/grass-mesql.html +++ b/db/drivers/mysql/grass-mesql.html @@ -1,28 +1,7 @@ - - - -GRASS-MySQL embedded driver - GRASS GIS manual - - - - - - - -GRASS logo
    - -

    MySQL embedded driver in GRASS

    - -

    KEYWORDS

    - -database, attribute table, driver - -

    DESCRIPTION

    -MySQL database driver in GRASS enables GRASS to store vector -attributes in MySQL embedded database without necessity -to run MySQL server. +MySQL database driver enables GRASS to store vector attributes +in MySQL embedded database without necessity to run MySQL server.

    Driver and database name

    @@ -35,9 +14,9 @@

    Driver and database name

    before use of the driver. In the name of database it is possible to use 3 variables:
      -
    • $GISDBASE - path to current GISBASE -
    • $LOCATION_NAME - name of current location -
    • $MAPSET - name of current mapset +
    • $GISDBASE - path to current GISBASE
    • +
    • $LOCATION_NAME - name of current location
    • +
    • $MAPSET - name of current mapset
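As an illustration only (the exact path is an assumption, not prescribed by this page), the three variables can be combined into a per-mapset database location:

```bash
# keep the embedded MySQL database inside the current mapset
db.connect driver=mesql database='$GISDBASE/$LOCATION_NAME/$MAPSET/mysql'
```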

    @@ -68,14 +47,7 @@

    Troubleshooting: SQL syntax error

    Attempting to use a reserved SQL word as column or table name will result in a "SQL syntax" error. The list of reserved words for MySQL can be -found in the MySQL manual. - -

    SEE ALSO

    - - -db.connect, -SQL support in GRASS GIS - +found in the MySQL manual.

    AUTHOR

    @@ -84,11 +56,11 @@

    AUTHOR

    Credits: Development of the driver was sponsored by Faunalia (Italy) -as part of a project for ATAC. +as part of a project for ATAC. +

    SEE ALSO

    -
    -

    Main index - Database index - Topics index - Keywords Index - Full index

    -

    © 2003-2022 GRASS Development Team, GRASS GIS 8 Reference Manual

    - - + +db.connect, +SQL support in GRASS GIS + diff --git a/db/drivers/mysql/grass-mysql.html b/db/drivers/mysql/grass-mysql.html index af8c0273d98..f86dab367d3 100644 --- a/db/drivers/mysql/grass-mysql.html +++ b/db/drivers/mysql/grass-mysql.html @@ -1,7 +1,7 @@ -MySQL database driver enables GRASS to store vector attributes in -MySQL server. +MySQL database driver enables GRASS to store vector attributes +in MySQL server.

    Because vector attribute tables @@ -14,7 +14,7 @@

    Creating a MySQL database

    A new database is created within MySQL:
    -mysql> CREATE DATABASE mydb;
    +mysql> CREATE DATABASE mydb;
     
    See the MySQL manual for details. @@ -27,39 +27,41 @@

    Driver and database name

    The parameter 'database' can be given in two formats:
      -
    • Database name - in case of connection from localhost -
    • String of comma separated list of kye=value options. +
    • Database name - in case of connection from localhost
    • +
    • String of comma separated list of key=value options. Supported options are: -
        -
      • dbname - database name -
      • host - host name or IP address -
      • port - server port number -
      +
        +
      • dbname - database name
      • +
      • host - host name or IP address
      • +
      • port - server port number
      • +
      +
    +

    Examples of connection parameters: -

    -  db.connect driver=mysql database=mytest
    -  db.connect driver=mysql database='dbname=mytest,host=test.grass.org'
    -
    +
    +db.connect driver=mysql database=mytest
    +db.connect driver=mysql database='dbname=mytest,host=test.grass.org'
    +

    Data types

    GRASS supports almost all MySQL data types with following limitations:
      -
    • Binary columns (BINARY, VARBINARY, TINYBLOB, MEDIUMBLOB, - BLOB, LONGBLOB) are not not supported. - If a table with binary column(s) is used in GRASS - a warning is printed and only the supported columns are - returned in query results. +
• Binary columns (BINARY, VARBINARY, TINYBLOB, MEDIUMBLOB, +BLOB, LONGBLOB) are not supported. +If a table with binary column(s) is used in GRASS +a warning is printed and only the supported columns are +returned in query results.
    • -
    • Columns of type SET and ENUM are represented as string (VARCHAR). +
    • Columns of type SET and ENUM are represented as string (VARCHAR).
    • -
    • Very large integers in columns of type BIGINT can be lost - or corrupted because GRASS does not support 64 bin integeres - on most platforms. +
• Very large integers in columns of type BIGINT can be lost +or corrupted because GRASS does not support 64 bit integers +on most platforms.
    • -
    • GRASS does not currently distinguish types TIMESTAMP and - DATETIME. Both types are in GRASS interpreted as TIMESTAMP. +
• GRASS does not currently distinguish types TIMESTAMP and +DATETIME. Both types are interpreted in GRASS as TIMESTAMP.

    Indexes

    @@ -80,11 +82,12 @@

    Privileges

    to other users you have to ask your MySQL server administrator to grant select privilege to them on the MySQL database used for that mapset. For example, to allow everybody to read data -in from your database 'mydb':
    -
    +in from your database 'mydb':
    +
    +
     shell> mysql --user=root mysql
     mysql> GRANT SELECT ON mydb.* TO ''@'%';
    -
    +

    Schemas

    @@ -104,7 +107,16 @@

    Troubleshooting: SQL syntax error

    Attempting to use a reserved SQL word as column or table name will result in a "SQL syntax" error. The list of reserved words for MySQL can be -found in the MySQL manual. +found in the MySQL manual. + +

    AUTHOR

    + +Radim Blazek + +

    +Credits: Development of the driver was sponsored by +Faunalia (Italy) +as part of a project for ATAC.

    SEE ALSO

    @@ -112,13 +124,3 @@

    SEE ALSO

    db.connect, SQL support in GRASS GIS
    - -

    Credits

    - -Development of the driver was sponsored by -Faunalia (Italy) -as part of a project for ATAC. - -

    AUTHOR

    - -Radim Blazek diff --git a/db/drivers/ogr/README b/db/drivers/ogr/README index b5fa80bb25e..fde41d85722 100644 --- a/db/drivers/ogr/README +++ b/db/drivers/ogr/README @@ -5,4 +5,4 @@ SQL DB support via OGR ----------------------------------------------------- General Documentation - http://www.remotesensing.org/gdal/ogr/ogr_sql.html + https://gdal.org/en/stable/user/ogr_sql_dialect.html diff --git a/db/drivers/ogr/grass-ogr.html b/db/drivers/ogr/grass-ogr.html index dbeca085ecf..c573f882288 100644 --- a/db/drivers/ogr/grass-ogr.html +++ b/db/drivers/ogr/grass-ogr.html @@ -7,7 +7,7 @@

    SEE ALSO

    SQL support in GRASS GIS - +

    diff --git a/db/drivers/postgres/README b/db/drivers/postgres/README index a2959739f6e..76d44e6bf6b 100644 --- a/db/drivers/postgres/README +++ b/db/drivers/postgres/README @@ -10,7 +10,7 @@ by G_warning(). FIELD TYPES: Field type code may change. See for example -http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/include/catalog/pg_type.h;hb=refs/heads/master +https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/include/catalog/pg_type.h;hb=refs/heads/master Because of this, type codes must be read from server when database is opened. @@ -19,7 +19,7 @@ Check also for PostgreSQL data types for defining them in GRASS: Supported types in ./globals.h: -(See https://www.postgresql.org/docs/9.4/interactive/datatype.html) +(See https://www.postgresql.org/docs/current/datatype.html) DB_C_TYPE_INT: bit, int2, smallint, int4, int, integer, int8, bigint, serial, oid diff --git a/db/drivers/postgres/execute.c b/db/drivers/postgres/execute.c index fa6dd556101..99a6c79df11 100644 --- a/db/drivers/postgres/execute.c +++ b/db/drivers/postgres/execute.c @@ -1,7 +1,7 @@ /*! \file db/driver/postgres/execute.c - \brief DBMI - Low Level PostgreSQL database driver - execute statemets + \brief DBMI - Low Level PostgreSQL database driver - execute statements This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details. diff --git a/db/drivers/postgres/grass-pg.html b/db/drivers/postgres/grass-pg.html index cb707fa9314..721d9a035b0 100644 --- a/db/drivers/postgres/grass-pg.html +++ b/db/drivers/postgres/grass-pg.html @@ -5,7 +5,7 @@

    Creating a PostgreSQL database

    -A new database is created with createdb, see +A new database is created with createdb, see the PostgreSQL manual for details. @@ -76,7 +76,7 @@

    Geometry import from PostgreSQL table into GRASS

    PostGIS: PostgreSQL with vector geometry

    -PostGIS: +PostGIS: adds geographic object support to PostgreSQL.

    Example: Import from PostGIS

    @@ -114,15 +114,15 @@

    Example: Import from PostGIS

    Geometry Converters

      -
    • PostGIS with shp2pgsql:
      - shp2pgsql -D lakespy2 lakespy2 test > lakespy2.sql +
    • PostGIS with shp2pgsql:
      + shp2pgsql -D lakespy2 lakespy2 test > lakespy2.sql
    • -
    • e00pg: E00 to PostGIS filter, +
    • e00pg: E00 to PostGIS filter, see also v.in.e00.
    • GDAL/OGR ogrinfo and ogr2ogr: GIS vector format converter and library, e.g. ArcInfo or SHAPE to PostGIS.
      - ogr2ogr -f "PostgreSQL" shapefile ?? + ogr2ogr -f "PostgreSQL" shapefile ??
    @@ -143,6 +143,6 @@

    REFERENCES

    diff --git a/db/drivers/sqlite/grass-sqlite.html b/db/drivers/sqlite/grass-sqlite.html index ae59def1e52..57808ccb79c 100644 --- a/db/drivers/sqlite/grass-sqlite.html +++ b/db/drivers/sqlite/grass-sqlite.html @@ -34,7 +34,7 @@

    Operators available in conditions

    Browsing table data in DB

    -A convenient SQLite front-end is sqlitebrowser. +A convenient SQLite front-end is sqlitebrowser. To open a DB file stored within the current mapset, the following way is suggested (corresponds to above database connection): @@ -69,10 +69,14 @@
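A hedged sketch of such a call, assuming the usual per-mapset location of the SQLite file (sqlite/sqlite.db):

```bash
# open the current mapset's SQLite database in sqlitebrowser
sqlitebrowser "$(g.gisenv get=GISDBASE)/$(g.gisenv get=LOCATION_NAME)/$(g.gisenv get=MAPSET)/sqlite/sqlite.db"
```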

    SEE ALSO

    db.connect, db.execute, db.select -

    +
    +

    + SQL support in GRASS GIS -

    +
    +

    + SQLite web site, SQLite manual, - sqlite - Management Tools + sqlite - Management Tools diff --git a/display/d.extract/d.extract.html b/display/d.extract/d.extract.html index 39c37915ade..9e09ddadc03 100644 --- a/display/d.extract/d.extract.html +++ b/display/d.extract/d.extract.html @@ -6,7 +6,7 @@

    DESCRIPTION

    EXAMPLE

    -

    Graphically extract roads from a roads map:

    +

    Graphically extract roads from a roads map

     d.mon x0
     d.vect roads
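The interactive extraction step itself would then follow; the output map name below is illustrative:

```bash
# select road features with the mouse and save them to a new vector map
d.extract input=roads output=roads_selected
```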
    diff --git a/display/d.geodesic/d.geodesic.html b/display/d.geodesic/d.geodesic.html
    index d419b099c53..91c0497ef82 100644
    --- a/display/d.geodesic/d.geodesic.html
    +++ b/display/d.geodesic/d.geodesic.html
    @@ -33,7 +33,7 @@ 

    EXAMPLE

    -
    +Geodesic line (great circle line)
    Geodesic line (great circle line)
    diff --git a/display/d.graph/d.graph.html b/display/d.graph/d.graph.html index cd046bdf0e6..34017d34bc7 100644 --- a/display/d.graph/d.graph.html +++ b/display/d.graph/d.graph.html @@ -132,10 +132,10 @@

    COMMANDS

    symbol type size xper yper [line_color [fill_color]]
    A symbol is drawn at the given size on the display monitor. The xper and yper options define the center of the icon and -are given as a percentage of the display frame (0,0 is lower left). -The symbol can be any of those stored in $GISBASE/etc/symbol/ +are given as a percentage of the display frame (0,0 is lower left). +The symbol can be any of those stored in $GISBASE/etc/symbol/ (e.g. basic/circle) or stored in the user's mapset directory in the -form $MAPSET/symbol/type/name. +form $MAPSET/symbol/type/name. The colors may be either a standard color name, an R:G:B triplet, or "none". If using an R:G:B triplet, each color value can range from 0-255. If not specified the default line_color is black and the default @@ -207,7 +207,7 @@
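A minimal sketch of drawing one such symbol from the shell (size, position and colors are illustrative values):

```bash
# place a red circle with a black outline at the center of the display frame
echo "symbol basic/circle 10 50 50 black red" | d.graph
```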

    LIMITATIONS

    There are no automated ways of generating graphic images. It is anticipated that GRASS user sites will write programs to convert output from a resident graphics editor into GRASS d.graph format. -(e.g. EPS -> d.graph, perhaps with the help of a +(e.g. EPS -> d.graph, perhaps with the help of a pstoedit plugin)

    SEE ALSO

    diff --git a/display/d.grid/d.grid.html b/display/d.grid/d.grid.html index 7d775d71bc7..412a6a413ca 100644 --- a/display/d.grid/d.grid.html +++ b/display/d.grid/d.grid.html @@ -9,7 +9,7 @@

    DESCRIPTION

    If the user provides a -g flag a geographic (projected) grid will be drawn. With the -g flag the size argument accepts both decimal degrees and colon separated -ddd:mm:ss coordinates (eg. 00:30:00 for half of a degree). +ddd:mm:ss coordinates (eg. 00:30:00 for half of a degree). A geographic grid cannot be drawn for a latitude/longitude or XY projection. diff --git a/display/d.his/d.his.html b/display/d.his/d.his.html index 56f7d99bebc..49909d20d04 100644 --- a/display/d.his/d.his.html +++ b/display/d.his/d.his.html @@ -1,5 +1,9 @@

    DESCRIPTION

    +d.his displays the result obtained by combining hue, +intensity, and saturation (HIS) values from user-specified input +raster map layers. +

    HIS stands for hue, intensity, and saturation. This program produces a raster map layer providing a visually pleasing combination of hue, intensity, and diff --git a/display/d.histogram/pie.c b/display/d.histogram/pie.c index 9183f8ca736..9657312aec8 100644 --- a/display/d.histogram/pie.c +++ b/display/d.histogram/pie.c @@ -30,8 +30,10 @@ #include -#include #include +#include +#include +#include #include "pie.h" @@ -123,7 +125,10 @@ int pie(struct stat_list *dist_stats, /* list of distribution statistics */ i++; tic_every = tics[i].every; tic_unit = tics[i].unit; - strcpy(tic_name, tics[i].name); + if (G_strlcpy(tic_name, tics[i].name, sizeof(tic_name)) >= + sizeof(tic_name)) { + G_fatal_error(_("Tic name <%s> is too long"), tics[i].name); + } } else { if (is_fp && !cat_ranges) { diff --git a/display/d.labels/d.labels.html b/display/d.labels/d.labels.html index 61a9f94052e..0bcf9b07c57 100644 --- a/display/d.labels/d.labels.html +++ b/display/d.labels/d.labels.html @@ -7,7 +7,7 @@

    DESCRIPTION

    the v.label program or simply created by the user as an ASCII file (using a text editor) and placed in the appropriate directory under the user's current mapset and project -(i.e. $MAPSET/paint/labels/). +(i.e. $MAPSET/paint/labels/).

    NOTES

    diff --git a/display/d.legend.vect/d.legend.vect.html b/display/d.legend.vect/d.legend.vect.html index 43602b0e936..36a2e4e6533 100644 --- a/display/d.legend.vect/d.legend.vect.html +++ b/display/d.legend.vect/d.legend.vect.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -Module d.legend.vect draws vector legend of currently displayed vector maps. +d.legend.vect draws vector legend of currently displayed vector maps.

    Parameter at defines the screen position of upper-left legend corner. Parameter columns defines the number of legend columns. diff --git a/display/d.legend/d.legend.html b/display/d.legend/d.legend.html index 9ac86a51a36..66706b390b5 100644 --- a/display/d.legend/d.legend.html +++ b/display/d.legend/d.legend.html @@ -104,7 +104,7 @@

    EXAMPLE

    -Elevation map with legend +Elevation map with legend
    Displaying the legend with custom labels and background: @@ -116,7 +116,7 @@

    EXAMPLE

    -Elevation map with custom legend +Elevation map with custom legend
    Displaying the legend with logarithmic scale: @@ -129,7 +129,7 @@

    EXAMPLE

    -Flow accumulation map with logarithmic legend +Flow accumulation map with logarithmic legend
    diff --git a/display/d.linegraph/d.linegraph.html b/display/d.linegraph/d.linegraph.html index 23a11463df9..a9782c98c2c 100644 --- a/display/d.linegraph/d.linegraph.html +++ b/display/d.linegraph/d.linegraph.html @@ -24,7 +24,7 @@

    File inputs

    should) be only relative paths to these files. While this is not recommended for scripting, it can be advantageous when typing the paths manually. For example when all files are stored in the directory -/home/john/data, the user can provide the following in the command +/home/john/data, the user can provide the following in the command line:
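A hedged sketch of that usage (the file names are illustrative, and the directory option name is an assumption about the current d.linegraph interface):

```bash
# point d.linegraph at the data directory and pass plain file names
d.linegraph directory=/home/john/data x_file=x.txt y_file=y.txt
```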
    @@ -145,7 +145,7 @@ 

    EXAMPLE

    -->

    -The next command sequence creates a file plot.png in the current +The next command sequence creates a file plot.png in the current directory which is the drawing made by d.linegraph.

    diff --git a/display/d.mon/d.mon.html b/display/d.mon/d.mon.html
    index ed1e6cb2344..9f76cff9880 100644
    --- a/display/d.mon/d.mon.html
    +++ b/display/d.mon/d.mon.html
    @@ -52,8 +52,8 @@ 

    Releasing (unselecting) a monitor

    NOTES

    d.mon is designed for interactive use. If non-interactive use -is needed (e.g., in a script) set GRASS_RENDER_IMMEDIATE=png -(or =cairo) and use the related environment +is needed (e.g., in a script) set GRASS_RENDER_IMMEDIATE=png +(or =cairo) and use the related environment variables to control output size etc.
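For example, a minimal non-interactive rendering sketch (the variables are the standard GRASS_RENDER_* settings; the map name is illustrative):

```bash
# render straight to a PNG file without starting a monitor
export GRASS_RENDER_IMMEDIATE=cairo
export GRASS_RENDER_FILE=elevation.png
export GRASS_RENDER_WIDTH=800
export GRASS_RENDER_HEIGHT=600
d.rast map=elevation
```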

    EXAMPLES

    @@ -73,7 +73,7 @@

    wx0 monitor

    Figure: The initialization of display monitor wx0
    -All subsequently displayed data will be rendered on monitor wx0. +All subsequently displayed data will be rendered on monitor wx0.
     g.region raster=elevation -p
    @@ -95,7 +95,7 @@ 

    CAIRO file renderer monitor

    From this moment on all displayed data will be rendered into -file output.pdf. +file output.pdf.
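A hedged recap of the sequence that leads to this state (map names are illustrative):

```bash
# start the cairo monitor writing into output.pdf, then draw into it
d.mon start=cairo output=output.pdf
d.rast map=elevation
d.vect map=roadsmajor
```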

    List running monitors

    diff --git a/display/d.mon/render_cmd.py b/display/d.mon/render_cmd.py index cf153c0b017..225a82ebe9b 100644 --- a/display/d.mon/render_cmd.py +++ b/display/d.mon/render_cmd.py @@ -111,8 +111,8 @@ def adjust_region(width, height): region["nsres"] = mapheight / height region["ewres"] = mapwidth / width - region["rows"] = int(round(mapheight / region["nsres"])) - region["cols"] = int(round(mapwidth / region["ewres"])) + region["rows"] = round(mapheight / region["nsres"]) + region["cols"] = round(mapwidth / region["ewres"]) region["cells"] = region["rows"] * region["cols"] kwdata = [ diff --git a/display/d.path/d.path.html b/display/d.path/d.path.html index b98e5d90dde..922e0582f75 100644 --- a/display/d.path/d.path.html +++ b/display/d.path/d.path.html @@ -10,7 +10,7 @@

    NOTE

    The user needs to display a vector map before using d.path. If no graphics monitor -is open, a file map.png is generated in the current directory. +is open, a file map.png is generated in the current directory.

    The 'from' and 'to' points are entered by mouse into the map displayed in the GRASS monitor, diff --git a/display/d.profile/d.profile.html b/display/d.profile/d.profile.html index 1f4e789639a..cb17088d4de 100644 --- a/display/d.profile/d.profile.html +++ b/display/d.profile/d.profile.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -This command displays the profile for a specified transect. +d.profile displays the profile for a specified transect.

    SEE ALSO

    diff --git a/display/d.rast.arrow/d.rast.arrow.html b/display/d.rast.arrow/d.rast.arrow.html index 5face5ec59c..912daad0edf 100644 --- a/display/d.rast.arrow/d.rast.arrow.html +++ b/display/d.rast.arrow/d.rast.arrow.html @@ -17,7 +17,7 @@

    DESCRIPTION

    of that category. Cells containing null data will be marked with an "X". You can disable drawing of null data and unknown aspect values by -setting its color to "none". +setting its color to "none".

    When specifying the magnitude_map option, arrow lengths denoting magnitude will be extracted from the cell values of the specified @@ -35,7 +35,7 @@

    DESCRIPTION

    the raster map is relatively close in scale. You can use the skip option to draw arrows every n-th cell in both directions if you are working with relatively high resolutions. It may be useful to disable the grid in -this case, which is accomplished by setting its color to "none". +this case, which is accomplished by setting its color to "none".

    For GRASS and Compass type aspect maps, the cell values of the aspect map will determine the corresponding direction in 360 degrees. ANSWERS type aspect maps will be plotted in multiples of 15 degrees counterclockwise from east, and diff --git a/display/d.rast.num/d.rast.num.html b/display/d.rast.num/d.rast.num.html index 6566c940b7d..f35c1c5abf9 100644 --- a/display/d.rast.num/d.rast.num.html +++ b/display/d.rast.num/d.rast.num.html @@ -24,7 +24,8 @@

    NOTES

    EXAMPLE

    -A) Distance from the streams network (North Carolina sample dataset): +Distance from the streams network (North Carolina sample dataset): +
     g.region raster=streams_derived -p
     r.grow.distance input=streams_derived distance=dist_from_streams
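The computed distances can then be shown with the cell values overlaid; a minimal sketch (zooming to a small region first keeps the numbers readable):

```bash
# display the distance raster and overlay the cell values
d.rast map=dist_from_streams
d.rast.num map=dist_from_streams
```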
    @@ -32,7 +33,7 @@ 

    EXAMPLE

    -
    +Euclidean distance from the streams network in meters
    Euclidean distance from the streams network in meters (detail, numbers shown with d.rast.num)
    diff --git a/display/d.rgb/d.rgb.html b/display/d.rgb/d.rgb.html index 9b706f2b3ce..deae068cb19 100644 --- a/display/d.rgb/d.rgb.html +++ b/display/d.rgb/d.rgb.html @@ -1,5 +1,8 @@

    DESCRIPTION

    +d.rgb displays three user-specified raster maps as red, green, +and blue overlays in the active graphics frame. +
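For instance, a simple color composite could be drawn as follows (the Landsat band names are illustrative, taken from the North Carolina sample dataset):

```bash
# combine three Landsat bands into a single color image in the display
d.rgb red=lsat7_2002_30 green=lsat7_2002_20 blue=lsat7_2002_10
```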

    RGB stands for red, green, and blue. d.rgb visually combines three raster maps to form a color image. For each map, the corresponding component from the diff --git a/display/d.rhumbline/d.rhumbline.html b/display/d.rhumbline/d.rhumbline.html index e0c6abd6974..ed6fb7a2b10 100644 --- a/display/d.rhumbline/d.rhumbline.html +++ b/display/d.rhumbline/d.rhumbline.html @@ -34,7 +34,7 @@

    EXAMPLE

    -
    +
    Rhumbline (loxodrome)
    diff --git a/display/d.text/d.text.html b/display/d.text/d.text.html index 3679f7e0d94..46f257ddfdb 100644 --- a/display/d.text/d.text.html +++ b/display/d.text/d.text.html @@ -89,7 +89,7 @@

    EXAMPLE

    - +
    Displayed Text
    diff --git a/display/d.text/test.py b/display/d.text/test.py index 1089096d2ef..dd8836938a5 100755 --- a/display/d.text/test.py +++ b/display/d.text/test.py @@ -52,7 +52,7 @@ def text(in_text): for i in range(36): font(fonts[int(i % len(fonts))]) - size((36 - i if ((i >= 9 and i <= 18) or i > 27) else i) % 9) + size((36 - i if ((9 <= i <= 18) or i > 27) else i) % 9) rotate(i * 10) color(colors[i % len(colors)]) xy( @@ -73,8 +73,8 @@ def text(in_text): print( ".L 0\n" + re.sub( - '(".*?")', + r'(".*?")', "\n.C red\n,\\g<0>\n.C gray\n", - re.sub("\n", "\n.L 1\n.L 0\n", re.sub("(?m)^#.*\n?", "", src)), + re.sub(r"\n", "\n.L 1\n.L 0\n", re.sub(r"(?m)^#.*\n?", "", src)), ) ) diff --git a/display/d.title/d.title.html b/display/d.title/d.title.html index 15a0d8e75cd..be02c674f60 100644 --- a/display/d.title/d.title.html +++ b/display/d.title/d.title.html @@ -25,17 +25,18 @@

    EXAMPLES

    For example, a user wishing to create a suitable TITLE for the Spearfish, SD soils map layer and to display this TITLE in the active display frame on the graphics monitor might type the following: -
    -
    d.title map=soils color=red size=5 > TITLE.file -
    d.text < TITLE.file -
    + +
    +d.title map=soils color=red size=5 > TITLE.file
    +d.text < TITLE.file
    +
    + Alternately, the user might pipe d.title output directly into d.text: -
    -
    -d.title map=soils color=red size=5 | -d.text -
    + +
    +d.title map=soils color=red size=5 | d.text
    +

    A file created by d.title can be displayed with d.text. @@ -45,8 +46,10 @@

    EXAMPLES

    SEE ALSO

    -d.font
    -d.text
    + +d.font, +d.text +

    AUTHOR

    diff --git a/display/d.vect.chart/d.vect.chart.html b/display/d.vect.chart/d.vect.chart.html index f109e49208d..58f1cd5c1bf 100644 --- a/display/d.vect.chart/d.vect.chart.html +++ b/display/d.vect.chart/d.vect.chart.html @@ -7,9 +7,9 @@

    NOTES

    The charts are positioned as follows:
      -
    • vector points: on point position, -
    • vector lines: on line centers, -
    • vector areas: on area centroids. +
    • vector points: on point position,
    • +
    • vector lines: on line centers,
    • +
    • vector areas: on area centroids.
    Bar charts are placed with their lower edge starting from the y-coordinate diff --git a/display/d.vect.thematic/d.vect.thematic.html b/display/d.vect.thematic/d.vect.thematic.html index 52ce4908d38..a40a31005b4 100644 --- a/display/d.vect.thematic/d.vect.thematic.html +++ b/display/d.vect.thematic/d.vect.thematic.html @@ -40,8 +40,8 @@

    Thematic map with classes

    Thematic map with calculated class breaks

    -The following example uses a calculated attribute (density = -pop/area) and the standard deviation algorithm to calculate class +The following example uses a calculated attribute (density = +pop/area) and the standard deviation algorithm to calculate class breaks for 5 classes:
    diff --git a/display/d.vect/d.vect.html b/display/d.vect/d.vect.html
    index 4ba8494c41f..38341d0afcb 100644
    --- a/display/d.vect/d.vect.html
    +++ b/display/d.vect/d.vect.html
    @@ -5,14 +5,14 @@ 

    DESCRIPTION

    NOTES

-d.vect can simply be used typing d.vect -map=vector_map. There are a large variety of optional parameters +d.vect can simply be used by typing d.vect +map=vector_map. There are a large variety of optional parameters which allow the user to specify vector type, colors, data fields, SQL queries, label size and justification, etc.

When d.vect is used with where parameter on MS Windows -Command Prompt, it is important to use ˆ -carret symbol for escaping special characters < > ( ) & | , ; ". +Command Prompt, it is important to use ˆ +caret symbol for escaping special characters < > ( ) & | , ; ".

     d.vect map=vector_map where="cat ˆ> 10 AND cat ˆ< 20"
     
    @@ -32,7 +32,7 @@

    NOTES

    Feature colors may be specified by v.colors in a form of color -table or in an attribute table column containing RRR:GGG:BBB +table or in an attribute table column containing RRR:GGG:BBB values.

    A table for a vector map might look like this: diff --git a/display/displaydrivers.html b/display/displaydrivers.html index e44fe723a47..2caab56be77 100644 --- a/display/displaydrivers.html +++ b/display/displaydrivers.html @@ -3,16 +3,16 @@ The current command line rendering mechanism is direct rendering into a file. The driver is selected by setting -the GRASS_RENDER_IMMEDIATE variable or by +the GRASS_RENDER_IMMEDIATE variable or by running d.mon module.

    List of available display drivers:

    NOTES

    diff --git a/doc/development/branching_how-to.md b/doc/development/branching_how-to.md new file mode 100644 index 00000000000..7c418052872 --- /dev/null +++ b/doc/development/branching_how-to.md @@ -0,0 +1,142 @@ +# Branching How-To + +## Assumptions + +Given the creation of a new release branch will typically happen right +before RC1 of a new release series, please see assumptions in the +`howto_release.md` document. + +## Create a New Branch + +Given how the branch protection rules are set up and work, +you need to bypass protection against merge commits on branches. +(We don't want any new merge commits. However, there are merge commits +from the past and they prevent the creation of a new branch when the rules are +applied.) +To bypass, go to _Settings > Rules > Rulesets > Rules for release branches_. +Press _Add bypass_ and add the team or user who is creating the branch. + +Use GitHub web interface to create a new branch: + +1. Go to _branches_. +2. Copy the name of one of the existing branches. +3. Click _New branch_. +4. Paste the name of the existing branch. +5. Modify the name. +6. Click _Create branch_. + +As an alternative to creation in GitHub, you can +[create a new branch using command line](#create-in-command-line). + +Note down the latest commit hash on the branch to record it in the +[release history overview](https://grass.osgeo.org/about/history/releases/). +The instructions for updating the website come later in the procedure. + +Remove the bypass in _Settings_. + +## Check the Version + +```bash +git fetch upstream +git switch releasebranch_8_4 +``` + +## Increase Version on the main Branch + +The version number needs to be increased on the main branch. + +```bash +git switch main +git fetch upstream +git rebase upstream/main +``` + +Update the version in the source code (use `minor` or `major`): + +```bash +./utils/update_version.py minor +``` + +If you are using a clone you use for building GRASS GIS, +clean up (`make distclean`) your GRASS GIS build to remove +the now outdated generated version numbers. +(You don't need to build GRASS GIS if you have a fresh clone.) + +Search for all other mentions of the last few versions to see +if they need to be updated, for example: + +```bash +grep --exclude-dir=.git -IrnE "[^0-9^a-z]8[\._][0-9][^0-9]" +grep --exclude-dir=.git -IrnE "grass8.?[0-9]" +``` + +After the check and update, commit + +```bash +git switch -c update-version +git add -p +./utils/update_version.py suggest +git commit -m "version: ..." +git push +``` + +Create a PR and review and merge it as soon as possible to avoid having +the wrong version on the branch in case other PRs need to be merged. + +## Server Updates + +On grass.osgeo.org (grasslxd), new version directories need to be created: + +```bash +cd /var/www/code_and_data/ +VER=grass85 +mkdir -p ${VER}/manuals \ + ${VER}/source/snapshot \ + ${VER}/binary/mswindows/native \ + ${VER}/binary/linux/snapshot +``` + +## Updates of grass-addon Repo + +* Add new branch into +[`.github/workflows/ci.yml`](https://github.com/OSGeo/grass-addons/blob/grass8/.github/workflows/ci.yml). + +## Website Updates + +Add the branch creation to the release history (use the commit hash you saved earlier): + + + +## Additional Notes + +### Making Changes in General + +If you make changes, commit them: + +```bash +git commit -m "version: ..." +git push +``` + +(You can directly push to release branches.) 
+ +### Create in Command Line + +Get the latest main branch: + +```bash +git switch main +git fetch upstream +git rebase upstream/main +``` + +Create the branch: + +```bash +git switch -c releasebranch_8_4 +``` + +Push the version to the upstream repo: + +```bash +git push upstream diff --git a/doc/development/rfc/README.md b/doc/development/rfc/README.md index 590be34e169..5ee9ff1d805 100644 --- a/doc/development/rfc/README.md +++ b/doc/development/rfc/README.md @@ -16,6 +16,8 @@ A list of all GRASS GIS RFC documents, with status: - [RFC 7: Language Standards Support](language_standards_support.md) (Adopted) - [RFC 8: Python Language Support](python_language_support.md) (Adopted) - [RFC 9: Version Numbering](version_numbering.md) (Adopted) +- [RFC 10: C++17 Language Standard Support](RFC10_cpp17_language_standards_support.md) + (Adopted) - [RFC X: Release Policy](https://github.com/OSGeo/grass/pull/3673) (Draft) Status values: diff --git a/doc/development/rfc/RFC10_cpp_language_standards_support.md b/doc/development/rfc/RFC10_cpp_language_standards_support.md new file mode 100644 index 00000000000..f913c72b396 --- /dev/null +++ b/doc/development/rfc/RFC10_cpp_language_standards_support.md @@ -0,0 +1,35 @@ +# RFC 10: C++17 Standard Support for GRASS GIS 8.5 + +Author of the first draft: Nicklas Larsson + +Status: Motion passed, 28 November 2024 + +## Summary + +Set a new minimum build requirement for C++ code to support the C++17 standard. This +supersedes the [RFC 7: Language Standards Support](language_standards_support.md) +with respect to C++ standard support. + +## Background + +The RFC 7, which set the minimum supported C++ standard to C++11, was adopted 3.5 +years ago. At that time, although the latest versions of common compilers had +full support for C++17, neither the GRASS GIS code itself nor its dependencies +needed or required it. Now, compilers with full C++17 support are available also +in what may be considered stable or long-term-support systems. +Moreover, important dependencies such as PDAL 2.4 (released in March 2022) and +GDAL 3.9 (May 2024) require C++17 support, and so will the future release of +GEOS 3.14. + +## C++17 standard for GRASS GIS + +The time has come to increase the minimal C++ standard support for GRASS GIS +code to the C++17 standard. This enables the use of new C++17 features, if and when +so is needed or recommended. There is, however, no need to make any immediate +changes to current code. Continuous integration runners already compile in +C++17 mode, making sure nothing will be broken. 
+ +## References on C++17 features and support + +- [Wikipedia](https://en.wikipedia.org/wiki/C%2B%2B17) +- [cppreference.com](https://en.cppreference.com/w/cpp/17) diff --git a/doc/development/rfc/legal_aspects_of_code_contributions.md b/doc/development/rfc/legal_aspects_of_code_contributions.md index 596cf301183..9607569c45e 100644 --- a/doc/development/rfc/legal_aspects_of_code_contributions.md +++ b/doc/development/rfc/legal_aspects_of_code_contributions.md @@ -66,4 +66,4 @@ source repository: Questions regarding GRASS GIS should be directed to the GRASS Development Team at the following address: -Internet: +Internet: diff --git a/doc/gui/wxpython/example/Makefile b/doc/examples/gui/wxpython/Makefile similarity index 100% rename from doc/gui/wxpython/example/Makefile rename to doc/examples/gui/wxpython/Makefile diff --git a/doc/gui/wxpython/example/README b/doc/examples/gui/wxpython/README similarity index 96% rename from doc/gui/wxpython/example/README rename to doc/examples/gui/wxpython/README index 1e619486943..c615ed57452 100644 --- a/doc/gui/wxpython/example/README +++ b/doc/examples/gui/wxpython/README @@ -20,7 +20,7 @@ or it can be launched from the console. 1. Go to GRASS root directory -2. Copy directory ./doc/gui/wxpython/example to ./gui/wxpython/example +2. Copy directory ./doc/examples/gui/wxpython to ./gui/wxpython 3. Edit ./gui/wxpython/Makefile: diff --git a/doc/gui/wxpython/example/dialogs.py b/doc/examples/gui/wxpython/dialogs.py similarity index 100% rename from doc/gui/wxpython/example/dialogs.py rename to doc/examples/gui/wxpython/dialogs.py diff --git a/doc/gui/wxpython/example/frame.py b/doc/examples/gui/wxpython/frame.py similarity index 99% rename from doc/gui/wxpython/example/frame.py rename to doc/examples/gui/wxpython/frame.py index 7f68642de9f..e947ee1a6b3 100644 --- a/doc/gui/wxpython/example/frame.py +++ b/doc/examples/gui/wxpython/frame.py @@ -41,8 +41,8 @@ from example.dialogs import ExampleMapDialog # It is possible to call grass library functions (in C) directly via ctypes -# however this is less stable. Example is available in trunk/doc/python/, ctypes -# are used in nviz, vdigit, iclass gui modules. +# however this is less stable. Example is available in trunk/doc/examples/python/, +# ctypes are used in nviz, vdigit, iclass gui modules. # from ctypes import * # try: diff --git a/doc/examples/gui/wxpython/g.gui.example.html b/doc/examples/gui/wxpython/g.gui.example.html new file mode 100644 index 00000000000..c11ec5b2b8b --- /dev/null +++ b/doc/examples/gui/wxpython/g.gui.example.html @@ -0,0 +1,66 @@ + + +

    DESCRIPTION

    + +

+The purpose of the Example Tool is to make life easier +for new wxGUI developers. It can serve as a basic template when +creating a standalone GRASS GUI-based application. The Example Tool +can display one raster map and show information about it.

    + +

+The following topics are covered:

    + +
      +
    • creating standalone window
    • +
    • adding toolbars, statusbar
    • +
    • displaying raster map
    • +
    • running GRASS modules from application
    • +
    • creating dialog for element (raster, vector, ...) selection
    • +
    • using temporary region
    • +
    • access from main menu
    • +
    • writing programmer documentation
    • +
    • writing user documentation
    • +
    + +

    NOTE

+ +See README to learn how to get the Example Tool to work. + + + +

    EXAMPLE TOOL TOOLBAR

    + +
    +
      + Select raster layer
    +
    Select raster layer and compute statistics related to this layer.
    +
    + +

    SEE ALSO

    + + + wxGUI, + wxGUI components + + + + +

    AUTHOR

    + +Anna Kratochvilova, +Czech Technical University in Prague, Czech Republic diff --git a/doc/gui/wxpython/example/g.gui.example.py b/doc/examples/gui/wxpython/g.gui.example.py similarity index 100% rename from doc/gui/wxpython/example/g.gui.example.py rename to doc/examples/gui/wxpython/g.gui.example.py diff --git a/doc/gui/wxpython/example/toolbars.py b/doc/examples/gui/wxpython/toolbars.py similarity index 100% rename from doc/gui/wxpython/example/toolbars.py rename to doc/examples/gui/wxpython/toolbars.py diff --git a/doc/examples/notebooks/README.md b/doc/examples/notebooks/README.md new file mode 100644 index 00000000000..9aaf8e5d226 --- /dev/null +++ b/doc/examples/notebooks/README.md @@ -0,0 +1,32 @@ +# Notebooks + +## Introduction + +### Using the notebooks locally + +Clone this repository with `git clone` first. Then, locally start the Jupyter +notebook server from the command line in the `doc/examples/notebooks/` +directory containing the `*.ipynb` files with: `jupyter notebook` + +This will open a new browser tab or window with a list of the contents of the +current working directory. Clicking on one of the `*.ipynb` files will start +the notebook. + +See also the official documentation for +[The Jupyter Notebook](https://jupyter-notebook.readthedocs.io/en/latest/). + +### Introductory notebooks to GRASS GIS and Jupyter + +* GRASS GIS in Jupyter Notebook with Python and grass.jupyter: + [jupyter_example.ipynb](jupyter_example.ipynb) +* The grass.jupyter Package: [jupyter_tutorial.ipynb](jupyter_tutorial.ipynb) + +## Thematic Jupyter notebooks + +* Viewshed Analysis: [viewshed_analysis.ipynb](viewshed_analysis.ipynb) +* Spatio-Temporal Analysis with grass.jupyter: [temporal.ipynb](temporal.ipynb) +* Solar Energy Potential Analysis: + [solar_potential.ipynb](solar_potential.ipynb) +* GRASS GIS Scripting with Python: + [scripting_example.ipynb](scripting_example.ipynb) +* Hydrology with GRASS GIS: [hydrology.ipynb](hydrology.ipynb) diff --git a/doc/notebooks/hydrology.ipynb b/doc/examples/notebooks/hydrology.ipynb similarity index 100% rename from doc/notebooks/hydrology.ipynb rename to doc/examples/notebooks/hydrology.ipynb diff --git a/doc/notebooks/jupyter_example.ipynb b/doc/examples/notebooks/jupyter_example.ipynb similarity index 100% rename from doc/notebooks/jupyter_example.ipynb rename to doc/examples/notebooks/jupyter_example.ipynb diff --git a/doc/notebooks/jupyter_tutorial.ipynb b/doc/examples/notebooks/jupyter_tutorial.ipynb similarity index 100% rename from doc/notebooks/jupyter_tutorial.ipynb rename to doc/examples/notebooks/jupyter_tutorial.ipynb diff --git a/doc/notebooks/parallelization_tutorial.ipynb b/doc/examples/notebooks/parallelization_tutorial.ipynb similarity index 100% rename from doc/notebooks/parallelization_tutorial.ipynb rename to doc/examples/notebooks/parallelization_tutorial.ipynb diff --git a/doc/notebooks/scripting_example.ipynb b/doc/examples/notebooks/scripting_example.ipynb similarity index 100% rename from doc/notebooks/scripting_example.ipynb rename to doc/examples/notebooks/scripting_example.ipynb diff --git a/doc/notebooks/solar_potential.ipynb b/doc/examples/notebooks/solar_potential.ipynb similarity index 100% rename from doc/notebooks/solar_potential.ipynb rename to doc/examples/notebooks/solar_potential.ipynb diff --git a/doc/notebooks/temporal.ipynb b/doc/examples/notebooks/temporal.ipynb similarity index 100% rename from doc/notebooks/temporal.ipynb rename to doc/examples/notebooks/temporal.ipynb diff --git 
a/doc/notebooks/viewshed_analysis.ipynb b/doc/examples/notebooks/viewshed_analysis.ipynb similarity index 100% rename from doc/notebooks/viewshed_analysis.ipynb rename to doc/examples/notebooks/viewshed_analysis.ipynb diff --git a/doc/python/README b/doc/examples/python/README similarity index 88% rename from doc/python/README rename to doc/examples/python/README index e5f48fad99e..51c5c1ce9dc 100644 --- a/doc/python/README +++ b/doc/examples/python/README @@ -7,7 +7,7 @@ There are two ways of using Python to run GRASS commands: - Module creation using hooks into the C library functions using ctypes. - The scripts in the doc/python/examples/ directory will describe this. + The scripts in the doc/examples/python/ directory will describe this. (Ctypes is standard in Python 2.5 and newer; replaces the SWIG implementation in GRASS) diff --git a/doc/python/m.distance.py b/doc/examples/python/m.distance.py similarity index 98% rename from doc/python/m.distance.py rename to doc/examples/python/m.distance.py index 2383cbfb669..e6e26bc2842 100755 --- a/doc/python/m.distance.py +++ b/doc/examples/python/m.distance.py @@ -20,7 +20,7 @@ ############################################################################ # # Requires GRASS Python Ctypes interface -# Requires Numeric module (NumPy) from http://numpy.scipy.org/ +# Requires Numeric module (NumPy) from https://numpy.org # # %module diff --git a/doc/python/raster_example_ctypes.py b/doc/examples/python/raster_example_ctypes.py similarity index 100% rename from doc/python/raster_example_ctypes.py rename to doc/examples/python/raster_example_ctypes.py diff --git a/doc/python/script/Makefile b/doc/examples/python/script/Makefile similarity index 100% rename from doc/python/script/Makefile rename to doc/examples/python/script/Makefile diff --git a/doc/python/script/r.example.html b/doc/examples/python/script/r.example.html similarity index 100% rename from doc/python/script/r.example.html rename to doc/examples/python/script/r.example.html diff --git a/doc/python/script/r.example.py b/doc/examples/python/script/r.example.py similarity index 100% rename from doc/python/script/r.example.py rename to doc/examples/python/script/r.example.py diff --git a/doc/python/vector_example_ctypes.py b/doc/examples/python/vector_example_ctypes.py similarity index 100% rename from doc/python/vector_example_ctypes.py rename to doc/examples/python/vector_example_ctypes.py diff --git a/doc/raster/r.example/COMMENTS b/doc/examples/raster/r.example/COMMENTS similarity index 100% rename from doc/raster/r.example/COMMENTS rename to doc/examples/raster/r.example/COMMENTS diff --git a/doc/raster/r.example/Makefile b/doc/examples/raster/r.example/Makefile similarity index 89% rename from doc/raster/r.example/Makefile rename to doc/examples/raster/r.example/Makefile index 8636720f1df..f6061377f78 100644 --- a/doc/raster/r.example/Makefile +++ b/doc/examples/raster/r.example/Makefile @@ -1,6 +1,6 @@ # fix this relative to include/ # or use absolute path to the GRASS source code -MODULE_TOPDIR = ../../.. +MODULE_TOPDIR = ../../../.. 
PGM = r.example diff --git a/doc/raster/r.example/main.c b/doc/examples/raster/r.example/main.c similarity index 100% rename from doc/raster/r.example/main.c rename to doc/examples/raster/r.example/main.c diff --git a/doc/raster/r.example/r.example.html b/doc/examples/raster/r.example/r.example.html similarity index 100% rename from doc/raster/r.example/r.example.html rename to doc/examples/raster/r.example/r.example.html diff --git a/doc/examples/vector/TODO b/doc/examples/vector/TODO new file mode 100644 index 00000000000..d43bffca15d --- /dev/null +++ b/doc/examples/vector/TODO @@ -0,0 +1,258 @@ + GRASS 6 vector TODO + --------------------- + (Radim Blazek, May 2006) + +This document is summary of my ideas on how vector part of GRASS GIS +could be improved. + +It can be that you come to conclusion that vectors in GRASS are bad +and it is necessary to start from scratch. In that case I would +recommend to leave current library and modules intact and start the +new work in parallel (the new modules could start with w.* or v2.*). I +was thinking for example about completely new vector library based on +OGC standard, using either OGR directly or an abstraction layer and +OGR as an option (driver). That does not mean that I prefer simple +feature specification over current GRASS implementation, I am not sure +which one is better. In any case it would be pity to drop current +topological format with all its flexibility. Each approach has +advantages and disadvantages. I think that it the best to have in OS +GIS all alternatives file/database and topology/simple feature. + + + +Historical notes +---------------- +The current implementation of vectors is based on previous work which +was present in GRASS5 (the vector library and modules and DBMI +library). We started this work together with David D. Gray in autumn +2000 (IIRC) but David had to leave GRASS project soon so that I almost +all responsibility for vector development in GRASS6 and its results is +mine. + +The current design of GRASS vectors is result of these factor: ++ very limited resources for development (necessity to use existing + free code/libraries/applications whenever possible) ++ relatively little experience with development of GIS application ++ respect for certain features of GRASS5 vector model and for existing + community which is using it ++ bad experience with quality of data produced in simple feature based + applications (ArcView) + + +1. Library +---------- + +1.1 Geometry +------------ + +Keep topology and spatial index in file instead of in memory +------------------------------------------------------------ +Scalability seems to be currently the biggest problem of GRASS +vectors. The geometry of GRASS vectors (coor file) is never loaded +whole to the memory. OTOH the support structures (topology and spatial +index) are loaded to memory on runtime. It should be possible to use +files for topology and spatial index also on runtime and that way +decrease the memory occupied by running module (practically to +zero). The speed will decrease a bit but not significantly because +files are usually cached by system. + +* Update: implemented in r38385 (2009/07) by Markus Metz + +Temporary vector +---------------- +Analytical modules process data in the output vector (for example +v.overlay and v.buffer). Because many lines can be deleted (broken +lines for example) and new lines are written at the end of coor file +the output file can contain many 'dead' lines (not used space). 
It +would be better to do processing in a temporary vector and copy only +alive lines to the output when processing finished. That means +implement Vect_open_temporary() which will work like Vect_open_new() +but the files will be opened in temporary directory (it should not +write to $MAPSET/vector). + + +Recycle deleted lines +--------------------- +The space which was occupied by a line in coor file is lost after call +to Vect_delete_line(). A list of the free positions be kept and +Vect_write_line() should write in that free space if possible instead +of to append a new line to the end of the file. There is already empty +structure 'recycle' in 'dig_head' where the list could be implemented +(without changing 'dig_head' size, to keep binary compatibility). + +* Note: currently wxGUI vector digitizer 'undo' depends on this 'feature' + +Vect_rewrite_line +----------------- +Implement properly Vect_rewrite_line(). Currently it simply calls +Vect_delete_line() and Vect_write_line(). It should be implemented so +that if the new size of the line is the same as the old size it will +be written in the same place in the coor file where the original line +existed. + +* Note: see above + +Remove bounding box from support structures (?) +----------------------------------------------- +The vector structures (P_line, P_area, P_isle) store bounding box in +N,S,E,W,T,B (doubles). Especially in case of element type GV_POINT the +bounding box occupies a lot of space (2-3 times more than the point +itself). I am not sure if this is really good idea, it is necesssary to +valutate also how often Vect_line_box() is called and the impact of +the necessity to calculate always the box on the fly (when it is not +stored in the structure) which can be time consuming for example for +areas or long lines. + +* See also https://trac.osgeo.org/grass/ticket/542 +* Update: implemented in r46898 (2011/07) by Markus Metz + +Switching to update mode +------------------------ +It would be useful to have a possibility to switch to 'update' mode a +map which was opened by Vect_open_old/new() and similarly to switch +back to 'normal' mode. Currently it is necessary to call Vect_close() +and Vect_open_update(). + +Layer names +----------- +The layers are currently identified only by numbers but it is possible +to assign to each layer number a name. The library can read these +names but it is not possible to use the name as parameter for +modules. It is necessary to write int Vect_get_layer_by_name ( struct +Map_info *map, char *name) which will accept both names and numbers +and use this function in vector modules. This is also important for +OGR interface improvements (see below). + +* Update: implemented in r38548 (2009/07) by Martin Landa + +OGR interface +------------- +It is important to enable direct access to OGR data sources without +v.external and without necessity to store anything in files. The +problem of v.external is that topology is stored in file that means it +can be wrong when the source is opened next time. It should be +relatively easy to call Vect_build_ogr() whenever an OGR vector is +opened with level2 (topology) requested and topology will be built on +the fly. OGR vectors would be specified by virtual mapset name +'OGR'. Each OGR datasource will be equivalent to GRASS vector and each +OGR layer will be equivalent to GRASS layer (it is necessary to +implement layer names, see above). 
It would be for example possible to +display a shapefile or PostGIS layers directly: + + d.vect map=./shapefiles/@OGR layer=roads # display shapefile ./shapefiles/roads.shp + d.vect map=PG:dbname=test@OGR layer=roads # display table roads from database test + +* Update: in progress, + see https://trac.osgeo.org/grass/wiki/Grass7/VectorLib/OGRInterface#DirectOGRreadaccess + +Simple feature API and sequential reading +----------------------------------------- +Most GRASS modules are currently using random access to the data which +reflects GRASS format. This works well with GRASS data but it can +become very slow or even impossible with OGR data sources because some +OGR drivers don't support random access or random access is very +slow. Because conversion from topological format to simple feature is +very simple and sequential reading of GRASS vectors is not problem it +would be desirable to implement in GRASS vector library 'simple +feature' API to GRASS vectors and map it directly to OGR API in case +of OGR data sources. Then many GRASS modules can be modified to use +sequentil reading and simple feature API and that will make more +efficient processing of data directly read from OGR data sources. + + +1.2 Attributes +-------------- + +In general I found the use of true RDBMS for attributes as a +problem. The data are stored in two distinct places (vector files + +database) and it makes it difficult to keep them consistent and manage +(move, backup). Another problem is random access to the data in RDBMS +from an application which is terribly slow (due to communication with +server). RDBMS is not bad, bad is the combination of files and +RDBMS. I think that either everything must be stored in RDBMS +(PostGIS) or nothing. Eric G. Miller (IIRC) was right when he said +that data are 'too distant' when RDBMS is used with geometry in file. + +I think that more work should be done on the drivers which are using +embedded databases stored in files (SQLite,MySQL,DBF) with scope to +reach similar functionality (functions, queries) which are in true +RDBMS without penalty of communication with server. It should be also +considered the possibility to change the default location of database +files to vector directory ($MAPSET/vector/test). That means to keep +all the data of one vector in a single directory. It is already +possible but it is not the default settings, for example: + + db.connect driver=dbf database='$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/' + db.connect driver=sqlite database='$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/db.sqlite' + db.connect driver=mesql database='$GISDBASE/$LOCATION_NAME/$MAPSET/vector/$MAP/' + +Implement insert/update cursors +------------------------------- +GRASS modules are currently sending all data to database drivers as +individual SQL insert/update statements. This makes the update process +slow (cunstructing and parsing queries) and number precision can be +lost. The solution is to implement db_open_insert/update_cursor() and +db_insert/update() in database drivers and use these functions in +modules. The drivers should then use precompiled statements +(e.g. SQLite) or they could update the database directly (DBF). + +Note that it is not necessary to implement these functions in all +drivers at the same time. You can implement lib/db/stubs functions +which will create SQL statement and send it to db_execute() which is +implemented in all drivers until the functions are properly +implemented in all drivers. 
+ +SQLite driver +------------- +Current implementation is very slow with large updates/inserts. I +think that it is because all statements are parsed and it should be +possible to improve by insert/update cursors (see above). + +DBF driver +---------- +Add on the fly index for select/update. + +Implement db_copy_table() in drivers +---------------------------------- +db_copy_table() is implement in client library and it always reads and +writes all the data which is slow. It would be better to send this +request to the driver (if possible, i.e. input and output driver are +the same) which can copy tables much faster. For example true RDBMS +can use 'create table new as select * from old' and DBF driver can +simply copy files. + +Load drivers as dynamic libraries +--------------------------------- +Database drivers are implemented as executables which communicate with +modules via pipes. This implementation creates some problems with +portability (especially on Windows) and it makes communication slow (I +am not sure how much). It would be probably desirable to implement +drivers as loadable modules (dlopen() and equivalents). + + +2. Modules +---------- + +v.overlay +--------- +Select only relevant features which will be written to the output if +'and,not,nor' operators are used. An inspiration is available in +v.select. + +v.pack/v.unpack +--------------- +Write it. New modules to pack/unpack a vector to/from single file +(probably tar). I am not sure about format. Originally I was thinking +about ASCII+DBF as it can be read also without GRASS but ASCII and DBF +can lose precision and DBF has other limitations. It would be +probably better to use copy of 'coor' file and attributes written to +SQLite database. + +Update: see + https://trac.osgeo.org/grass/browser/grass-addons/grass7/vector/v.pack + and + https://trac.osgeo.org/grass/browser/grass-addons/grass7/vector/v.unpack + by Luca Delucchi + +1/2009: Other suggestions moved to + https://trac.osgeo.org/grass/ diff --git a/doc/vector/grass51atts.fig b/doc/examples/vector/grass51atts.fig similarity index 100% rename from doc/vector/grass51atts.fig rename to doc/examples/vector/grass51atts.fig diff --git a/doc/vector/grass51atts.png b/doc/examples/vector/grass51atts.png similarity index 100% rename from doc/vector/grass51atts.png rename to doc/examples/vector/grass51atts.png diff --git a/doc/vector/grass51concept.fig b/doc/examples/vector/grass51concept.fig similarity index 100% rename from doc/vector/grass51concept.fig rename to doc/examples/vector/grass51concept.fig diff --git a/doc/vector/grass51concept.png b/doc/examples/vector/grass51concept.png similarity index 100% rename from doc/vector/grass51concept.png rename to doc/examples/vector/grass51concept.png diff --git a/doc/vector/v.example/Makefile b/doc/examples/vector/v.example/Makefile similarity index 91% rename from doc/vector/v.example/Makefile rename to doc/examples/vector/v.example/Makefile index 9516d79b855..0197d49271d 100644 --- a/doc/vector/v.example/Makefile +++ b/doc/examples/vector/v.example/Makefile @@ -1,6 +1,6 @@ # fix this relative to include/ # or use absolute path to the GRASS source code -MODULE_TOPDIR = ../../.. +MODULE_TOPDIR = ../../../.. 
PGM = v.example diff --git a/doc/vector/v.example/main.c b/doc/examples/vector/v.example/main.c similarity index 100% rename from doc/vector/v.example/main.c rename to doc/examples/vector/v.example/main.c diff --git a/doc/vector/v.example/v.example.html b/doc/examples/vector/v.example/v.example.html similarity index 100% rename from doc/vector/v.example/v.example.html rename to doc/examples/vector/v.example/v.example.html diff --git a/doc/gui/wxpython/example/g.gui.example.html b/doc/gui/wxpython/example/g.gui.example.html index ab24a531dfa..c11ec5b2b8b 100644 --- a/doc/gui/wxpython/example/g.gui.example.html +++ b/doc/gui/wxpython/example/g.gui.example.html @@ -48,7 +48,7 @@

    EXAMPLE TOOL TOOLBAR

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/doc/notebooks/README.md b/doc/notebooks/README.md deleted file mode 100644 index 4a200b0f4f6..00000000000 --- a/doc/notebooks/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Notebooks - -## Introduction - -### Using the notebooks locally - -Clone this repository with `git clone` first. Then, locally start the Jupyter -notebook server from the command line in the `doc/notebooks/` directory containing -the `*.ipynb` files with: -`jupyter notebook` - -This will open a new browser tab or window with a list of the contents of the current -working directory. Clicking on one of the `*.ipynb` files will start the notebook. - -See also the official documentation for [The Jupyter Notebook](https://jupyter-notebook.readthedocs.io/en/latest/). - -### Introductory notebooks to GRASS GIS and Jupyter - -* GRASS GIS in Jupyter Notebook with Python and grass.jupyter: [jupyter_example.ipynb](jupyter_example.ipynb) -* The grass.jupyter Package: [jupyter_tutorial.ipynb](jupyter_tutorial.ipynb) - -## Thematic Jupyter notebooks - -* Viewshed Analysis: [viewshed_analysis.ipynb](viewshed_analysis.ipynb) -* Spatio-Temporal Analysis with grass.jupyter: [temporal.ipynb](temporal.ipynb) -* Solar Energy Potential Analysis: [solar_potential.ipynb](solar_potential.ipynb) -* GRASS GIS Scripting with Python: [scripting_example.ipynb](scripting_example.ipynb) -* Hydrology with GRASS GIS: [hydrology.ipynb](hydrology.ipynb) diff --git a/doc/projectionintro.html b/doc/projectionintro.html index f85416640d3..c51c9efc761 100644 --- a/doc/projectionintro.html +++ b/doc/projectionintro.html @@ -12,7 +12,7 @@

    Projection management in general

    Reprojecting raster maps

    Rasters are reprojected using the raster projection tool -r.proj. +r.proj. The tool is used in the target project to "pull" a map from its source project. Both projects need to have a projection defined, i.e., they cannot be XY (unprojected). @@ -37,7 +37,7 @@

    Raster map transformation

    Vector map projections

    Vectors are reprojected using the vector projection tool -v.proj. +v.proj. The tool is used in the target project to "pull" a map from its source project. Both projects need to have a projection defined, i.e., they cannot be XY (unprojected). @@ -60,11 +60,11 @@

    Vector map transformation

    References

    See also

    @@ -76,4 +76,5 @@

    See also

  • Introduction into image processing
  • Introduction into temporal data processing
  • Database management
  • +
  • Graphical User Interface
  • diff --git a/doc/vector/TODO b/doc/vector/TODO index 75671559263..d43bffca15d 100644 --- a/doc/vector/TODO +++ b/doc/vector/TODO @@ -205,7 +205,7 @@ implemented in all drivers. SQLite driver ------------- Current implementation is very slow with large updates/inserts. I -think that it is because all statemets are parsed and it should be +think that it is because all statements are parsed and it should be possible to improve by insert/update cursors (see above). DBF driver diff --git a/docker/alpine/Dockerfile b/docker/alpine/Dockerfile index 8163b5d9b08..27afaf4d6db 100644 --- a/docker/alpine/Dockerfile +++ b/docker/alpine/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.20@sha256:beefdbd8a1da6d2915566fde36db9db0b524eb737fc57cd1367effd16dc0d06d as common +FROM alpine:3.21@sha256:b97e2a89d0b9e4011bb88c02ddf01c544b8c781acf1f4d559e7c8f12f1047ac3 as common # Based on: # https://github.com/mundialis/docker-grass-gis/blob/master/Dockerfile diff --git a/docker/ubuntu/Dockerfile b/docker/ubuntu/Dockerfile index 66b15d5970e..7fe43704f2f 100644 --- a/docker/ubuntu/Dockerfile +++ b/docker/ubuntu/Dockerfile @@ -1,4 +1,4 @@ -# syntax=docker/dockerfile:1.11@sha256:10c699f1b6c8bdc8f6b4ce8974855dd8542f1768c26eb240237b8f1c9c6c9976 +# syntax=docker/dockerfile:1.12@sha256:93bfd3b68c109427185cd78b4779fc82b484b0b7618e36d0f104d4d801e66d25 # Note: This file must be kept in sync in ./Dockerfile and ./docker/ubuntu/Dockerfile. # Changes to this file must be copied over to the other file. diff --git a/docker/ubuntu/README.md b/docker/ubuntu/README.md index 9a45651504c..a8f2764a239 100644 --- a/docker/ubuntu/README.md +++ b/docker/ubuntu/README.md @@ -88,7 +88,7 @@ bash-5.0# __To build a latest version with wxgui__: -The `GUI` build argument allows to choose if the GUI should +The `GUI` build argument allows choosing if the GUI should be included in the build (`GUI=with`) or not (`GUI=without`). 
```bash diff --git a/flake.lock b/flake.lock index f36f65a6375..edaef13fad9 100644 --- a/flake.lock +++ b/flake.lock @@ -5,11 +5,11 @@ "nixpkgs-lib": "nixpkgs-lib" }, "locked": { - "lastModified": 1730504689, - "narHash": "sha256-hgmguH29K2fvs9szpq2r3pz2/8cJd2LPS+b4tfNFCwE=", + "lastModified": 1736143030, + "narHash": "sha256-+hu54pAoLDEZT9pjHlqL9DNzWz0NbUn8NEAHP7PQPzU=", "owner": "hercules-ci", "repo": "flake-parts", - "rev": "506278e768c2a08bec68eb62932193e341f55c90", + "rev": "b905f6fc23a9051a6e1b741e1438dbfc0634c6de", "type": "github" }, "original": { @@ -19,11 +19,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1730272153, - "narHash": "sha256-B5WRZYsRlJgwVHIV6DvidFN7VX7Fg9uuwkRW9Ha8z+w=", + "lastModified": 1736042175, + "narHash": "sha256-jdd5UWtLVrNEW8K6u5sy5upNAFmF3S4Y+OIeToqJ1X8=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "2d2a9ddbe3f2c00747398f3dc9b05f7f2ebb0f53", + "rev": "bf689c40d035239a489de5997a4da5352434632e", "type": "github" }, "original": { @@ -35,14 +35,14 @@ }, "nixpkgs-lib": { "locked": { - "lastModified": 1730504152, - "narHash": "sha256-lXvH/vOfb4aGYyvFmZK/HlsNsr/0CVWlwYvo2rxJk3s=", + "lastModified": 1735774519, + "narHash": "sha256-CewEm1o2eVAnoqb6Ml+Qi9Gg/EfNAxbRx1lANGVyoLI=", "type": "tarball", - "url": "https://github.com/NixOS/nixpkgs/archive/cc2f28000298e1269cea6612cd06ec9979dd5d7f.tar.gz" + "url": "https://github.com/NixOS/nixpkgs/archive/e9b51731911566bbf7e4895475a87fe06961de0b.tar.gz" }, "original": { "type": "tarball", - "url": "https://github.com/NixOS/nixpkgs/archive/cc2f28000298e1269cea6612cd06ec9979dd5d7f.tar.gz" + "url": "https://github.com/NixOS/nixpkgs/archive/e9b51731911566bbf7e4895475a87fe06961de0b.tar.gz" } }, "root": { diff --git a/general/g.filename/g.filename.html b/general/g.filename/g.filename.html index 0f6c89357e3..5462f5cb48e 100644 --- a/general/g.filename/g.filename.html +++ b/general/g.filename/g.filename.html @@ -6,7 +6,7 @@

    DESCRIPTION

    The list of element names to search for is not fixed; any subdirectory of the mapset directory is a valid element name.

    However, the user can find the list of standard GRASS GIS element names in -the file $GISBASE/etc/element_list. This is the file which +the file $GISBASE/etc/element_list. This is the file which g.remove/g.rename/g.copy use to determine which files need to be deleted/renamed/copied for a given entity type. diff --git a/general/g.findfile/g.findfile.html b/general/g.findfile/g.findfile.html index 9831f8cc29d..5746759fbd9 100644 --- a/general/g.findfile/g.findfile.html +++ b/general/g.findfile/g.findfile.html @@ -7,7 +7,7 @@

    DESCRIPTION

    The list of element names to search for is not fixed; any subdirectory of the mapset directory is a valid element name.

    However, the user can find the list of standard GRASS element -names in the file $GISBASE/etc/element_list. This is the file +names in the file $GISBASE/etc/element_list. This is the file which g.remove, g.rename and g.copy use to determine which files need to be deleted/renamed/copied for a given entity type. @@ -68,7 +68,7 @@

    SHELL

     eval `g.findfile element=vector file="$G_OPT_V_INPUT"`
     if [ ! "$file" ] ; then
    -   g.message -e "Vector map <$G_OPT_V_INPUT> not found"
    +   g.message -e "Vector map <$G_OPT_V_INPUT> not found"
        exit 1
     fi
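For comparison, the same existence check written with the Python scripting library; this is a hedged sketch, the map name roads is only an example, and find_file() returns an entry with an empty 'file' value when nothing is found:

```python
import grass.script as gs

# Look up a vector map in the current mapset search path.
found = gs.find_file(name="roads", element="vector")
if not found["file"]:
    gs.fatal("Vector map <roads> not found")
```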
     
    diff --git a/general/g.gisenv/g.gisenv.html b/general/g.gisenv/g.gisenv.html index 4f5d9e165b3..bf95770390c 100644 --- a/general/g.gisenv/g.gisenv.html +++ b/general/g.gisenv/g.gisenv.html @@ -1,5 +1,8 @@

    DESCRIPTION

    +g.gisenv outputs and modifies the user's current GRASS GIS +variable settings. + When a user runs GRASS, certain variables are set specifying the GRASS data base, project, mapset, peripheral device drivers, etc., being used in the current GRASS session. These variable name settings are @@ -46,7 +49,7 @@

    OPTIONS

    "project" directory itself contains subdirectories called "mapsets"; each "mapset" stores "data base elements" - the directories (e.g., -the cell, cellhd, vector, etc., directories) +the cell, cellhd, vector, etc., directories) in which GRASS data files are actually stored.
    LOCATION_NAME @@ -118,7 +121,7 @@

    OPTIONS

    The full path to the current mapset is determined from GISDBASE, LOCATION_NAME, MAPSET variables, in the example -above: /opt/grassdata/spearfish/PERMANENT. The full path can +above: /opt/grassdata/spearfish/PERMANENT. The full path can be printed using g.gisenv by providing multiple variables:
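The command example itself falls outside this hunk. Purely as an illustration (assuming a running GRASS session), the same values can also be read from Python and joined into the mapset path:

```python
import os
import grass.script as gs

env = gs.gisenv()  # current GRASS variables as a dictionary
print(os.path.join(env["GISDBASE"], env["LOCATION_NAME"], env["MAPSET"]))
# e.g. /opt/grassdata/spearfish/PERMANENT
```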
    @@ -146,7 +149,7 @@ 

    NOTES

    By default the GRASS variables are stored in gisrc file (defined by environmental variable GISRC). If store=mapset is given then the -variables are stored in <gisdbase>/<project>/<mapset>/VAR +variables are stored in <gisdbase>/<project>/<mapset>/VAR after the current GRASS session is closed.
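A hedged sketch of persisting a variable in the mapset from a Python script; run_command simply calls g.gisenv, and the DEBUG value is only an example:

```python
import grass.script as gs

# Store DEBUG=3 in the mapset's VAR file instead of the session's gisrc file.
gs.run_command("g.gisenv", set="DEBUG=3", store="mapset")
```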

    EXAMPLES

    @@ -186,10 +189,10 @@

    GRASS Debugging

    Levels: (recommended levels)
      -
    • 0 - silence -
    • 1 - message is printed once or few times per module -
    • 3 - each row (raster) or line (vector) -
    • 5 - each cell (raster) or point (vector) +
    • 0 - silence
    • +
    • 1 - message is printed once or few times per module
    • +
    • 3 - each row (raster) or line (vector)
    • +
    • 5 - each cell (raster) or point (vector)
    To disable debugging messages: diff --git a/general/g.gui/g.gui.html b/general/g.gui/g.gui.html index 4c8b162b6b1..3988edb68e3 100644 --- a/general/g.gui/g.gui.html +++ b/general/g.gui/g.gui.html @@ -11,18 +11,18 @@

    DESCRIPTION

    NOTES

    -If the -d update flag is given or the GRASS_GUI +If the -d update flag is given or the GRASS_GUI environmental variable is unset, then -the GRASS internal variable GUI is permanently changed and +the GRASS internal variable GUI is permanently changed and the selected ui will be used as the default UI from then on.

    All GRASS internal variables (see g.gisenv) are stored in the user's home directory in a hidden file called -$HOME/.grass8/rc on Unix-based operating systems -and %APPDATA%\GRASS8\rc on MS Windows. Note that these GRASS +$HOME/.grass8/rc on Unix-based operating systems +and %APPDATA%\GRASS8\rc on MS Windows. Note that these GRASS internal variables are not the shell environment variables and the -rc file is not a classic UNIX run command file, it just +rc file is not a classic UNIX run command file, it just contains persistent GRASS variables.

    EXAMPLES

    diff --git a/general/g.list/g.list.html b/general/g.list/g.list.html index b99b46cde61..cc646c6ad7b 100644 --- a/general/g.list/g.list.html +++ b/general/g.list/g.list.html @@ -42,7 +42,7 @@

    Mapset search path

    If mapset is not specified, then g.list searches for data files in the mapsets that are included in the search path (defined by g.mapsets). -See g.mapsets -p. +See g.mapsets -p.
     g.list rast -p
    @@ -66,7 +66,7 @@ 

    Mapset search path

    Similarly, mapset=* (one asterisk) prints data files from all available mapsets also including those that are not listed in the -current search path (see g.mapsets -l). +current search path (see g.mapsets -l).
     g.list rast mapset=* -p
    @@ -177,7 +177,7 @@ 

    Maps whose region overlaps with a saved region

    g.list type=rast,vect region=*
    -Note that, without region=*, g.list type=rast,vect simply +Note that, without region=*, g.list type=rast,vect simply lists all available raster and vector maps from the current search path regardless of their region. diff --git a/general/g.mapset/g.mapset.html b/general/g.mapset/g.mapset.html index d8838b59a26..dd32c1d00d8 100644 --- a/general/g.mapset/g.mapset.html +++ b/general/g.mapset/g.mapset.html @@ -4,7 +4,7 @@

    DESCRIPTION

    as location), or GISDBASE (directory with one or more projects).

    -With g.mapset, the shell history (i.e. .bash_history file +With g.mapset, the shell history (i.e. .bash_history file of the initial project will be used to record the command history.

    NOTES

    diff --git a/general/g.mapsets/g.mapsets.html b/general/g.mapsets/g.mapsets.html index 3fe1a52df73..8fa75baaf2f 100644 --- a/general/g.mapsets/g.mapsets.html +++ b/general/g.mapsets/g.mapsets.html @@ -1,5 +1,8 @@

    DESCRIPTION

    +g.mapsets modifies/prints the user's current mapset search +path. + For basic information about GRASS mapset, project and data base refer to GRASS Quickstart. @@ -23,7 +26,7 @@

    DESCRIPTION

    their current mapset. Although the user can also access (i.e., use) data that are stored under other mapsets in the same GRASS project using the -mapname@mapsetname notation or mapset search path, the user +mapname@mapsetname notation or mapset search path, the user can only make permanent changes (create or modify data) located in the current mapset. The user's mapset search path lists the order in which other mapsets in @@ -33,7 +36,7 @@

    DESCRIPTION

    using the g.mapsets command. This program allows the user to use other's relevant map data without altering the original data layer, and without taking up disk space with a copy of the original -map. The mapname@mapsetname notation may be used irrespective +map. The mapname@mapsetname notation may be used irrespective of the mapset search path, i.e., any map found in another mapset with sufficient g.access privileges may be called in such a manner. @@ -60,8 +63,8 @@

    DESCRIPTION

    g.copy raster=soils@PERMANENT,my_soils
    -ensures that a new file named my_soils is to be a copy of -the file soils from the mapset PERMANENT. +ensures that a new file named my_soils is to be a copy of +the file soils from the mapset PERMANENT.

    In each project there is the special mapset PERMANENT included diff --git a/general/g.message/g.message.html b/general/g.message/g.message.html index 71ce75854db..67587eaf002 100644 --- a/general/g.message/g.message.html +++ b/general/g.message/g.message.html @@ -1,10 +1,13 @@

    DESCRIPTION

    -

    This program is to be used in Shell/Perl/Python scripts, so the author does not -need to use the echo program. The advantage of g.message is +g.message prints a message, warning, progress info, or fatal error +in the GRASS GIS way. + +This program is to be used in Shell/Perl/Python scripts, so the author does not +need to use the echo program. The advantage of g.message is that it formats messages just like other GRASS modules do and that its -functionality is influenced by the GRASS_VERBOSE and -GRASS_MESSAGE_FORMAT environment variables. +functionality is influenced by the GRASS_VERBOSE and +GRASS_MESSAGE_FORMAT environment variables.

    The program can be used for standard informative messages as well as warnings @@ -14,7 +17,7 @@

    DESCRIPTION

    NOTES

    -Messages containing "=" must use the full message= syntax so +Messages containing "=" must use the full message= syntax so the parser doesn't get confused.

    If you want a long message (multi-line) to be dealt with as a single @@ -31,18 +34,18 @@

    NOTES

    It's advisable to single quote the messages that are to be printed literally. It prevents a number of characters (most notably, space and the dollar sign -'$') from being treated specifically by the shell. +'$') from being treated specifically by the shell.

    When it is necessary to include, for example, a variable's value as part of the message, the double quotes may be used, which do not deprive the dollar sign of its special variable-expansion powers.

    While it is known that the interactive Bash instances may treat the -exclamation mark '!' character specifically (making single quoting +exclamation mark '!' character specifically (making single quoting of it necessary), it shouldn't be the case for the non-interactive instances of Bash. Nonetheless, to avoid context-based confusion later on you are encouraged to single-quote messages that do not require -$VARIABLE expansion. +$VARIABLE expansion.

    Usage in Python scripts

    @@ -51,13 +54,13 @@

    Usage in Python scripts

    for g.message.
      -
    • debug() for g.message -d -
    • error() for g.message -e -
    • fatal() for g.message -e + exit() -
    • info() for g.message -i -
    • message() for g.message -
    • verbose() for g.message -v -
    • warning() for g.message -w +
    • debug() for g.message -d
    • +
    • error() for g.message -e
    • +
    • fatal() for g.message -e + exit()
    • +
    • info() for g.message -i
    • +
    • message() for g.message
    • +
    • verbose() for g.message -v
    • +
    • warning() for g.message -w
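A minimal sketch of these wrappers in use, assuming the script runs inside a GRASS session so that grass.script is importable:

```python
import grass.script as gs

gs.verbose("Reading input maps...")   # like g.message -v
gs.info("Processing started")         # like g.message -i
gs.warning("No mask is set")          # like g.message -w
gs.debug("row=42", debug=3)           # like g.message -d debug=3
gs.fatal("Input map not found")       # like g.message -e, then exits the script
```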

    @@ -78,24 +81,24 @@

    Usage in Python scripts

    VERBOSITY LEVELS

    -Controlled by the "GRASS_VERBOSE" environment variable. Typically this +Controlled by the "GRASS_VERBOSE" environment variable. Typically this is set using the --quiet or --verbose command line options.
      -
    • 0 - only errors and warnings are printed -
    • 1 - progress messages are printed -
    • 2 - all module messages are printed -
    • 3 - additional verbose messages are printed +
    • 0 - only errors and warnings are printed
    • +
    • 1 - progress messages are printed
    • +
    • 2 - all module messages are printed
    • +
    • 3 - additional verbose messages are printed

    DEBUG LEVELS

    -Controlled by the "DEBUG" GRASS gisenv variable (set with +Controlled by the "DEBUG" GRASS gisenv variable (set with g.gisenv).
    Recommended levels:
      -
    • 1 - message is printed once or few times per module -
    • 3 - each row (raster) or line (vector) -
    • 5 - each cell (raster) or point (vector) +
    • 1 - message is printed once or few times per module
    • +
    • 3 - each row (raster) or line (vector)
    • +
    • 5 - each cell (raster) or point (vector)

    EXAMPLES

    @@ -138,7 +141,10 @@

    EXAMPLES

    SEE ALSO

    -GRASS variables and environment variables
    +GRASS variables and environment variables +
    +
    + g.gisenv, g.parser diff --git a/general/g.mkfontcap/g.mkfontcap.html b/general/g.mkfontcap/g.mkfontcap.html index b1ddeb0663b..2854d808a3b 100644 --- a/general/g.mkfontcap/g.mkfontcap.html +++ b/general/g.mkfontcap/g.mkfontcap.html @@ -2,7 +2,7 @@

    DESCRIPTION

    g.mkfontcap is a utility to generate a GRASS font configuration file ("fontcap") containing details of the fonts available on the current system. -If Freetype is not installed, +If Freetype is not installed, the font list will be limited to the set of Hershey stroke fonts supplied with GRASS. With Freetype enabled however, the module will recursively scan all files within a predefined hierarchy to find Freetype-compatible scalable @@ -24,10 +24,10 @@

    DESCRIPTION

    of the string, if enclosed in ${xxx} syntax (see examples above).

    The module will normally write to the standard fontcap file location, -$GISBASE/etc/fontcap. If the environment variable -GRASS_FONT_CAP is set, the output will instead be written +$GISBASE/etc/fontcap. If the environment variable +GRASS_FONT_CAP is set, the output will instead be written to the file specified by that variable. This is useful if you don't have -permission to modify $GISBASE/etc/fontcap: in this case you can +permission to modify $GISBASE/etc/fontcap: in this case you can use e.g.

    diff --git a/general/g.parser/g.parser.html b/general/g.parser/g.parser.html
    index b1de89a3a08..c438a675454 100644
    --- a/general/g.parser/g.parser.html
    +++ b/general/g.parser/g.parser.html
    @@ -75,8 +75,8 @@ 

    OPTIONS

    # %end
    -With {NULL} it is possible to suppress a predefined description -or label. +With {NULL} it is possible to suppress a predefined description +or label.

    The parsers allows using predefined standardized options and @@ -150,9 +150,9 @@

    NOTES

    -A "guisection" field may be added to each option and flag to +A "guisection" field may be added to each option and flag to specify that the options should appear in multiple tabs in the -auto-generated GUI. Any options without a guisection field +auto-generated GUI. Any options without a guisection field go into the "Required" or "Options" tab. For example:

     # % guisection: tabname
    @@ -160,23 +160,23 @@ 

    NOTES

    would put that option in a tab named tabname.

    -A "key_desc" field may be added to each option to specify the text that +A "key_desc" field may be added to each option to specify the text that appears in the module's usage help section. For example:

     # % key_desc: filename
     
    added to an input option would create the usage summary -[input=filename]. +[input=filename].

    If a script is run with --o, the parser will -set GRASS_OVERWRITE=1, which has the same effect as passing +set GRASS_OVERWRITE=1, which has the same effect as passing --o to every module which is run from the script. Similarly, passing ---q or --v will set GRASS_VERBOSE to 0 or 3 respectively, +--q or --v will set GRASS_VERBOSE to 0 or 3 respectively, which has the same effect as passing --q or --v to every module which is run from the script. Rather than checking whether --o, --q or --v -were used, you should be checking GRASS_OVERWRITE and/or -GRASS_VERBOSE instead. If those variables are set, the script +were used, you should be checking GRASS_OVERWRITE and/or +GRASS_VERBOSE instead. If those variables are set, the script should behave the same way regardless of whether they were set by --o, --q or --v being passed to the script or set by other means. @@ -191,13 +191,13 @@

    Conditional parameters

    Marking an option as "required" will result in the parser raising a fatal error if the option is not given, with one exception: if a flag -has the suppress_required option, and that flag is given, all +has the suppress_required option, and that flag is given, all requirements are ignored. This feature is intended for flags which abandon "normal operation" for the module; e.g. r.in.gdal's -f flag (list supported formats) uses it.
    But in general, an option cannot be marked as required if it is -optional except for the special case of a suppress_required flag. +optional except for the special case of a suppress_required flag. The parser has the ability to specify option relationships.

    @@ -219,15 +219,15 @@

    Conditional parameters

    The available rule types are:
      -
    • exclusive: at most one of the options may be given
    • -
    • required: at least one of the options must be given
    • -
    • requires: if the first option is given, at least one of the +
    • exclusive: at most one of the options may be given
    • +
    • required: at least one of the options must be given
    • +
    • requires: if the first option is given, at least one of the subsequent options must also be given
    • -
    • requires_all: if the first option is given, all of the +
    • requires_all: if the first option is given, all of the subsequent options must also be given
    • -
    • excludes: if the first option is given, none of the +
    • excludes: if the first option is given, none of the subsequent options may be given
    • -
    • collective: all or nothing; if any option is given, all +
    • collective: all or nothing; if any option is given, all must be given
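As an illustration, a short hypothetical Python script header wiring two of these rules together via g.parser. The standard options G_OPT_R_INPUT and G_OPT_F_INPUT are used only as examples; together the two rules mean that exactly one of the inputs must be given.

```python
#!/usr/bin/env python3

# %module
# % description: Hypothetical example of option relationship rules
# %end
# %option G_OPT_R_INPUT
# % required: no
# %end
# %option G_OPT_F_INPUT
# % key: file
# % required: no
# %end
# %rules
# % required: input, file
# % exclusive: input, file
# %end

import grass.script as gs


def main():
    options, flags = gs.parser()
    gs.message("Reading from: %s" % (options["input"] or options["file"]))


if __name__ == "__main__":
    main()
```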
    @@ -305,7 +305,7 @@

    reStructuredText

    reStructuredText is sometimes abbreviated as reST, ReST, or RST. -The commonly used file extension is .rst. +The commonly used file extension is .rst. Don't be confused with Representational State Transfer (REST) technology.

    TRANSLATION

    @@ -334,7 +334,7 @@

    EXAMPLES

    To run properly, the script needs to be copied into a directory listed -in $GRASS_ADDON_PATH environmental variable with the +in $GRASS_ADDON_PATH environmental variable with the executable flag being set.

    @@ -523,7 +523,7 @@

    Example code for Perl

    if( $ARGV[0] ne '@ARGS_PARSED@' ){ my $arg = ""; - for (my $i=0; $i < @ARGV;$i++) { + for (my $i=0; $i < @ARGV;$i++) { $arg .= " $ARGV[$i] "; } system("$ENV{GISBASE}/bin/g.parser $0 $arg"); @@ -645,7 +645,7 @@

    Easy creation of a script

    # % multiple: no # % key_desc: sql_query # % label: WHERE conditions of SQL statement without 'where' keyword -# % description: Example: income < 1000 and population >= 10000 +# % description: Example: income < 1000 and population >= 10000 # %end import sys diff --git a/general/g.proj/g.proj.html b/general/g.proj/g.proj.html index d24927d8761..89a44c93622 100644 --- a/general/g.proj/g.proj.html +++ b/general/g.proj/g.proj.html @@ -11,9 +11,9 @@

    DESCRIPTION

    If compiled without OGR present, the functionality is limited to:
      -
    • Reporting the CRS information for the current project -(previously called location), -either in conventional GRASS (-p flag) or PROJ (-j flag) format
    • +
    • Reporting the CRS information for the current project (previously called +location), either in conventional GRASS (-p flag) or PROJ (-j +flag) format
    • Changing the datum, or reporting and modifying the datum transformation parameters, for the current project
    @@ -32,25 +32,25 @@

    DESCRIPTION

    read from this file. If the file is not georeferenced or cannot be read, XY (unprojected) will be used. -
    wkt=filename or -
    +
    wkt=filename or -
    The file filename should contain a CRS description in WKT -format with or without line-breaks (e.g. a '.prj' file). If - is given +format with or without line-breaks (e.g. a '.prj' file). If - is given for the filename, the WKT description will be read from stdin rather than a file.
    -
    proj4=description or -
    +
    proj4=description or -
    description should be a CRS description in PROJ format, enclosed in -quotation marks if there are any spaces. If - is given for +quotation marks if there are any spaces. If - is given for description, the PROJ description will be read from stdin rather than as a directly-supplied command-line parameter.
    epsg=number
    number should correspond to the index number of a valid -co-ordinate system in the EPSG +co-ordinate system in the EPSG database. EPSG code support is based upon a local copy of the GDAL CSV co-ordinate system and datum information files, stored in the directory -$GISBASE/etc/proj/ogr_csv. These can be updated if necessary +$GISBASE/etc/proj/ogr_csv. These can be updated if necessary to support future revisions of the EPSG database.
    @@ -65,22 +65,22 @@

    DESCRIPTION

    datumtrans parameter should in general always be used in conjunction with datum. -

    The -p, -j, -w, etc. flags are all functional when importing CRS -information from an external source, meaning that g.proj can be -used to convert between representations of the information. It is +

    The -p, -j, -w, etc. flags are all functional when +importing CRS information from an external source, meaning that g.proj +can be used to convert between representations of the information. It is not required that either the input or output be in GRASS format. -

    In addition however, if the -c flag is specified, g.proj will +

    In addition however, if the -c flag is specified, g.proj will create new GRASS CRS files (PROJ_INFO, PROJ_UNITS, WIND and DEFAULT_WIND) based on the imported information. If the project -parameter is specified in addition to -c, then a new project will be created. +parameter is specified in addition to -c, then a new project will be created. Otherwise the CRS information files in the current project will be overwritten. The program will not warn before doing this. -

    The final mode of operation of g.proj is to report on the datum +

    The final mode of operation of g.proj is to report on the datum information and datum transformation parameters associated with the -co-ordinate system. The -d flag will report a human-readable summary of +co-ordinate system. The -d flag will report a human-readable summary of this.

    NOTES

    @@ -88,17 +88,21 @@

    NOTES

    If the input co-ordinate system contains a datum name but no transformation parameters, and there is more than one suitable parameter set available (according to the files datum.table and datumtransform.table in -$GISBASE/etc/proj), g.proj will check the value of -the datumtrans option and act according to the following:
    --1: List available parameter sets in a GUI-parsable (but also -human-readable) format and exit.
    -0 (default): Continue without specifying parameters - if +$GISBASE/etc/proj), g.proj will check the value of +the datumtrans option and act according to the following: + +

      +
    • -1: List available parameter sets in a GUI-parsable (but also +human-readable) format and exit.
    • +
    • 0 (default): Continue without specifying parameters - if used when creating a project, other GRASS modules will use the "default" -(likely non-optimum) parameters for this datum if necessary in the future.
      -Any other number less than or equal to the number of parameter sets +(likely non-optimum) parameters for this datum if necessary in the future.
    • +
    • Any other number less than or equal to the number of parameter sets available for this datum: Choose this parameter set and add it to the -co-ordinate system description.
      -If the -t flag is specified, the module will attempt to change the +co-ordinate system description.
    • +
    + +If the -t flag is specified, the module will attempt to change the datum transformation parameters using one of the above two methods even if a valid parameter set is already specified in the input co-ordinate system. This can be useful to change the datum information @@ -137,7 +141,7 @@

    Create projection (PRJ) file

    Create a '.prj' file in ESRI format corresponding to the current project:
    -g.proj -wef > irish_grid.prj
    +g.proj -wef > irish_grid.prj
     

    Read CRS from file

    @@ -167,10 +171,10 @@

    Create new project

    Create a new project with the coordinate system referred to by EPSG code -900913 (Google Mercator Projection)
    +3857 (Pseudo-Mercator Projection)

    -g.proj -c epsg=900913 project=google
    +g.proj -c epsg=3857 project=google
     

    Create a new project with the coordinate system referred to by EPSG code @@ -228,14 +232,13 @@

    REFERENCES

    Further reading

    SEE ALSO

    - m.proj, r.proj, diff --git a/general/g.region/g.region.html b/general/g.region/g.region.html index 85d75a058df..2eb4e58aa08 100644 --- a/general/g.region/g.region.html +++ b/general/g.region/g.region.html @@ -4,7 +4,7 @@

    DESCRIPTION

    settings of the current geographic region. These regional boundaries can be set by the user directly and/or set from a region definition file (stored under the -windows directory in the user's current +windows directory in the user's current mapset). The user can create, modify, and store as many geographic region definitions as desired for any given mapset. However, only one of these geographic region @@ -50,8 +50,8 @@

    DEFINITIONS

    Each GRASS project (previously called location) has a fixed geographic region, called the default geographic region -(stored in the region file DEFAULT_WIND under -the special mapset PERMANENT), that defines the +(stored in the region file DEFAULT_WIND under +the special mapset PERMANENT), that defines the extent of the data base. While this provides a starting point for defining new geographic regions, user-defined geographic regions need not fall within this geographic @@ -73,14 +73,14 @@

    DEFINITIONS

    Each GRASS MAPSET may contain any number of pre-defined, and named, geographic regions. These region definitions are stored in the user's current mapset -location under the windows directory (also +location under the windows directory (also referred to as the user's saved region definitions). Any of these pre-defined geographic regions may be selected, by name, to become the current geographic region. Users may also access saved region definitions stored under other mapsets in the current project, if these mapsets are included in the user's mapset search -path or the '@' operator is used (region_name@mapset). +path or the '@' operator is used (region_name@mapset).

    NOTES

    @@ -118,7 +118,7 @@

    NOTES

    The -g flag prints the current region settings in shell script style. This format can be given back to g.region on its command line. This may also be used to save region settings as shell environment variables -with the UNIX eval command, "eval `g.region -g`". +with the UNIX eval command, "eval `g.region -g`".

    With -u flag current region is not updated even if one or more options for changing region is used (res=, raster=, etc). @@ -163,9 +163,9 @@

    EXAMPLES

    Printing extent and raster resolution in 2D and 3D

    -
    +
    g.region -p - +
    This will print the current region in the format: @@ -185,9 +185,9 @@

    Printing extent and raster resolution in 2D and 3D

    -

    +
    g.region -p3 - +
    This will print the current region and the 3D region (used for voxels) in the format: @@ -216,9 +216,9 @@

    Printing extent and raster resolution in 2D and 3D

    -

    +
    g.region -g - +
    The -g option prints the region in the following script style (key=value) format: @@ -235,9 +235,9 @@

    Printing extent and raster resolution in 2D and 3D

    -

    +
    g.region -bg - +
    The -bg option prints the region in the following script style (key=value) format plus the @@ -259,9 +259,9 @@

    Printing extent and raster resolution in 2D and 3D

    -

    +
    g.region -l - +
    The -l option prints the region in the following format: @@ -278,9 +278,9 @@

    Printing extent and raster resolution in 2D and 3D

    -

    +
    g.region -pm - +
    This will print the current region in the format (latitude-longitude project): @@ -303,36 +303,36 @@

    Printing extent and raster resolution in 2D and 3D

    Changing extent and raster resolution using values

    -
    +
    g.region n=7360100 e=699000 - +
    will reset the northing and easting for the current region, but leave the south edge, west edge, and the region cell resolutions unchanged.

    -

    +
    g.region n=51:36:05N e=10:10:05E s=51:29:55N w=9:59:55E res=0:00:01 - +
    will reset the northing, easting, southing, westing and resolution for the current region, here in DMS latitude-longitude style (decimal degrees and degrees with decimal minutes can also be used).

    -

    +
    g.region -dp s=698000 - +
    will set the current region from the default region for the GRASS project, reset the south edge to 698000, and then print the result.

    -

    +
    g.region n=n+1000 w=w-500 - +
    The n=value may also be specified as a function of its current value: n=n+value @@ -344,9 +344,9 @@

    Changing extent and raster resolution using values

    decreased by 500 units.

    -

    +
    g.region n=s+1000 e=w+1000 - +
    This form allows the user to set the region boundary values relative to one another. Here, the northern @@ -364,19 +364,18 @@

    Changing extent and raster resolution using values

    Changing extent and raster resolution using maps

    -
    +
    g.region raster=soils - +
    This form will make the current region settings exactly the same as those given in the cell header file for the raster map layer soils.

    - -

    +
    g.region raster=soils zoom=soils - +
    This form will first look up the cell header file for the raster map layer soils, use this as the @@ -385,14 +384,13 @@

    Changing extent and raster resolution using maps

    data in the map layer soils. Note that if the parameter raster=soils were not specified, the zoom would shrink to encompass all non-NULL data values in -the soils map that were located within the current region +the soils map that were located within the current region settings.

    - -

    +
    g.region -up raster=soils - +
    The -u option suppresses the re-setting of the current region definition. This can be useful when it is @@ -401,24 +399,24 @@

    Changing extent and raster resolution using maps

    without changing the current region settings.

    -

    +
    g.region -up zoom=soils save=soils - +
    This will zoom into the smallest region which encompasses all non-NULL soils data values, and save the new region settings in a file to be called soils -and stored under the windows directory in the +and stored under the windows directory in the user's current mapset. The current region settings are not changed.

    Changing extent and raster resolution in 3D

    -
    +
    g.region b=0 t=3000 tbres=200 res3=100 g.region -p3 - +
    This will define the 3D region for voxel computations. In this example a volume with bottom (0m) to top (3000m) @@ -430,7 +428,7 @@

    Using g.region in a shell in combination with OGR

    Extracting a spatial subset of the external vector map -soils.shp into new external vector map soils_cut.shp +soils.shp into new external vector map soils_cut.shp using the OGR ogr2ogr tool:
    @@ -443,8 +441,8 @@ 

    Using g.region in a shell in combination with OGR

    Using g.region in a shell in combination with GDAL

    Extracting a spatial subset of the external raster map -p016r035_7t20020524_z17_nn30.tif into new external raster -map p016r035_7t20020524_nc_spm_wake_nn30.tif using the GDAL +p016r035_7t20020524_z17_nn30.tif into new external raster +map p016r035_7t20020524_nc_spm_wake_nn30.tif using the GDAL gdalwarp tool:
    diff --git a/general/g.remove/g.remove.html b/general/g.remove/g.remove.html
    index b6554a398db..974a5d7c40e 100644
    --- a/general/g.remove/g.remove.html
    +++ b/general/g.remove/g.remove.html
    @@ -3,16 +3,16 @@ 

    DESCRIPTION

    g.remove removes data files matching a pattern given by wildcards or POSIX Extended Regular Expressions. If the -f force flag is not given then nothing is removed, instead the list of selected file names is printed to -stdout as a preview of the files to be deleted. +stdout as a preview of the files to be deleted.

    EXAMPLES

    -Delete map1 and map2 raster maps in the current mapset: +Delete map1 and map2 raster maps in the current mapset:
     g.remove -f type=raster name=tmp1,tmp2
     
    -Delete all raster and vector maps starting with "tmp_" in the current +Delete all raster and vector maps starting with "tmp_" in the current mapset:
     # show matching raster and vector maps but do not delete yet (as verification)
    @@ -22,8 +22,8 @@ 

    EXAMPLES

    g.remove -f type=raster,vector pattern="tmp_*"
    -Delete all vector maps starting with "stream_" in the current mapset, -but exclude those ending with "_final": +Delete all vector maps starting with "stream_" in the current mapset, +but exclude those ending with "_final":
     g.remove -f type=vector pattern="stream_*" exclude="*_final"
     
    diff --git a/general/g.setproj/g.setproj.html b/general/g.setproj/g.setproj.html index 8adde5637aa..eb39ccb4010 100644 --- a/general/g.setproj/g.setproj.html +++ b/general/g.setproj/g.setproj.html @@ -9,7 +9,7 @@ -GRASS logo
    +GRASS logo

    NAME

    @@ -52,7 +52,7 @@

    NOTES

    If the datum or ellipsoid required are not listed within this program, the user/administrator may add the definition to the files datum.table, datumtransform.table and ellipse.table in the -$GISBASE/etc/proj directory. +$GISBASE/etc/proj directory.

    Depending on the projection selected, the user will then be prompted for the various other parameters required to define it. @@ -64,18 +64,20 @@

    NOTES

    SEE ALSO

    -g.proj, -m.proj, -r.proj, -v.proj, -PROJ site + +g.proj, +m.proj, +r.proj, +v.proj, +PROJ site +

    Further reading

    AUTHORS

    diff --git a/general/g.tempfile/g.tempfile.html b/general/g.tempfile/g.tempfile.html index 8952e531b78..668252e8a7b 100644 --- a/general/g.tempfile/g.tempfile.html +++ b/general/g.tempfile/g.tempfile.html @@ -3,7 +3,7 @@

    DESCRIPTION

    g.tempfile is designed for shell scripts that need to use large temporary files. GRASS provides a mechanism for temporary files that does not depend on -/tmp. GRASS temporary files are created in the data base with the assumption +/tmp/. GRASS temporary files are created in the data base with the assumption that there will be enough space under the data base for large files. GRASS periodically removes temporary files that have been left behind by programs that failed to remove them before terminating. @@ -13,18 +13,21 @@

    DESCRIPTION

    creates an unique file and prints the name. The user is required to provide a process-id which will be used as part of the name of the file. Most Unix shells provide a way to get the process id of the current shell. -For /bin/sh and /bin/csh this is $$. -It is recommended that $$ be specified as the process-id for +For /bin/sh and /bin/csh this is $$. +It is recommended that $$ be specified as the process-id for g.tempfile.

    EXAMPLE

    -For /bin/sh scripts the following syntax should be used: +For /bin/sh scripts the following syntax should be used: +
     temp1=`g.tempfile pid=$$`
     temp2=`g.tempfile pid=$$`
     
    -For /bin/csh scripts, the following can be used: + +For /bin/csh scripts, the following can be used: +
     set temp1=`g.tempfile pid=$$`
     set temp2=`g.tempfile pid=$$`
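For Python scripts there is an equivalent helper in the scripting library; a minimal sketch (no pid argument is needed, the current process id is used automatically):

```python
import grass.script as gs

# Each call returns the path of a new unique file inside the mapset's
# .tmp directory, obtained the same way g.tempfile provides it.
tmp1 = gs.tempfile()
tmp2 = gs.tempfile()
```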
    @@ -38,10 +41,10 @@ 

    NOTES

    Although GRASS does eventually get around to removing tempfiles that have been left behind, the programmer should make every effort to remove these files. They often get -large and take up disk space. If you write /bin/sh scripts, -learn to use the /bin/sh trap command. If you -write /bin/csh scripts, learn to use the /bin/csh -onintr command. +large and take up disk space. If you write /bin/sh scripts, +learn to use the /bin/sh related trap command. If you +write /bin/csh scripts, learn to use the /bin/csh +related onintr command.

    AUTHOR

    diff --git a/general/g.version/g.version.html b/general/g.version/g.version.html index f5d2f63043c..abb3867c9d5 100644 --- a/general/g.version/g.version.html +++ b/general/g.version/g.version.html @@ -22,7 +22,7 @@

    NOTES

    by -e flag.

    -See also function version() +See also function version() from Python Scripting Library. @@ -69,10 +69,10 @@

    Full info in shell script style

    sqlite=3.36.0
    -Note: if revision=exported is reported instead of the git hash then the -git program was not available during compilation of GRASS GIS and the -source code did not contain the .git/ subdirectory (requires e.g. to -git clone the GRASS GIS software repository.) +Note: if revision=exported is reported instead of the git hash then the +git program was not available during compilation of GRASS GIS and the +source code did not contain the .git/ subdirectory (requires e.g. to +git clone the GRASS GIS software repository.)

    Citing GRASS GIS

    diff --git a/grasslib.dox b/grasslib.dox index dd5de826cfe..1dd0bce6a00 100644 --- a/grasslib.dox +++ b/grasslib.dox @@ -1,7 +1,7 @@ /*! \mainpage GRASS GIS 8 Programmer's Manual GRASS GIS (Geographic @@ -23,7 +23,7 @@ Team, an international team of programmers, GRASS module authors are cited within their module's source code and the contributed manual pages. -© 2000-2024 by the GRASS Development Team +© 2000-2025 by the GRASS Development Team This manual is published under GNU Free Documentation diff --git a/gui/Makefile b/gui/Makefile index 3f7a063fb02..30102feb257 100644 --- a/gui/Makefile +++ b/gui/Makefile @@ -1,7 +1,11 @@ MODULE_TOPDIR = .. +PGM = wxguiintro + SUBDIRS = icons images scripts xml wxpython include $(MODULE_TOPDIR)/include/Make/Dir.make -default: parsubdirs +default: htmldir + +htmldir: parsubdirs diff --git a/gui/scripts/Makefile b/gui/scripts/Makefile index 7630fd6d8f5..56c8a27f8e7 100644 --- a/gui/scripts/Makefile +++ b/gui/scripts/Makefile @@ -1,7 +1,7 @@ MODULE_TOPDIR = ../.. -include $(MODULE_TOPDIR)/include/Make/Rules.make include $(MODULE_TOPDIR)/include/Make/Vars.make +include $(MODULE_TOPDIR)/include/Make/Rules.make include $(MODULE_TOPDIR)/include/Make/Python.make DSTDIR = $(GUIDIR)/scripts diff --git a/gui/wxguiintro.html b/gui/wxguiintro.html new file mode 100644 index 00000000000..0ba40cd4084 --- /dev/null +++ b/gui/wxguiintro.html @@ -0,0 +1,75 @@ + + + +

    Introduction to the GRASS GIS Graphical User Interface

    + +

    Overview

+ +The wxGUI (wxPython-based Graphical User Interface) is the primary user +interface for GRASS GIS. Designed for efficiency and ease of use, the +wxGUI provides an intuitive way to interact with spatial data and the +powerful tools available in GRASS GIS. + +The GUI supports visualisation of spatial data, execution of +geoprocessing tasks, and management of complex workflows, and offers a +comprehensive set of tools. + +

    Features

    + +The wxGUI is designed to cater to both novice and advanced users with +the following features: + +
      +
    • A clean and customizable layout for efficient workspace management.
    • +
    • Integrated support for both raster and vector data operations.
    • +
    • Drag-and-drop capabilities for quick layer addition and arrangement.
    • +
    • Support for live previews of data and processing results.
    • +
    • Direct access to GRASS GIS modules, complete with user-friendly + dialog windows.
    • +
    • Advanced debugging and scripting capabilities for developers and + power users.
    • +
    + +

    Getting Started

    + +To launch the wxGUI, simply start GRASS GIS. Upon startup, the wxGUI will +load, providing access to its various components. + +The wxGUI usage is explained in greater detail
    here. + +New GRASS GIS users can explore the integrated help system or visit the +GRASS GIS documentation +for tutorials and guides. + +

    Key Components

    + +The wxGUI is composed of several modules and features: + +
      +
    • Map Display: Visualize raster, vector, and 3D data + layers in an interactive map viewer.
    • +
    • Layer Manager: Organize and control the visibility, + styling, and properties of your data layers.
    • +
    • Data Catalog: Explore and manage GRASS GIS mapsets + and spatial data with ease.
    • +
    • Geoprocessing Tools: Access a wide range of geospatial + analysis and modeling tools through an easy-to-use interface.
    • +
    • Command Console: Run GRASS GIS commands directly, + with syntax highlighting and autocompletion support.
    • +
    • 3D View: Analyze and visualize 3D landscapes + using NVIZ.
    • +
    + +The wxGUI components are explained in greater detail +here. + +

    See also

    + + diff --git a/gui/wxpython/.pylintrc b/gui/wxpython/.pylintrc deleted file mode 100644 index 7bfe0db218d..00000000000 --- a/gui/wxpython/.pylintrc +++ /dev/null @@ -1,688 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-allow-list= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -extension-pkg-whitelist= - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -fail-on= - -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 - -# Files or directories to be skipped. They should be base names, not paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the ignore-list. The -# regex matches against paths and can be in Posix or Windows format. -ignore-paths=core/.*, - dbmgr/.*, - docs/.*, # Sphinx config files. - gcp/.*, - gmodeler/.*, - gui_core/.*, - iclass/.*, - image2target/.*, - iscatt/.*, - lmgr/.*, # Close to being compliant. - location_wizard/.*, # Close to being compliant. - mapdisp/.*, # Close to being compliant. - mapswipe/.*, # Close to being compliant. - mapwin/.*, # Close to being compliant. - modules/.*, # Close to being compliant. - photo2image/.*, - nviz/.*, - psmap/.*, - tplot/.*, # Close to being compliant. - vdigit/.*, - vnet/.*, # Close to being compliant. - web_services/.*, # Close to being compliant. - wxplot/.*, # Close to being compliant. - xml/, # XML files only. - menustrings.py, - wxgui.py, - states.txt, - .*Makefile, - .*README.*, - - -# Files or directories matching the regex patterns are skipped. The regex -# matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.8 - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). 
You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - deprecated-pragma, - fixme, - unnecessary-lambda, - # Import issues - import-error, - wrong-import-position, - ungrouped-imports, - wrong-import-order, - unused-import, - import-outside-toplevel, - consider-using-from-import, - no-name-in-module, - # End of import issues - missing-function-docstring, - missing-module-docstring, - missing-class-docstring, - useless-object-inheritance, - attribute-defined-outside-init, - no-self-use, - unused-variable, - possibly-unused-variable, - unused-argument, - expression-not-assigned, - unnecessary-pass, - pointless-string-statement, - unreachable, - self-assigning-variable, - redefined-builtin, - redefined-outer-name, - cell-var-from-loop, - undefined-loop-variable, - unspecified-encoding, - arguments-differ, - arguments-renamed, - no-value-for-parameter, - redundant-keyword-arg, - protected-access, - inconsistent-return-statements, - too-many-function-args, - global-statement, - global-variable-not-assigned, - global-variable-undefined, - dangerous-default-value, - broad-except, - bare-except, - invalid-envvar-default, - anomalous-backslash-in-string, - # Here we start consider... warnings - consider-using-enumerate, - consider-using-set-comprehension, - consider-using-in, - consider-using-dict-items, - consider-iterating-dictionary, - consider-using-f-string, - unnecessary-comprehension, - simplifiable-if-expression, - simplifiable-if-statement, - use-list-literal, - use-dict-literal, - use-a-generator, - use-implicit-booleaness-not-len, - no-else-return, - no-else-raise, - no-else-continue, - raise-missing-from, - super-with-arguments, - useless-return, - # Here we end consider... warnings - consider-using-with, # Resource-related warning - use-symbolic-message-instead - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. 
-# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=10 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit,argparse.parse_error - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: fr_MC (myspell), fr_CA -# (myspell), fr_BE (myspell), fr_LU (myspell), fr_CH (myspell), fr_FR -# (myspell), ar (myspell), es_CR (myspell), de_CH_frami (myspell), es_EC -# (myspell), ar_YE (myspell), en_CA (myspell), ar_BH (myspell), ar_IN -# (myspell), ar_TN (myspell), en_ZA (myspell), de_DE_frami (myspell), ar_SY -# (myspell), ar_IQ (myspell), ar_LB (myspell), ar_KW (myspell), ru_RU -# (myspell), es_BO (myspell), en_GB (myspell), ar_SD (myspell), de_DE -# (myspell), es_CU (myspell), es_PA (myspell), ar_EG (myspell), es_HN -# (myspell), de_CH (myspell), es_NI (myspell), es_AR (myspell), es_ES -# (myspell), ar_SA (myspell), es_VE (myspell), de_AT_frami (myspell), it_IT -# (myspell), ar_OM (myspell), ar_DZ (myspell), it_CH (myspell), es_MX -# (myspell), es_PY (myspell), en_AU (myspell), es_DO (myspell), es_SV -# (myspell), es_PR (myspell), es_GT (myspell), ar_LY (myspell), ar_JO -# (myspell), en_US (myspell), de_AT (myspell), es_PE (myspell), ar_QA -# (myspell), es_CL (myspell), pt_BR (myspell), ar_AE (myspell), pt_PT -# (myspell), es_CO (myspell), es_UY (myspell), ar_MA (myspell), fr (myspell), -# es_US (myspell), en (aspell). -spelling-dict= - -# List of comma separated words that should be considered directives if they -# appear and the beginning of a comment and should not be checked. -spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. 
-generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# class is considered mixin if its name matches the mixin-class-rgx option. -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,main_window.page.MainPageBase - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules=wx - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# Regex pattern to define which classes are considered mixins ignore-mixin- -# members is set to 'yes' -mixin-class-rgx=.*[Mm]ixin - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -# Translation function is (unfortunately) defined as a buildin. -additional-builtins=_ - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of names allowed to shadow builtins -allowed-redefined-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -# On top of the defaults, simple unused is also permissible. -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_|^event$ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. 
-redefining-builtins-modules=past.builtins,future.builtins,builtins,io - - -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes - -# Docstrings are removed from the similarity computation -ignore-docstrings=yes - -# Imports are removed from the similarity computation -ignore-imports=no - -# Signatures are removed from the similarity computation -ignore-signatures=no - -# Minimum lines number of a similarity. -# Matching only larger chunks of code, not the default 4 lines. -min-similarity-lines=10 - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=any - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=any - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. -#class-const-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=any - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=any - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - x, - y, - z, - ex, - Run, - _ - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=any - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. 
-#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=any - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=120 - -# Maximum number of lines in a module. -max-module-lines=3000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). -import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. 
-valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. -# We tend to have function with more arguments than the default 5 -# and that doesn't seem to be the problem of our code. -max-args=25 - -# Maximum number of attributes for a class (see R0902). -max-attributes=40 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=30 - -# Maximum number of locals for function / method body. -max-locals=40 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=120 - -# Maximum number of return / yield for function / method body. -max-returns=10 - -# Maximum number of statements in function / method body. -max-statements=100 - -# Minimum number of public methods for a class (see R0903). -# It is okay to inherit and have only __init__. -min-public-methods=0 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception diff --git a/gui/wxpython/Makefile b/gui/wxpython/Makefile index b1785f75dc0..a661e593bfc 100644 --- a/gui/wxpython/Makefile +++ b/gui/wxpython/Makefile @@ -49,7 +49,7 @@ xml/module_tree_menudata.xml: core/toolboxes.py $(call run_grass,$(PYTHON) $< "validate" $@) menustrings.py: core/menutree.py $(DSTDIR)/xml/menudata.xml $(DSTDIR)/xml/module_tree_menudata.xml $(DSTDIR)/xml/menudata_modeler.xml $(DSTDIR)/xml/menudata_psmap.xml - @echo "# This is a generated file.\n" > $@ + @echo "# This is a generated file.\n# pylint: skip-file\n" > $@ $(call run_grass,$(PYTHON) $< "manager" >> $@) $(call run_grass,$(PYTHON) $< "module_tree" >> $@) $(call run_grass,$(PYTHON) $< "modeler" >> $@) diff --git a/gui/wxpython/animation/anim.py b/gui/wxpython/animation/anim.py index 591bbff151c..6496f3b4ca4 100644 --- a/gui/wxpython/animation/anim.py +++ b/gui/wxpython/animation/anim.py @@ -93,11 +93,11 @@ def SetCallbackOrientationChanged(self, callback): def Start(self): if not self.IsActive(): - return + pass def Pause(self, paused): if not self.IsActive(): - return + pass def Stop(self): if not self.IsActive(): diff --git a/gui/wxpython/animation/controller.py b/gui/wxpython/animation/controller.py index 1a8cb1fe9cc..808e9b20ef4 100644 --- a/gui/wxpython/animation/controller.py +++ b/gui/wxpython/animation/controller.py @@ -627,7 +627,6 @@ def export_avi_callback(event): del self.busy if error: GError(parent=self.frame, message=error) - return if exportInfo["method"] == "sequence": filename = os.path.join( diff --git a/gui/wxpython/animation/data.py b/gui/wxpython/animation/data.py index 3799cf43ef1..ec33ce082d2 100644 --- a/gui/wxpython/animation/data.py +++ b/gui/wxpython/animation/data.py @@ -240,9 +240,7 @@ def _computeRegions(self, count, startRegion, endRegion=None, zoomValue=None): del region["projection"] if "zone" in region: del region["zone"] - regions = [] - for i in range(self._mapCount): - regions.append(copy.copy(region)) + regions = [copy.copy(region) for i in 
range(self._mapCount)] self._regions = regions if not (endRegion or zoomValue): return @@ -295,9 +293,8 @@ def __init__(self): def SetName(self, name): if not self.hidden: if self._mapType is None: - raise ValueError( - "To set layer name, the type of layer must be specified." - ) + msg = "To set layer name, the type of layer must be specified." + raise ValueError(msg) if self._mapType in {"strds", "stvds", "str3ds"}: try: name = validateTimeseriesName(name, self._mapType) diff --git a/gui/wxpython/animation/g.gui.animation.html b/gui/wxpython/animation/g.gui.animation.html index 09a9630a54c..cf22e6b3cd1 100644 --- a/gui/wxpython/animation/g.gui.animation.html +++ b/gui/wxpython/animation/g.gui.animation.html @@ -81,7 +81,7 @@

    EXAMPLES

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/animation/nviztask.py b/gui/wxpython/animation/nviztask.py index 26605737ea8..5dbb1d3e982 100644 --- a/gui/wxpython/animation/nviztask.py +++ b/gui/wxpython/animation/nviztask.py @@ -136,9 +136,7 @@ def _processSurface(self, surface, mapName): self._setMultiTaskParam(mode2, value) # position - pos = [] - for coor in ("x", "y", "z"): - pos.append(str(surface["position"][coor])) + pos = [str(surface["position"][coor]) for coor in ("x", "y", "z")] value = ",".join(pos) self._setMultiTaskParam("surface_position", value) diff --git a/gui/wxpython/animation/provider.py b/gui/wxpython/animation/provider.py index 43929a1873d..1b07f63841f 100644 --- a/gui/wxpython/animation/provider.py +++ b/gui/wxpython/animation/provider.py @@ -657,7 +657,7 @@ def CompositeProcess( :param tempDir: directory for rendering :param cmdList: list of d.rast/d.vect commands :param region: region as a dict or None - :param opacites: list of opacities + :param opacities: list of opacities :param bgcolor: background color as a tuple of 3 values 0 to 255 :param fileQueue: the inter process communication queue storing the file name of the image @@ -896,7 +896,7 @@ def test(): if os.path.exists(tempDir): shutil.rmtree(tempDir) os.mkdir(tempDir) - # comment this line to keep the directory after prgm ends + # comment this line to keep the directory after program ends # cleanUp = CleanUp(tempDir) # import atexit # atexit.register(cleanUp) diff --git a/gui/wxpython/animation/temporal_manager.py b/gui/wxpython/animation/temporal_manager.py index 0f00cc6e329..814a48fcac1 100644 --- a/gui/wxpython/animation/temporal_manager.py +++ b/gui/wxpython/animation/temporal_manager.py @@ -116,8 +116,7 @@ def _setTemporalState(self): # check for units for relative type if relative: - units = set() - units.update(infoDict["unit"] for infoDict in self.timeseriesInfo.values()) + units = {infoDict["unit"] for infoDict in self.timeseriesInfo.values()} if len(units) > 1: message = _( "It is not allowed to display data with different units (%s)." @@ -162,10 +161,11 @@ def GetGranularity(self): return self._getCommonGranularity() def _getCommonGranularity(self): - allMaps = [] - for dataset in self.timeseriesList: - maps = self.timeseriesInfo[dataset]["maps"] - allMaps.extend(maps) + allMaps = [ + a + for dataset in self.timeseriesList + for a in self.timeseriesInfo[dataset]["maps"] + ] if self.temporalType == TemporalType.ABSOLUTE: gran = tgis.compute_absolute_time_granularity(allMaps) @@ -210,9 +210,9 @@ def GetLabelsAndMaps(self): newMapList[i : i + len(mapList)] = mapList newMapLists.append(newMapList) - mapDict = {} - for i, dataset in enumerate(self.timeseriesList): - mapDict[dataset] = newMapLists[i] + mapDict = { + dataset: newMapLists[i] for i, dataset in enumerate(self.timeseriesList) + } if self.temporalType == TemporalType.ABSOLUTE: # ('1996-01-01 00:00:00', '1997-01-01 00:00:00', 'year'), diff --git a/gui/wxpython/animation/utils.py b/gui/wxpython/animation/utils.py index 5f69bc40d35..54aafa3532f 100644 --- a/gui/wxpython/animation/utils.py +++ b/gui/wxpython/animation/utils.py @@ -15,7 +15,7 @@ This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details. 
-@author Anna Perasova +@author Anna Petrasova """ import os @@ -212,8 +212,7 @@ def checkSeriesCompatibility(mapSeriesList=None, timeseriesList=None): ) if mapSeriesList: - count = set() - count.update(len(mapSeries) for mapSeries in mapSeriesList) + count = {len(mapSeries) for mapSeries in mapSeriesList} if len(count) > 1: raise GException( _( @@ -240,11 +239,9 @@ def ComputeScaledRect(sourceSize, destSize): """Fits source rectangle into destination rectangle by scaling and centering. - >>> ComputeScaledRect(sourceSize = (10, 40), destSize = (100, 50)) {'height': 50, 'scale': 1.25, 'width': 13, 'x': 44, 'y': 0} - :param sourceSize: size of source rectangle :param destSize: size of destination rectangle """ diff --git a/gui/wxpython/core/debug.py b/gui/wxpython/core/debug.py index b6c12a60b62..bcb33f4d1da 100644 --- a/gui/wxpython/core/debug.py +++ b/gui/wxpython/core/debug.py @@ -61,7 +61,7 @@ def msg(self, level, message, *args): :param args: formatting params """ # self.SetLevel() - if self.debuglevel > 0 and level > 0 and level <= self.debuglevel: + if 0 < level <= self.debuglevel: if args: sys.stderr.write( "GUI D%d/%d: " % (level, self.debuglevel) diff --git a/gui/wxpython/core/gcmd.py b/gui/wxpython/core/gcmd.py index dfd719b2ff0..d8254b4184f 100644 --- a/gui/wxpython/core/gcmd.py +++ b/gui/wxpython/core/gcmd.py @@ -25,17 +25,20 @@ @author Martin Landa """ +from __future__ import annotations + +import errno +import locale import os +import signal +import subprocess import sys import time -import errno -import signal import traceback -import locale -import subprocess from threading import Thread -import wx +from typing import TYPE_CHECKING, TextIO +import wx from core.debug import Debug from core.globalvar import SCT_EXT @@ -44,12 +47,16 @@ is_mswindows = sys.platform == "win32" if is_mswindows: + import msvcrt + from win32file import ReadFile, WriteFile from win32pipe import PeekNamedPipe - import msvcrt else: - import select import fcntl + import select + +if TYPE_CHECKING: + from io import TextIOWrapper def DecodeString(string): @@ -298,7 +305,7 @@ def _recv(self, which, maxsize): message = "Other end disconnected!" -def recv_some(p, t=0.1, e=1, tr=5, stderr=0): +def recv_some(p, t=0.1, e=1, tr=5, stderr=0) -> str: # TODO: use LiteralString on 3.11+ tr = max(tr, 1) x = time.time() + t y = [] @@ -342,7 +349,7 @@ def __init__( stdin=None, verbose=None, wait=True, - rerr=False, + rerr: bool | None = False, stdout=None, stderr=None, ): @@ -482,7 +489,7 @@ def __ProcessStdErr(self): type = "WARNING" elif "GRASS_INFO_ERROR" in line: # error type = "ERROR" - elif "GRASS_INFO_END": # end of message + elif "GRASS_INFO_END" in line: # end of message msg.append((type, content)) type = None content = "" @@ -497,7 +504,7 @@ def __ProcessStdErr(self): def __GetError(self): """Get error message or ''""" if not self.cmdThread.module: - return _("Unable to exectute command: '%s'") % " ".join(self.cmd) + return _("Unable to execute command: '%s'") % " ".join(self.cmd) for type, msg in self.__ProcessStdErr(): if type == "ERROR": @@ -510,7 +517,14 @@ class CommandThread(Thread): """Create separate thread for command. 
Used for commands launched on the background.""" - def __init__(self, cmd, env=None, stdin=None, stdout=sys.stdout, stderr=sys.stderr): + def __init__( + self, + cmd, + env=None, + stdin: TextIOWrapper | None = None, + stdout: TextIO = sys.stdout, + stderr: TextIO = sys.stderr, + ) -> None: """ :param cmd: command (given as list) :param env: environmental variables @@ -522,11 +536,11 @@ def __init__(self, cmd, env=None, stdin=None, stdout=sys.stdout, stderr=sys.stde self.cmd = cmd self.stdin = stdin - self.stdout = stdout - self.stderr = stderr + self.stdout: TextIO = stdout + self.stderr: TextIO = stderr self.env = env - self.module = None + self.module: Popen | None = None self.error = "" self._want_abort = False @@ -584,7 +598,7 @@ def run(self): print(e, file=sys.stderr) return 1 - if self.stdin: # read stdin if requested ... + if self.stdin and self.module.stdin is not None: # read stdin if requested... self.module.stdin.write(self.stdin) self.module.stdin.close() @@ -593,14 +607,14 @@ def run(self): def _redirect_stream(self): """Redirect stream""" - if self.stdout: + if self.stdout and self.module is not None and self.module.stdout is not None: # make module stdout/stderr non-blocking out_fileno = self.module.stdout.fileno() if not is_mswindows: flags = fcntl.fcntl(out_fileno, fcntl.F_GETFL) fcntl.fcntl(out_fileno, fcntl.F_SETFL, flags | os.O_NONBLOCK) - if self.stderr: + if self.stderr and self.module is not None and self.module.stderr is not None: # make module stdout/stderr non-blocking out_fileno = self.module.stderr.fileno() if not is_mswindows: @@ -608,19 +622,20 @@ def _redirect_stream(self): fcntl.fcntl(out_fileno, fcntl.F_SETFL, flags | os.O_NONBLOCK) # wait for the process to end, sucking in stuff until it does end - while self.module.poll() is None: - if self._want_abort: # abort running process - self.module.terminate() - self.aborted = True - return - if self.stdout: - line = recv_some(self.module, e=0, stderr=0) - self.stdout.write(line) - if self.stderr: - line = recv_some(self.module, e=0, stderr=1) - self.stderr.write(line) - if len(line) > 0: - self.error = line + if self.module is not None: + while self.module.poll() is None: + if self._want_abort: # abort running process + self.module.terminate() + self.aborted = True + return + if self.stdout: + line = recv_some(self.module, e=0, stderr=0) + self.stdout.write(line) + if self.stderr: + line = recv_some(self.module, e=0, stderr=1) + self.stderr.write(line) + if len(line) > 0: + self.error = line # get the last output if self.stdout: @@ -632,12 +647,12 @@ def _redirect_stream(self): if len(line) > 0: self.error = line - def abort(self): + def abort(self) -> None: """Abort running process, used by main thread to signal an abort""" self._want_abort = True -def _formatMsg(text): +def _formatMsg(text: str) -> str: """Format error messages for dialogs""" message = "" for line in text.splitlines(): @@ -660,14 +675,14 @@ def _formatMsg(text): def RunCommand( prog, flags="", - overwrite=False, - quiet=False, - verbose=False, + overwrite: bool = False, + quiet: bool = False, + verbose: bool = False, parent=None, - read=False, + read: bool = False, parse=None, - stdin=None, - getErrorMsg=False, + stdin: TextIO | None = None, + getErrorMsg: bool = False, env=None, **kwargs, ): @@ -717,7 +732,7 @@ def RunCommand( ps = grass.start_command(prog, flags, overwrite, quiet, verbose, env=env, **kwargs) - if stdin: + if stdin and ps.stdin: ps.stdin.write(encode(stdin)) ps.stdin.close() ps.stdin = None @@ -764,7 +779,7 @@ def 
RunCommand( return stdout, _formatMsg(stderr) -def GetDefaultEncoding(forceUTF8=False): +def GetDefaultEncoding(forceUTF8: bool = False) -> str: """Get default system encoding :param bool forceUTF8: force 'UTF-8' if encoding is not defined @@ -786,4 +801,4 @@ def GetDefaultEncoding(forceUTF8=False): return enc -_enc = GetDefaultEncoding() # define as global variable +_enc: str = GetDefaultEncoding() # define as global variable diff --git a/gui/wxpython/core/gconsole.py b/gui/wxpython/core/gconsole.py index cde41710cd9..d5fc21c5b0d 100644 --- a/gui/wxpython/core/gconsole.py +++ b/gui/wxpython/core/gconsole.py @@ -316,7 +316,7 @@ def write(self, s): if "GRASS_INFO_PERCENT" in line: value = int(line.rsplit(":", 1)[1].strip()) - progressValue = value if value >= 0 and value < 100 else 0 + progressValue = value if 0 <= value < 100 else 0 elif "GRASS_INFO_MESSAGE" in line: self.type = "message" self.message += line.split(":", 1)[1].strip() + "\n" diff --git a/gui/wxpython/core/giface.py b/gui/wxpython/core/giface.py index 3a1d8c61612..af7accf19b3 100644 --- a/gui/wxpython/core/giface.py +++ b/gui/wxpython/core/giface.py @@ -22,9 +22,6 @@ from grass.pydispatch.signal import Signal -# to disable Abstract class not referenced -# pylint: disable=R0921 - class Notification: """Enum class for notifications suggestions. @@ -91,7 +88,7 @@ def GetLayersByName(self, name): .. todo:: if common usage is just to check the presence of layer, - intoroduce a new method ContainsLayerByName(name) + introduce a new method ContainsLayerByName(name) """ raise NotImplementedError diff --git a/gui/wxpython/core/globalvar.py b/gui/wxpython/core/globalvar.py index 7624183796c..a5ce362f8c8 100644 --- a/gui/wxpython/core/globalvar.py +++ b/gui/wxpython/core/globalvar.py @@ -95,7 +95,8 @@ def CheckForWx(): version = parse_version_string(wx.__version__) if version < WXPY3_MIN_VERSION: - raise ValueError("Your wxPython version is {}".format(wx.__version__)) + msg = "Your wxPython version is {}".format(wx.__version__) + raise ValueError(msg) return except ImportError as e: print("ERROR: wxGUI requires wxPython. 
{}".format(e), file=sys.stderr) @@ -123,14 +124,12 @@ def CheckForWx(): import wx.lib.flatnotebook as FN -""" -Query layer (generated for example by selecting item in the Attribute Table Manager) -Deleted automatically on re-render action -""" +# Query layer (generated for example by selecting item in the Attribute Table Manager) +# Deleted automatically on re-render action # temporal query layer (removed on re-render action) QUERYLAYER = "qlayer" -"""Style definition for FlatNotebook pages""" +# Style definition for FlatNotebook pages FNPageStyle = ( FN.FNB_NODRAG | FN.FNB_TABS_BORDER_SIMPLE @@ -142,7 +141,7 @@ def CheckForWx(): FN.FNB_BOTTOM | FN.FNB_NODRAG | FN.FNB_NO_NAV_BUTTONS | FN.FNB_NO_X_BUTTON ) -"""Dialog widget dimension""" +# Dialog widget dimension DIALOG_SPIN_SIZE = (150, -1) DIALOG_COMBOBOX_SIZE = (300, -1) DIALOG_GSELECT_SIZE = (400, -1) @@ -233,21 +232,21 @@ def UpdateGRASSAddOnCommands(eList=None): Debug.msg(1, "Number of GRASS AddOn commands: %d", nCmd) -"""@brief Collected GRASS-relared binaries/scripts""" +# Collected GRASS-related binaries/scripts grassCmd, grassScripts = get_commands() Debug.msg(1, "Number of core GRASS commands: %d", len(grassCmd)) UpdateGRASSAddOnCommands() -"""@Toolbar icon size""" +# Toolbar icon size toolbarSize = (24, 24) -"""@Check version of wxPython, use agwStyle for 2.8.11+""" +# Check version of wxPython, use agwStyle for 2.8.11+ hasAgw = CheckWxVersion([2, 8, 11, 0]) -wxPythonPhoenix = CheckWxPhoenix() +wxPythonPhoenix: bool = CheckWxPhoenix() gtk3 = "gtk3" in wx.PlatformInfo -"""@Add GUIDIR/scripts into path""" +# Add GUIDIR/scripts into path os.environ["PATH"] = os.path.join(GUIDIR, "scripts") + os.pathsep + os.environ["PATH"] ignoredCmdPattern = ( diff --git a/gui/wxpython/core/layerlist.py b/gui/wxpython/core/layerlist.py index 5b14a4039fd..43b1a4d118d 100644 --- a/gui/wxpython/core/layerlist.py +++ b/gui/wxpython/core/layerlist.py @@ -38,8 +38,9 @@ def GetSelectedLayers(self, activeOnly=True): layers = [] for layer in self._list: if layer.IsSelected(): - if activeOnly and layer.IsActive(): - layers.append(layer) + if activeOnly: + if layer.IsActive(): + layers.append(layer) else: layers.append(layer) return layers @@ -63,11 +64,7 @@ def GetLayersByTypes(self, mapTypes): :param mapTypes: list of types """ - layers = [] - for layer in self._list: - if layer.mapType in mapTypes: - layers.append(layer) - return layers + return [layer for layer in self._list if layer.mapType in mapTypes] def AddNewLayer( self, @@ -222,15 +219,15 @@ def SetName(self, name): len(fullName) == 1 and self._mapType != "rgb" ): # skip checking rgb maps for now if self._mapType is None: - raise ValueError( - "To set layer name, the type of layer must be specified." - ) + msg = "To set layer name, the type of layer must be specified." 
+ raise ValueError(msg) res = gcore.find_file( name=fullName, element=self._internalTypes[self._mapType] ) if not res["mapset"]: - raise ValueError("Map <{name}> not found.".format(name=name)) + msg = "Map <{name}> not found.".format(name=name) + raise ValueError(msg) self._name = name + "@" + res["mapset"] else: self._name = name @@ -263,7 +260,8 @@ def SetMapType(self, mapType): :param mapType: can be 'raster', 'vector', 'raster_3d' """ if mapType not in self._mapTypes: - raise ValueError("Wrong map type used: {mtype}".format(mtype=mapType)) + msg = "Wrong map type used: {mtype}".format(mtype=mapType) + raise ValueError(msg) self._mapType = mapType @@ -282,9 +280,8 @@ def SetOpacity(self, opacity): :param float opacity: value between 0 and 1 """ if not (0 <= opacity <= 1): - raise ValueError( - "Opacity value must be between 0 and 1, not {op}.".format(op=opacity) - ) + msg = "Opacity value must be between 0 and 1, not {op}.".format(op=opacity) + raise ValueError(msg) self._opacity = opacity opacity = property(fget=GetOpacity, fset=SetOpacity) diff --git a/gui/wxpython/core/menutree.py b/gui/wxpython/core/menutree.py index b8f60e9610a..a418ffcd98a 100644 --- a/gui/wxpython/core/menutree.py +++ b/gui/wxpython/core/menutree.py @@ -136,7 +136,7 @@ def _createItem(self, item, node): elif item.tag == "menu": self._createMenu(item, node) else: - raise ValueError(_("Unknow tag %s") % item.tag) + raise ValueError(_("Unknown tag %s") % item.tag) def GetModel(self, separators=False): """Returns copy of model with or without separators @@ -256,7 +256,7 @@ def collectParents(node, parents): else: import grass.script.core as gscore - gscore.fatal("Unknown value for parameter menu: " % menu) + gscore.fatal("Unknown value for parameter menu: %s" % menu) if action == "strings": menudata.PrintStrings(sys.stdout) @@ -269,6 +269,6 @@ def collectParents(node, parents): else: import grass.script.core as gscore - gscore.fatal("Unknown value for parameter action: " % action) + gscore.fatal("Unknown value for parameter action: %s" % action) sys.exit(0) diff --git a/gui/wxpython/core/render.py b/gui/wxpython/core/render.py index b5fdf34d7dd..26a984990e7 100644 --- a/gui/wxpython/core/render.py +++ b/gui/wxpython/core/render.py @@ -116,9 +116,7 @@ def __init__( self.name = name if self.type == "command": - self.cmd = [] - for c in cmd: - self.cmd.append(cmdlist_to_tuple(c)) + self.cmd = [] + [cmdlist_to_tuple(c) for c in cmd] else: self.cmd = cmdlist_to_tuple(cmd) @@ -326,9 +324,7 @@ def SetOpacity(self, value): def SetCmd(self, cmd): """Set new command for layer""" if self.type == "command": - self.cmd = [] - for c in cmd: - self.cmd.append(cmdlist_to_tuple(c)) + self.cmd = [] + [cmdlist_to_tuple(c) for c in cmd] else: self.cmd = cmdlist_to_tuple(cmd) Debug.msg(3, "Layer.SetCmd(): cmd='%s'" % self.GetCmd(string=True)) @@ -658,7 +654,7 @@ def Render(self, force=False, windres=False): def OnRenderDone(self, env): """Rendering process done - Make image composiotion, emits updateMap event. + Make image composition, emits updateMap event. 
""" maps = [] masks = [] @@ -1055,8 +1051,7 @@ def GetRegion( env["GISRC"] = self.gisrc # do not update & shell style output - cmd = {} - cmd["flags"] = "ugpc" + cmd = {"flags": "ugpc"} if default: cmd["flags"] += "d" @@ -1393,7 +1388,7 @@ def AddLayer( def DeleteAllLayers(self, overlay=False): """Delete all layers - :param overlay: True to delete also overlayes + :param overlay: True to also delete overlays """ self.layers = [] if overlay: @@ -1409,9 +1404,9 @@ def DeleteLayer(self, layer, overlay=False): """ Debug.msg(3, "Map.DeleteLayer(): name=%s" % layer.name) - _list = self.overlays if overlay else self.layers + list_ = self.overlays if overlay else self.layers - if layer in _list: + if layer in list_: if layer.mapfile: base, mapfile = os.path.split(layer.mapfile) tempbase = mapfile.split(".")[0] @@ -1428,7 +1423,7 @@ def DeleteLayer(self, layer, overlay=False): if os.path.isfile(layer._legrow): os.remove(layer._legrow) - _list.remove(layer) + list_.remove(layer) self.layerRemoved.emit(layer=layer) return layer @@ -1563,10 +1558,10 @@ def GetLayerIndex(self, layer, overlay=False): :return: layer index :return: -1 if layer not found """ - _list = self.overlays if overlay else self.layers + list_ = self.overlays if overlay else self.layers - if layer in _list: - return _list.index(layer) + if layer in list_: + return list_.index(layer) return -1 @@ -1656,10 +1651,7 @@ def GetOverlay(self, id, list=False): :return: overlay (list=False) :return: None (list=False) if no overlay or more overlays found """ - ovl = [] - for overlay in self.overlays: - if overlay.id == id: - ovl.append(overlay) + ovl = [overlay for overlay in self.overlays if overlay.id == id] if not list: if len(ovl) != 1: diff --git a/gui/wxpython/core/settings.py b/gui/wxpython/core/settings.py index 05c6579e636..aa51f8fb207 100644 --- a/gui/wxpython/core/settings.py +++ b/gui/wxpython/core/settings.py @@ -1230,12 +1230,13 @@ def GetDisplayVectSettings(): % UserSettings.Get(group="vectorLayer", key="point", subkey="size"), ) ) - types = [] - for ftype in ["point", "line", "boundary", "centroid", "area", "face"]: + types = [ + ftype + for ftype in ["point", "line", "boundary", "centroid", "area", "face"] if UserSettings.Get( group="vectorLayer", key="showType", subkey=[ftype, "enabled"] - ): - types.append(ftype) + ) + ] settings.append("type=%s" % ",".join(types)) if UserSettings.Get(group="vectorLayer", key="randomColors", subkey="enabled"): diff --git a/gui/wxpython/core/toolboxes.py b/gui/wxpython/core/toolboxes.py index 46b886e4996..d3b50cb1ea5 100644 --- a/gui/wxpython/core/toolboxes.py +++ b/gui/wxpython/core/toolboxes.py @@ -308,7 +308,7 @@ def toolboxes2menudata(mainMenu, toolboxes, userToolboxes, wxguiItems, moduleIte userHasToolboxes = False - # in case user has empty toolboxes file (to avoid genereation) + # in case user has empty toolboxes file (to avoid generation) if userToolboxes and userToolboxes.findall(".//toolbox"): _expandUserToolboxesItem(root, userToolboxes) _expandToolboxes(root, userToolboxes) @@ -746,7 +746,7 @@ def _convertTree(root): def _getXMLString(root): """Converts XML tree to string - Since it is usually requier, this function adds a comment (about + Since it is usually required, this function adds a comment (about autogenerated file) to XML file. 
:return: XML as string diff --git a/gui/wxpython/core/units.py b/gui/wxpython/core/units.py index 46b8f91266b..51a3d81174f 100644 --- a/gui/wxpython/core/units.py +++ b/gui/wxpython/core/units.py @@ -195,7 +195,7 @@ def formatDist(distance, mapunits): outdistance = round(distance / divisor, 1) elif (distance / divisor) > 0.0: outdistance = round( - distance / divisor, int(math.ceil(3 - math.log10(distance / divisor))) + distance / divisor, math.ceil(3 - math.log10(distance / divisor)) ) else: outdistance = float(distance / divisor) diff --git a/gui/wxpython/core/utils.py b/gui/wxpython/core/utils.py index 6a39a158555..47fd14003b3 100644 --- a/gui/wxpython/core/utils.py +++ b/gui/wxpython/core/utils.py @@ -217,11 +217,7 @@ def GetValidLayerName(name): cIdx = 0 retNameList = list(retName) for c in retNameList: - if ( - not (c >= "A" and c <= "Z") - and not (c >= "a" and c <= "z") - and not (c >= "0" and c <= "9") - ): + if not ("A" <= c <= "Z") and not ("a" <= c <= "z") and not ("0" <= c <= "9"): retNameList[cIdx] = "_" cIdx += 1 retName = "".join(retNameList) @@ -311,7 +307,8 @@ def ListOfMapsets(get="ordered"): mapsets_ordered.append(mapset) return mapsets_ordered - raise ValueError("Invalid value for 'get' parameter of ListOfMapsets()") + msg = "Invalid value for 'get' parameter of ListOfMapsets()" + raise ValueError(msg) def ListSortLower(list): @@ -437,7 +434,7 @@ def __ll_parts(value, reverse=False, precision=3): if value == 0.0: return "%s%.*f" % ("00:00:0", precision, 0.0) - d = int(int(value)) + d = int(value) m = int((value - d) * 60) s = ((value - d) * 60 - m) * 60 if m < 0: @@ -849,8 +846,7 @@ def StoreEnvVariable(key, value=None, envFile=None): # update environmental variables if value is None: - if key in environ: - del environ[key] + environ.pop(key, None) else: environ[key] = value @@ -922,9 +918,8 @@ def SetAddOnPath(addonPath=None, key="PATH"): "white": (255, 255, 255), "yellow": (255, 255, 0), } -rgb2str = {} -for s, r in str2rgb.items(): - rgb2str[r] = s + +rgb2str = {r: s for s, r in str2rgb.items()} # ensure that gray value has 'gray' string and not 'grey' rgb2str[str2rgb["gray"]] = "gray" # purple is defined as nickname for violet in lib/gis @@ -978,9 +973,7 @@ def color_resolve(color): "d.polar": "polar", "d.legend.vect": "vectleg", } -ltype2command = {} -for cmd, ltype in command2ltype.items(): - ltype2command[ltype] = cmd +ltype2command = {ltype: cmd for cmd, ltype in command2ltype.items()} def GetGEventAttribsForHandler(method, event): diff --git a/gui/wxpython/core/workspace.py b/gui/wxpython/core/workspace.py index dd460a68195..bd09a9354a6 100644 --- a/gui/wxpython/core/workspace.py +++ b/gui/wxpython/core/workspace.py @@ -1461,7 +1461,7 @@ def __writeNvizVector(self, data): self.indent -= 4 def __writeNvizState(self, view, iview, light, constants): - """ "Save Nviz properties (view, light) to workspace + """Save Nviz properties (view, light) to workspace :param view: Nviz view properties :param iview: Nviz internal view properties diff --git a/gui/wxpython/datacatalog/g.gui.datacatalog.html b/gui/wxpython/datacatalog/g.gui.datacatalog.html index 9a918dd3039..8fe684349ba 100644 --- a/gui/wxpython/datacatalog/g.gui.datacatalog.html +++ b/gui/wxpython/datacatalog/g.gui.datacatalog.html @@ -33,12 +33,12 @@

    NOTES

    WARNING

    When renaming, copying or deleting maps outside of Data Catalog, you need to reload -the current mapset or entire database, because it is currently not synchronised. +the current mapset or entire database, because it is currently not synchronized.

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/datacatalog/tree.py b/gui/wxpython/datacatalog/tree.py index bf06fd11ab8..76aee7e5eff 100644 --- a/gui/wxpython/datacatalog/tree.py +++ b/gui/wxpython/datacatalog/tree.py @@ -1892,6 +1892,7 @@ def done(event): gs.try_remove(event.userData) for i in range(len(self.selected_layer)): + cmd: list[str] = [] if self.selected_layer[i].data["type"] == "raster": cmd = ["r.info"] elif self.selected_layer[i].data["type"] == "vector": diff --git a/gui/wxpython/dbmgr/base.py b/gui/wxpython/dbmgr/base.py index aeb1a2b3090..20ddbc47f25 100644 --- a/gui/wxpython/dbmgr/base.py +++ b/gui/wxpython/dbmgr/base.py @@ -747,7 +747,7 @@ def __init__( :param item: item from Layer Tree :param log: log window :param statusbar: widget with statusbar - :param kwagrs: other wx.Frame's arguments + :param kwargs: other wx.Frame's arguments """ # stores all data, which are shared by pages @@ -918,7 +918,7 @@ def __init__(self, parent, parentDbMgrBase): self.listOfCommands = [] self.listOfSQLStatements = [] - # initializet pages + # initialize pages self.pages = self.parentDbMgrBase.pages # shared data among pages @@ -1138,7 +1138,7 @@ def __init__(self, parent, parentDbMgrBase, onlyLayer=-1): def AddLayer(self, layer, pos=-1): """Adds tab which represents table and enables browse it - :param layer: vector map layer conntected to table + :param layer: vector map layer connected to table :param pos: position of tab, if -1 it is added to end :return: True if layer was added @@ -4030,7 +4030,7 @@ def Update(self, driver, database, table, column): database=database, ) if not dataStr: - GError(parent=self.parent, message=_("Unable to calculte statistics.")) + GError(parent=self.parent, message=_("Unable to calculate statistics.")) self.Close() return @@ -4039,7 +4039,7 @@ def Update(self, driver, database, table, column): GError( parent=self.parent, message=_( - "Unable to calculte statistics. " + "Unable to calculate statistics. " "Invalid number of lines %d (should be %d)." ) % (len(dataLines), len(stats)), @@ -4066,7 +4066,7 @@ def Update(self, driver, database, table, column): ) if not dataVar: GWarning( - parent=self.parent, message=_("Unable to calculte standard deviation.") + parent=self.parent, message=_("Unable to calculate standard deviation.") ) varSum = 0 for var in decode(dataVar).splitlines(): diff --git a/gui/wxpython/dbmgr/g.gui.dbmgr.html b/gui/wxpython/dbmgr/g.gui.dbmgr.html index 9ea595a754e..114e36fd46a 100644 --- a/gui/wxpython/dbmgr/g.gui.dbmgr.html +++ b/gui/wxpython/dbmgr/g.gui.dbmgr.html @@ -41,7 +41,7 @@

    SQL Builder

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    @@ -74,7 +74,7 @@

    SEE ALSO

    AUTHORS

    -Martin Landa, FBK-irst (2007-2008), +Martin Landa, FBK-irst (2007-2008), Trento, Italy, and OSGeoREL at the Czech Technical University in Prague, Czech Republic
    Michael Barton, Arizona State University, USA
    diff --git a/gui/wxpython/dbmgr/manager.py b/gui/wxpython/dbmgr/manager.py index c3821abb4ef..86c522282fd 100644 --- a/gui/wxpython/dbmgr/manager.py +++ b/gui/wxpython/dbmgr/manager.py @@ -65,7 +65,7 @@ def __init__( :param item: item from Layer Tree :param log: log window :param selection: name of page to be selected - :param kwagrs: other wx.Frame's arguments + :param kwargs: other wx.Frame's arguments """ self.parent = parent try: diff --git a/gui/wxpython/dbmgr/sqlbuilder.py b/gui/wxpython/dbmgr/sqlbuilder.py index 51010e49677..fd9378846d7 100644 --- a/gui/wxpython/dbmgr/sqlbuilder.py +++ b/gui/wxpython/dbmgr/sqlbuilder.py @@ -49,7 +49,7 @@ class SQLBuilder(wx.Frame): - """SQLBuider class + """SQLBuilder class Base class for classes, which builds SQL statements. """ @@ -314,8 +314,8 @@ def _doLayout(self, modeChoices, showDbInfo=False): flag=wx.LEFT | wx.RIGHT | wx.BOTTOM | wx.EXPAND, border=5, ) - # self.pagesizer.Add(self.btn_uniqe,0,wx.ALIGN_LEFT|wx.TOP,border=5) - # self.pagesizer.Add(self.btn_uniqesample,0,wx.ALIGN_LEFT|wx.TOP,border=5) + # self.pagesizer.Add(self.btn_unique,0,wx.ALIGN_LEFT|wx.TOP,border=5) + # self.pagesizer.Add(self.btn_uniquesample,0,wx.ALIGN_LEFT|wx.TOP,border=5) self.pagesizer.Add( self.btn_logicpanel, proportion=0, flag=wx.ALIGN_CENTER_HORIZONTAL ) diff --git a/gui/wxpython/dbmgr/vinfo.py b/gui/wxpython/dbmgr/vinfo.py index c32ac5572b4..94dda140406 100644 --- a/gui/wxpython/dbmgr/vinfo.py +++ b/gui/wxpython/dbmgr/vinfo.py @@ -120,9 +120,7 @@ def SelectByPoint(self, queryCoords, qdist): return None # process attributes - ret = {} - for key in ["Category", "Layer", "Table", "Id"]: - ret[key] = [] + ret = {key: [] for key in ["Category", "Layer", "Table", "Id"]} for record in data: if "Table" not in record: diff --git a/gui/wxpython/docs/wxGUI.components.html b/gui/wxpython/docs/wxGUI.components.html index 5dd0cb3b657..b1be3d092f5 100644 --- a/gui/wxpython/docs/wxGUI.components.html +++ b/gui/wxpython/docs/wxGUI.components.html @@ -1,6 +1,13 @@ +

    KEYWORDS

+general, GUI

    DESCRIPTION

+ List of available wxGUI components:

    SEE ALSO

    + + + wxGUI + diff --git a/gui/wxpython/docs/wxGUI.html b/gui/wxpython/docs/wxGUI.html index 1c92004e4d3..70643d6b24f 100644 --- a/gui/wxpython/docs/wxGUI.html +++ b/gui/wxpython/docs/wxGUI.html @@ -110,38 +110,38 @@

    Layer Manager Toolbar

    icon  - Add 3D raster map layer
    + Add 3D raster map layer
    Adds 3D raster map to layer tree.
    icon  - Add RGB raster layer
    + Add RGB raster layer
    Combines and displays three raster maps defined as red, green, - and blue channels to create an RGB color map, - see d.rgb.
    + and blue channels to create an RGB color map, + see d.rgb.
    icon  - Add HIS raster layer
    + Add HIS raster layer
    Combines and displays two or three raster maps defined as hue, - intensity, and (optionally) saturation channels to create a color map, - see d.his.
    + intensity, and (optionally) saturation channels to create a color map, + see d.his.
    icon  - Add shaded relief raster map layer
    + Add shaded relief raster map layer
    Adds shaded relief raster map layer, see r.relief and d.shade.
    icon  - Add raster arrows layer
    + Add raster arrows layer
    Adds map of raster cells with directional arrows drawn. Arrow - direction and length are determined by separate aspect/directional map - and (optional) slope/intensity map, - see d.rast.arrow.
    + direction and length are determined by separate aspect/directional map + and (optional) slope/intensity map, + see d.rast.arrow.
    icon  - Add raster numbers layer
    + Add raster numbers layer
    Adds map of raster cells with numbers representing the cell values, - see d.rast.num.
    + see d.rast.num.
    icon  @@ -155,26 +155,26 @@

    Layer Manager Toolbar

    icon  - Add thematic area (choropleth) map layer - (for all vector types)
    + Add thematic area (choropleth) map layer + (for all vector types)
    Adds layer for thematic display values from a numeric attribute - column associated with a vector map. Options include: thematic display - type (graduated colors or point sizes), methods for creating display - intervals, SQL query of attribute column to limit vector objects to - display, control of point icon types and sizes, control of thematic - color schemes, creation of legend for thematic map, and saving the - results of thematic mapping to a ps.map instructions file for later - printing, - see d.vect.thematic.
    + column associated with a vector map. Options include: thematic display + type (graduated colors or point sizes), methods for creating display + intervals, SQL query of attribute column to limit vector objects to + display, control of point icon types and sizes, control of thematic + color schemes, creation of legend for thematic map, and saving the + results of thematic mapping to a ps.map instructions file for later + printing, + see d.vect.thematic.
    icon  - Add thematic chart layer (for vector points)
    + Add thematic chart layer (for vector points)
    Adds layer in which pie or bar charts can be automatically created - at vector point locations. Charts display values from selected columns - in the associated attribute table. Options include: chart type, layer - and attributes to chart, chart colors, and chart size (fixed or based - on attribute column), - see d.vect.chart.
    + at vector point locations. Charts display values from selected columns + in the associated attribute table. Options include: chart type, layer + and attributes to chart, chart colors, and chart size (fixed or based + on attribute column), + see d.vect.chart.
    icon  @@ -186,32 +186,32 @@

    Layer Manager Toolbar

Opens a dropdown menu that allows the user to select to:
    icon  - Add overlay grids and lines
    + Add overlay grids and lines
    Adds layer to display regular grid - see d.grid
    + see d.grid
    icon  - Add labels layer for vector objects (from existing labels file)
    + Add labels layer for vector objects (from existing labels file)
    Add a layer of text from a labels file for vector objects - created with the v.label module. - A labels file can also be created with a text editor, - see d.labels.
    + created with the v.label module. + A labels file can also be created with a text editor, + see d.labels.
    icon  - Add geodesic line layer
    + Add geodesic line layer
    Add layer to display geodesic line for latitude/longitude projects only, - see d.geodesic
    + see d.geodesic
    icon  - Add rhumbline layer -
    Add layer to display rhumblines (for latitude/longitude projects only), + Add rhumbline layer +
    Add layer to display rhumblines (for latitude/longitude projects only), see d.rhumbline.
    icon  - Add command layer
    + Add command layer
    Adds a layer in which a GRASS GIS command or command list can be entered. - For a command list use the semi-colon (";") symbol as a separator. - For example: + For a command list use the semi-colon (";") symbol as a separator. + For example:
     d.rast soils;d.rast -o roads;d.vect streams col=blue
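As a rough sketch only (the wxGUI has its own handling of command layers), the ";" convention above can be illustrated by splitting such a string into per-command argument lists in Python:

    import shlex

    command_layer = 'd.rast soils;d.rast -o roads;d.vect streams col=blue'
    # split on ";" first, then tokenize each piece like a shell would
    commands = [shlex.split(part) for part in command_layer.split(";") if part.strip()]
    # -> [['d.rast', 'soils'], ['d.rast', '-o', 'roads'], ['d.vect', 'streams', 'col=blue']]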
    @@ -241,37 +241,37 @@ 

    Layer Manager Toolbar

    icon  - Import raster data
    + Import raster data
    Import selected raster data into GRASS using r.in.gdal and load them into current layer tree.
    icon  - Link external raster data
    + Link external raster data
    Link selected external raster data as GRASS raster maps (using r.external) and load them into current layer tree.
    icon  - Set raster output format
    + Set raster output format
    Define external format for newly created raster maps (see r.external.out for details)
    icon  - Import vector data
    + Import vector data
    Import selected vector data into GRASS using v.in.ogr and load them into current layer tree.
    icon  - Link external vector data
    + Link external vector data
    Link selected external vector data as GRASS vector maps (using v.external) and load them into current layer tree.
    icon  - Set vector output format
    + Set vector output format
    Define external format for newly created vector maps (see v.external.out for details)
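The import and link actions described above map onto the modules they reference; a minimal scripted sketch of the raster case, assuming a running GRASS session and a hypothetical file /data/dem.tif:

    import grass.script as gs

    # import copies the data into the current mapset
    gs.run_command("r.in.gdal", input="/data/dem.tif", output="dem")
    # link only registers the external file as a GRASS raster map
    gs.run_command("r.external", input="/data/dem.tif", output="dem_linked")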
    @@ -375,10 +375,10 @@

    Map Display Toolbar

    icon  Query raster/vector maps
    Query selected raster, RGB raster (all three map channels will be - queried), or vector map(s) using the mouse. Map(s) must be selected - before query. Vector charts and thematic vector maps cannot be - queried. The results of the query will be displayed in a dialog. - See r.what, v.what. + queried), or vector map(s) using the mouse. Map(s) must be selected + before query. Vector charts and thematic vector maps cannot be + queried. The results of the query will be displayed in a dialog. + See r.what, v.what.
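The query tool above points to r.what and v.what; a minimal scripted sketch of the raster case (the map name and coordinates are hypothetical, and a running GRASS session is assumed):

    import grass.script as gs

    # report the raster value(s) at one east,north position
    print(gs.read_command("r.what", map="elevation", coordinates=(635818.8, 221342.4)))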
    icon  @@ -441,7 +441,7 @@

    Map Display Toolbar

  • Zoom to saved region. Zooms to previously saved named region.
  • Set computational region extent from display. - The computational region (the mapset's WIND file) + The computational region (the mapset's WIND file) is set to match the current display extent (does not change the resolution), see g.region.
  • Set computational region extent interactively. @@ -473,9 +473,9 @@

    Map Display Toolbar

    icon  Profile surface map
    Interactively create profile of a raster map. Profile transect is - drawn with the mouse in map display. The profile may be of the - displayed map or a different map. Up to three maps can be profiled - simultaneously.
    + drawn with the mouse in map display. The profile may be of the + displayed map or a different map. Up to three maps can be profiled + simultaneously.
  • icon  Create bivariate scatterplot of raster maps
    @@ -546,25 +546,25 @@

    Map Display Toolbar

    2D view
    Normal GIS display. All active layers are composited and displayed - in 2D mode.
    + in 2D mode.
    3D view
    Experimental replacement for NVIZ. Displays all active layers in - 3D perspective using OpenGL. A new control panel opens to manage the - 3D view. 3D view can be zoomed, panned, rotated, and tilted. The - vertical exaggeration of rasters and 3D vectors can be set. Various - color and lighten settings are possible. Not yet functional for - Windows platforms
    + 3D perspective using OpenGL. A new control panel opens to manage the + 3D view. 3D view can be zoomed, panned, rotated, and tilted. The + vertical exaggeration of rasters and 3D vectors can be set. Various + color and lighten settings are possible. Not yet functional for + Windows platforms
    Vector digitizer
    Puts display into vector digitizing mode and opens a new digitizing - toolbar. The user can digitize a new vector map or edit an existing - map.
    + toolbar. The user can digitize a new vector map or edit an existing + map.
    Raster digitizer
    Puts display into raster digitizing mode and opens a new digitizing - toolbar. The user can digitize a new raster map or edit an existing - map.
    + toolbar. The user can digitize a new raster map or edit an existing + map. @@ -657,9 +657,9 @@

    Starting the GUI from command line

    Alternatively, it may be defined in the main configuration file -($HOME/.grass8/rc on GNU/Linux and macOS, %APPDATA%\Roaming\GRASS8\rc -on MS Windows) using the GUI variable set to wxpython -(GUI: wxpython) or by the environmental variable GRASS_GUI. +($HOME/.grass8/rc on GNU/Linux and macOS, %APPDATA%\Roaming\GRASS8\rc +on MS Windows) using the GUI variable set to wxpython +(GUI: wxpython) or by the environmental variable GRASS_GUI. To start with a previously saved workspace file: @@ -670,7 +670,7 @@

    Starting the GUI from command line

    The user can also start GRASS from the shell command line with the wxGUI -specifying the --gui switch: +specifying the --gui switch:

     grass --gui
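A small sketch of the GRASS_GUI route mentioned above, for launching from a Python script instead of a shell (assumes the grass startup command is on PATH):

    import os
    import subprocess

    env = dict(os.environ, GRASS_GUI="wxpython")  # same effect as "GUI: wxpython" in the rc file
    subprocess.run(["grass"], env=env)            # the --gui switch shown above is the one-off equivalent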
    @@ -680,20 +680,20 @@ 

    Starting the GUI from command line

    The GUI can be quit by selecting the 'File > Quit GRASS GIS' menu item which gives options to close only GUI or to quit GRASS GIS entirely if GRASS GIS is running with a command line (a shell in a terminal application). -Exiting the shell (typically by the exit command) ends the GRASS session +Exiting the shell (typically by the exit command) ends the GRASS session including any running GUIs.

    Background information

    wxGUI is a native Graphical User Interface (GUI) for GRASS GIS written in Python -using wxPython library. +using wxPython library.

    SEE ALSO

    - wxGUI components
- wxGUI module dialogs
+ wxGUI components,
+ wxGUI module dialogs,
wxGUI toolboxes (menu customization)
    diff --git a/gui/wxpython/docs/wxGUI.iscatt.html b/gui/wxpython/docs/wxGUI.iscatt.html index 85ef25d164d..6e263d2d1fb 100644 --- a/gui/wxpython/docs/wxGUI.iscatt.html +++ b/gui/wxpython/docs/wxGUI.iscatt.html @@ -10,8 +10,8 @@

    DESCRIPTION

Interactive Scatter Plot Tool allows analyzing a group of raster maps. The tool is integrated into Supervised Classification Tool (see the screen shot below). -Also it is possible to launch it from Map Display Window (Analyze map -→ Interactive Scatter Plot Tool). +Also it is possible to launch it from Map Display Window (Analyze map +→ Interactive Scatter Plot Tool). The main idea of the tool is that everything is linked together (scatter plots together and mapwindow with the scatter plots). @@ -39,7 +39,7 @@

    TOOL CONTROLS LAYOUT

If editing mode is activated (the green polygon tool in the toolbar), the areas which were selected -in the scatter plots are highlighted. In the image you can see this area for scatter plot of bands B_6, B_7 inside the ellipse. +in the scatter plots are highlighted. In the image you can see this area for scatter plot of bands B_6, B_7 inside the ellipse. Opacity and color of the selected area can be set in settings. The area corresponds to the active class (in this case clouds). Selected areas are a subset of the areas which belong to the category.

    @@ -69,9 +69,9 @@

    KNOWN ISSUES

    SEE ALSO

    - wxGUI
+ wxGUI, wxGUI components,
- r.rescale
    + r.rescale

    diff --git a/gui/wxpython/docs/wxGUI.modules.html b/gui/wxpython/docs/wxGUI.modules.html index dd9d3e2b34f..7e8125f7443 100644 --- a/gui/wxpython/docs/wxGUI.modules.html +++ b/gui/wxpython/docs/wxGUI.modules.html @@ -1,5 +1,10 @@ + +

    KEYWORDS

+general, GUI

    DESCRIPTION

    GRASS GIS functionality is organized into modules, which are standalone programs @@ -76,7 +81,7 @@

    Current working directory

    It can be changed in wxGUI menu Settings - GRASS working environment - Change working directory, -or by typing cd and pressing Enter in the wxGUI Command console. +or by typing cd and pressing Enter in the wxGUI Command console. If the working directory is changed to a directory where the input files are, then it is enough to specify just the name of the file instead of the full path. @@ -174,7 +179,7 @@

    NOTES

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/docs/wxGUI.nviz.html b/gui/wxpython/docs/wxGUI.nviz.html index aed1c0b0a8e..df62a7a8d23 100644 --- a/gui/wxpython/docs/wxGUI.nviz.html +++ b/gui/wxpython/docs/wxGUI.nviz.html @@ -319,7 +319,7 @@

    Appearance

    • Lighting for adjusting light source
    • -
    • Fringe for drawing fringes +
    • Fringe for drawing fringes
    • Decorations to display north arrow and scale bar

    @@ -386,7 +386,7 @@

    NOTE

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    @@ -395,10 +395,8 @@

    SEE ALSO

    (especially various video tutorials). -

    - +

    Command-line module m.nviz.image. -

    AUTHORS

    diff --git a/gui/wxpython/docs/wxGUI.toolboxes.html b/gui/wxpython/docs/wxGUI.toolboxes.html index 4920fe07c3f..8679a8eab31 100644 --- a/gui/wxpython/docs/wxGUI.toolboxes.html +++ b/gui/wxpython/docs/wxGUI.toolboxes.html @@ -1,5 +1,10 @@ + +

    KEYWORDS

    + +general, GUI +

    DESCRIPTION

    The Toolboxes is a way to customize items in wxGUI @@ -7,30 +12,30 @@

    DESCRIPTION

      -
    • hide unused menu items in menu (e.g. Imagery, Database) or submenu (e.g. Wildfire modeling) -
    • change order of menu items and subitems -
    • add new menu items (e.g. Temporal) -
    • add addons modules -
    • add your own modules +
    • hide unused menu items in menu (e.g. Imagery, Database) or submenu (e.g. Wildfire modeling)
    • +
    • change order of menu items and subitems
    • +
    • add new menu items (e.g. Temporal)
    • +
    • add addons modules
    • +
    • add your own modules

    -Toolboxes are configured through two XML files (main_menu.xml and -toolboxes.xml) located in your user home -GRASS directory, subdirectory toolboxes - ($HOME/.grass8/toolboxes/ on UNIX). +Toolboxes are configured through two XML files (main_menu.xml and +toolboxes.xml) located in your user home +GRASS directory, subdirectory toolboxes + ($HOME/.grass8/toolboxes/ on UNIX). Currently, there is no GUI front-end for toolboxes, however only simple editing of text files is needed.
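As an aside, a minimal Python sketch for inspecting such a file (the path follows the description above; the subtoolbox/name layout is an assumption based on this page, not a documented API):

    import os
    import xml.etree.ElementTree as ET

    # user's main menu definition as described above
    path = os.path.expanduser("~/.grass8/toolboxes/main_menu.xml")

    tree = ET.parse(path)
    # list the subtoolbox entries that make up the main menu
    for node in tree.getroot().iter("subtoolbox"):
        print(node.get("name"))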

    -

    Brief description of file main_menu.xml

    +

    Brief description of file main_menu.xml

    This file represents the main menu (File, Settings, Raster, ...). By modifying this file you show and hide menu items which are -represented by subtoolbox tag.

    +represented by subtoolbox tag.

    -

    Tag user-toolboxes-list is interpreted as a menu containing a list of all user-defined toolboxes. If not needed it can be removed.

    +

    Tag user-toolboxes-list is interpreted as a menu containing a list of all user-defined toolboxes. If not needed it can be removed.

    -

    Following lines can be copied to .grass8/toolboxes/main_menu.xml +

    Following lines can be copied to .grass8/toolboxes/main_menu.xml and by removing, adding or reordering lines users can change the main menu items. See further examples.

    @@ -52,16 +57,16 @@

    Brief description of file main_menu.xml

    </toolbox>
    -

    Brief description of file toolboxes.xml

    +

    Brief description of file toolboxes.xml

    This file contains structure and description of individual toolboxes. Note that both Raster and e.g. Query raster maps are individual toolboxes although one contains the other. -Tag toolbox contains subtoolbox tags +Tag toolbox contains subtoolbox tags which are defined later in the file. These nested toolboxes are linked -through name attribute.

    +through name attribute.

    -

    Apart from subtoolbox tag, tag toolbox can contain individual items (modules) +

    Apart from subtoolbox tag, tag toolbox can contain individual items (modules) and separators (for visual separation in the menu tree).

    @@ -94,9 +99,9 @@ 

    Brief description of file toolboxes.xml

    To redefine a toolbox (or use it as a template), -copy specific part of file grass7/gui/wxpython/xml/toolboxes.xml +copy specific part of file grass7/gui/wxpython/xml/toolboxes.xml from GRASS installation to a new file in user home -(.grass8/toolboxes/toolboxes.xml) and edit it. +(.grass8/toolboxes/toolboxes.xml) and edit it. Rename this new toolbox.
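A minimal Python sketch of the copy step described above (the source path is an assumption based on a typical installation; GISBASE is only set inside a running GRASS session):

    import os
    import shutil

    # toolboxes.xml shipped with the installation, used as a template
    src = os.path.join(os.environ["GISBASE"], "gui", "wxpython", "xml", "toolboxes.xml")
    dst = os.path.expanduser("~/.grass8/toolboxes/toolboxes.xml")

    os.makedirs(os.path.dirname(dst), exist_ok=True)
    shutil.copyfile(src, dst)  # then edit and rename the copied toolbox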

    EXAMPLES

    @@ -104,7 +109,7 @@

    EXAMPLES

    Hiding menu items

    If we are for example working only with raster data, we can hide menu items Vector and Database. -The file main_menu.xml then contains the following lines +The file main_menu.xml then contains the following lines where we omitted the two toolboxes:

    @@ -129,7 +134,7 @@

    Creating custom toolbox

    In this example we create a new toolbox Favorites containing existing GRASS module and toolbox, custom module created by the user and addon module. -The toolboxes.xml file contains following lines:

    +The toolboxes.xml file contains following lines:

     <?xml version="1.0" encoding="UTF-8"?>
    @@ -157,7 +162,7 @@ 

    Creating custom toolbox

    Optionally, we can add this toolbox to the main menu items. -The main_menu.xml file contains following lines:

    +The main_menu.xml file contains following lines:

     <?xml version="1.0" encoding="UTF-8"?>
    @@ -178,7 +183,7 @@ 

    Creating custom toolbox

    </toolbox>
    -

    If we have user-toolboxes-list tag in the main_menu.xml file, +

    If we have user-toolboxes-list tag in the main_menu.xml file, our custom toolbox will be listed in the automatically added Toolboxes main menu item. The screenshot shows the resulting menu:

    @@ -188,13 +193,13 @@

    Creating custom toolbox

    NOTES

    After the first start of wxGUI with custom toolboxes, -.grass/toolboxes directory will contain file -menudata.xml which is auto-generated and should not be edited.

    +.grass/toolboxes directory will contain file +menudata.xml which is auto-generated and should not be edited.

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/docs/wxGUI.vnet.html b/gui/wxpython/docs/wxGUI.vnet.html index cad3fe0b9cc..f6be4446e3f 100644 --- a/gui/wxpython/docs/wxGUI.vnet.html +++ b/gui/wxpython/docs/wxGUI.vnet.html @@ -8,7 +8,7 @@

    KEYWORDS

    DESCRIPTION

Vector Network Analysis Tool is a graphical front-end -for v.net* modules. It allows perform network analysis +for v.net* modules. It allows perform network analysis directly in wxGUI without the need to use the command line. The tool can be launched from the Layer Manager menu Vector → Network analysis → Vector network analysis @@ -89,7 +89,7 @@

    KNOWN ISSUES

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/gcp/g.gui.gcp.html b/gui/wxpython/gcp/g.gui.gcp.html index 4585de25674..97713d9a763 100644 --- a/gui/wxpython/gcp/g.gui.gcp.html +++ b/gui/wxpython/gcp/g.gui.gcp.html @@ -26,11 +26,11 @@

    DESCRIPTION

    manipulate and analyze GCPs are provided in the toolbar. This panel can be moved out of the GCP manager window by either dragging with the caption or by clicking on the pin button on the right in the caption. - This panel can also be placed below the map displays by dragging. + This panel can also be placed below the map displays by dragging.
  • The two panels in the lower part are used for map and GCP display, the left pane showing a map from the source project and the right pane showing a reference map from the target project. Numbered Ground - Control Points are shown on both map displays. + Control Points are shown on both map displays.
  • @@ -50,7 +50,7 @@

    Components of the GCP Manager

    List of ground control points

    The list of Ground Control Points can be sorted by clicking on a column -header. Clicking on a cloumn header will sort the GCPs ascending, a +header. Clicking on a column header will sort the GCPs ascending, a second click on the same column will sort the GCPs descending. Overall RMS error and individual RMS errors of all points are often improved if the GCP with the highest RMS error is adjusted. Individual coordinates @@ -292,7 +292,7 @@

    GCP Map Display Statusbar

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/gcp/g.gui.gcp.py b/gui/wxpython/gcp/g.gui.gcp.py index 12f0a152090..2201b7c2280 100755 --- a/gui/wxpython/gcp/g.gui.gcp.py +++ b/gui/wxpython/gcp/g.gui.gcp.py @@ -29,7 +29,7 @@ # %end """ -Module to run GCP management tool as stadalone application. +Module to run GCP management tool as standalone application. @author Vaclav Petras (standalone module) """ diff --git a/gui/wxpython/gcp/manager.py b/gui/wxpython/gcp/manager.py index 74ede7189e9..43bbfdc1b8e 100644 --- a/gui/wxpython/gcp/manager.py +++ b/gui/wxpython/gcp/manager.py @@ -23,53 +23,58 @@ @author Original author Michael Barton @author Original version improved by Martin Landa -@author Rewritten by Markus Metz redesign georectfier -> GCP Manage +@author Rewritten by Markus Metz redesign georectifier -> GCP Manage @author Support for GraphicsSet added by Stepan Turek (2012) """ +from __future__ import annotations + import os -import sys import shutil +import sys from copy import copy +from typing import TYPE_CHECKING import wx -from wx.lib.mixins.listctrl import ColumnSorterMixin, ListCtrlAutoWidthMixin import wx.lib.colourselect as csel - from core import globalvar +from wx.lib.mixins.listctrl import ColumnSorterMixin, ListCtrlAutoWidthMixin -if globalvar.wxPythonPhoenix: +if globalvar.wxPythonPhoenix or TYPE_CHECKING: from wx import adv as wiz else: from wx import wizard as wiz import grass.script as gs +# isort: split from core import utils +from core.gcmd import GError, GMessage, GWarning, RunCommand +from core.giface import Notification from core.render import Map -from gui_core.gselect import Select, LocationSelect, MapsetSelect -from gui_core.dialogs import GroupDialog -from gui_core.mapdisp import FrameMixin -from core.gcmd import RunCommand, GMessage, GError, GWarning from core.settings import UserSettings from gcp.mapdisplay import MapPanel -from core.giface import Notification +from gui_core.dialogs import GroupDialog +from gui_core.gselect import LocationSelect, MapsetSelect, Select +from gui_core.mapdisp import FrameMixin from gui_core.wrap import ( - SpinCtrl, + BitmapFromImage, Button, - StaticText, - StaticBox, CheckListBox, - TextCtrl, - Menu, - ListCtrl, - BitmapFromImage, CheckListCtrlMixin, + ListCtrl, + Menu, + SpinCtrl, + StaticBox, + StaticText, + TextCtrl, ) - from location_wizard.wizard import GridBagSizerTitledPage as TitledPage +if TYPE_CHECKING: + from wx.adv import WizardEvent + # # global variables # @@ -510,7 +515,7 @@ def OnMapset(self, event): if not wx.FindWindowById(wx.ID_FORWARD).IsEnabled(): wx.FindWindowById(wx.ID_FORWARD).Enable(True) - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: if event.GetDirection() and (self.xylocation == "" or self.xymapset == ""): GMessage( _( @@ -524,7 +529,7 @@ def OnPageChanging(self, event=None): self.parent.SetSrcEnv(self.xylocation, self.xymapset) - def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: if self.xylocation == "" or self.xymapset == "": wx.FindWindowById(wx.ID_FORWARD).Enable(False) else: @@ -684,7 +689,7 @@ def OnVGroup(self, event): def OnExtension(self, event): self.extension = self.ext_txt.GetValue() - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: if event.GetDirection() and self.xygroup == "": GMessage( _("You must select a valid image/map group in order to continue"), @@ -701,7 +706,7 @@ def OnPageChanging(self, event=None): event.Veto() return 
- def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: global maptype self.groupList = [] @@ -888,7 +893,7 @@ def OnTgtVectSelection(self, event): tgt_map["vector"] = self.tgtvectselection.GetValue() - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: global src_map, tgt_map if event.GetDirection() and (src_map == ""): @@ -900,7 +905,7 @@ def OnPageChanging(self, event=None): self.parent.SwitchEnv("target") - def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: global maptype, src_map, tgt_map self.srcselection.SetElementList(maptype) @@ -995,10 +1000,11 @@ def GetWebServiceLayers(self, ltype=("wms"), name=None): } :return: None when web service map layer name doesn't exist """ - layers = {} - for layer in self.parent._giface.GetLayerList(): - if layer.type in ltype: - layers[str(layer)] = {"type": layer.type, "cmd": layer.cmd} + layers = { + str(layer): {"type": layer.type, "cmd": layer.cmd} + for layer in self.parent._giface.GetLayerList() + if layer.type in ltype + } if name: return layers.get(name) return layers @@ -1026,6 +1032,7 @@ def __init__( Map=None, lmgr=None, ): + # pylint: disable=super-init-not-called; See InitMapDisplay() self.grwiz = grwiz # GR Wizard self._giface = giface @@ -1053,10 +1060,10 @@ def __init__( # register data structures for drawing GCP's # self.pointsToDrawTgt = self.TgtMapWindow.RegisterGraphicsToDraw( - graphicsType="point", setStatusFunc=self.SetGCPSatus + graphicsType="point", setStatusFunc=self.SetGCPStatus ) self.pointsToDrawSrc = self.SrcMapWindow.RegisterGraphicsToDraw( - graphicsType="point", setStatusFunc=self.SetGCPSatus + graphicsType="point", setStatusFunc=self.SetGCPStatus ) # connect to the map windows signals @@ -1401,16 +1408,15 @@ def SetSettings(self): font = self.GetFont() font.SetPointSize(int(spx) + 2) - textProp = {} - textProp["active"] = True - textProp["font"] = font + textProp = {"active": True, "font": font} + self.pointsToDrawSrc.SetPropertyVal("text", textProp) self.pointsToDrawTgt.SetPropertyVal("text", copy(textProp)) # overwrite result map self.overwrite = UserSettings.Get(group="gcpman", key="map", subkey="overwrite") - def SetGCPSatus(self, item, itemIndex): + def SetGCPStatus(self, item, itemIndex): """Before GCP is drawn, decides it's colour and whether it will be drawn. """ @@ -1663,7 +1669,7 @@ def ReloadGCPs(self, event): targetMapWin.UpdateMap(render=False, renderVector=False) def OnFocus(self, event): - # TODO: it is here just to remove old or obsolete beavior of base class + # TODO: it is here just to remove old or obsolete behavior of base class # gcp/MapPanel? 
# self.grwiz.SwitchEnv('source') pass @@ -2886,11 +2892,11 @@ def __init__( size=wx.DefaultSize, style=wx.DEFAULT_DIALOG_STYLE, ): - wx.Dialog.__init__(self, parent, id, title, pos, size, style) """ Dialog to set profile text options: font, title and font size, axis labels and font size """ + wx.Dialog.__init__(self, parent, id, title, pos, size, style) # # initialize variables # diff --git a/gui/wxpython/gmodeler/canvas.py b/gui/wxpython/gmodeler/canvas.py index bfe5f7a90af..642f007e273 100644 --- a/gui/wxpython/gmodeler/canvas.py +++ b/gui/wxpython/gmodeler/canvas.py @@ -329,10 +329,10 @@ def OnRightClick(self, x, y, keys=0, attachment=0): self.frame.Bind(wx.EVT_MENU, self.OnEnable, id=self.popupID["enable"]) if isinstance(shape, (ModelAction, ModelComment)): popupMenu.AppendSeparator() - if isinstance(shape, ModelAction): - popupMenu.Append(self.popupID["label"], _("Set label")) - self.frame.Bind(wx.EVT_MENU, self.OnSetLabel, id=self.popupID["label"]) - if isinstance(shape, (ModelAction, ModelComment)): + if isinstance(shape, ModelAction): + popupMenu.Append(self.popupID["label"], _("Set label")) + self.frame.Bind(wx.EVT_MENU, self.OnSetLabel, id=self.popupID["label"]) + popupMenu.Append(self.popupID["comment"], _("Set comment")) self.frame.Bind(wx.EVT_MENU, self.OnSetComment, id=self.popupID["comment"]) @@ -440,12 +440,8 @@ def _onSelectShape(self, shape, append=False): shape.Select(False, dc) else: shapeList = canvas.GetDiagram().GetShapeList() - toUnselect = [] - if not append: - for s in shapeList: - if s.Selected(): - toUnselect.append(s) + toUnselect = [s for s in shapeList if s.Selected()] if not append else [] shape.Select(True, dc) diff --git a/gui/wxpython/gmodeler/dialogs.py b/gui/wxpython/gmodeler/dialogs.py index c8dd7509e74..1f8ef0ffc91 100644 --- a/gui/wxpython/gmodeler/dialogs.py +++ b/gui/wxpython/gmodeler/dialogs.py @@ -11,7 +11,7 @@ - dialogs::ModelLoopDialog - dialogs::ModelConditionDialog - dialogs::ModelListCtrl - - dialogs::ValiableListCtrl + - dialogs::VariableListCtrl - dialogs::ItemListCtrl - dialogs::ItemCheckListCtrl @@ -978,9 +978,7 @@ def Populate(self, data): bId = action.GetBlockId() bId = _("No") if not bId else _("Yes") options = action.GetParameterizedParams() - params = [] - for f in options["flags"]: - params.append("-{0}".format(f["name"])) + params = ["-{0}".format(f["name"]) for f in options["flags"]] for p in options["params"]: params.append(p["name"]) diff --git a/gui/wxpython/gmodeler/g.gui.gmodeler.html b/gui/wxpython/gmodeler/g.gui.gmodeler.html index a840768abeb..9a31c63c85c 100644 --- a/gui/wxpython/gmodeler/g.gui.gmodeler.html +++ b/gui/wxpython/gmodeler/g.gui.gmodeler.html @@ -46,7 +46,7 @@

    DESCRIPTION

    Main dialog

    The Graphical Modeler can be launched from the Layer Manager menu -File -> Graphical modeler or from the main +File -> Graphical modeler or from the main toolbar icon. It's also available as stand-alone module g.gui.gmodeler. @@ -107,15 +107,15 @@
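For example, from a Python session inside GRASS the stand-alone module can be started like this (a minimal sketch; a running GRASS session is assumed):

    import grass.script as gs

    # opens the Graphical Modeler window
    gs.run_command("g.gui.gmodeler")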

    Components of models

    Different model elements are shown in the figures below.
      -
    • (A) raster data: raster -
    • (B) relation: relation -
    • (C) GRASS module: module -
    • (D) loop: loop -
    • (E) database table: db -
    • (F) 3D raster data: raster3D -
    • (G) vector data: vector -
    • (H) disabled GRASS module: module -
    • (I) comment: comment +
    • (A) raster data: raster
    • +
    • (B) relation: relation
    • +
    • (C) GRASS module: module
    • +
    • (D) loop: loop
    • +
    • (E) database table: db
    • +
    • (F) 3D raster data: raster3D
    • +
    • (G) vector data: vector
    • +
    • (H) disabled GRASS module: module
    • +
    • (I) comment: comment
    @@ -152,8 +152,8 @@

    Components of models

    EXAMPLE

    -In this example the zipcodes_wake vector data and the -elev_state_500m raster data from the North Carolina +In this example the zipcodes_wake vector data and the +elev_state_500m raster data from the North Carolina sample dataset (original raster and vector data) are used to calculate average elevation for every @@ -209,7 +209,7 @@

    Defining the workflow in the Graphical Modeler

    Managing model parameters

    All used modules can be parameterized in the model. That causes launching the dialog with input options for model after the model is run. In this example, -input layers (zipcodes_wake vector map and elev_state_500m +input layers (zipcodes_wake vector map and elev_state_500m raster map) are parameterized. Parameterized elements show their diagram border slightly thicker than those of unparameterized elements. @@ -260,7 +260,7 @@

    Managing model parameters

    Managing model properties

When the user wants to run the model again with the same data or the same names, it is -necessary to use --overwrite option. It will cause maps with identical +necessary to use --overwrite option. It will cause maps with identical names to be overwritten. Instead of setting it for every module separately, it is handy to change the Model Property settings globally. This dialog also includes metadata settings, where the model name, model description @@ -283,9 +283,9 @@

    Defining variables

    Then it is not necessary to set any parameters for input data. The dialog with variable settings is automatically displayed after the model is run. So, instead of model parameters -(e.g. r.import a v.import, see the Figure +(e.g. r.import a v.import, see the Figure Run model dialog above) -there are Variables. +there are Variables.
    @@ -295,13 +295,13 @@

    Defining variables

    -The key point is the usage of % before the substituting variable and -settings in the Variables dialog. For example, in the case of a model variable -raster that points to an input file path and which value is required to be +The key point is the usage of % before the substituting variable and +settings in the Variables dialog. For example, in the case of a model variable +raster that points to an input file path and which value is required to be used as one of inputs for a particular model, it should be specified in the -Variables dialog with its respective name (raster), data type, +Variables dialog with its respective name (raster), data type, default value and description. Then it should be set in the module dialog as -input called %raster. +input called %raster.
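For illustration only, a rough Python sketch of this kind of %-substitution (the variable name raster, the module options and the file path are hypothetical examples, not the Modeler's internal implementation):

    # values entered in the Variables dialog
    variables = {"raster": "/data/elev_state_500m.tif"}

    # module options as written in the module dialog
    options = {"input": "%raster", "output": "elevation"}

    # replace every %name option with the corresponding variable value
    expanded = {
        key: variables[value[1:]] if value.startswith("%") else value
        for key, value in options.items()
    }
    print(expanded)  # {'input': '/data/elev_state_500m.tif', 'output': 'elevation'}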

    @@ -320,13 +320,13 @@

    Defining variables

    Saving the model file

    Finally, the model settings can be stored as a GRASS GIS Model file with -*.gxm extension. The advantage is that it can be shared as a +*.gxm extension. The advantage is that it can be shared as a reusable workflow that may be run also by other users with different data.

    For example, this model can later be used to calculate the average precipitation -for every administrative region in Slovakia using the precip raster data from +for every administrative region in Slovakia using the precip raster data from Slovakia precipitation dataset and administration boundaries of Slovakia from Slovak Geoportal @@ -335,7 +335,7 @@

    Saving the model file

    Handling intermediate data

There can be some data in a model that did not exist before the process and that are not worth maintaining after the process executes. They can -be described as being Intermediate by single clicking using the right +be described as being Intermediate by single clicking using the right mouse button, see the figure below. All such data should be deleted following model completion. The boundary of an intermediate component is a dotted line. @@ -348,7 +348,7 @@

    Handling intermediate data

    Using the Script editor

    By using the Script editor in the Graphical Modeler, the user can add Python code and then -run it with Run button or just save it as a Python script *.py. +run it with Run button or just save it as a Python script *.py. The result is shown in the Figure below:
    @@ -388,21 +388,21 @@

    Using the Script editor

    By default GRASS script package API is used -(grass.script.core.run_command()). This can be changed in the +(grass.script.core.run_command()). This can be changed in the settings. Alternatively also PyGRASS API is supported -(grass.pygrass.modules.Module). +(grass.pygrass.modules.Module).
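A minimal sketch of the two APIs mentioned above (the module and map names are hypothetical; a running GRASS session is assumed):

    import grass.script as gs
    from grass.pygrass.modules import Module

    # GRASS script package API (the default used by the Script editor)
    gs.run_command("r.slope.aspect", elevation="elevation", slope="slope", overwrite=True)

    # PyGRASS API (the supported alternative)
    Module("r.slope.aspect", elevation="elevation", aspect="aspect", overwrite=True)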

    Defining loops

    In the example below the MODIS MOD13Q1 (NDVI) satellite data products are used in a loop. The original data are stored as coded integer values that need to be multiplied by the -value 0.0001 to represent real ndvi values. Moreover, GRASS GIS -provides a predefined color table called ndvi to represent ndvi data. +value 0.0001 to represent real ndvi values. Moreover, GRASS GIS +provides a predefined color table called ndvi to represent ndvi data. In this case it is not necessary to work with every image separately.
The Graphical Modeler is an appropriate tool to -process data in an effective way using loop and variables (%map for a -particular MODIS image in mapset and %ndvi for original data name suffix). +process data in an effective way using loop and variables (%map for a +particular MODIS image in mapset and %ndvi for original data name suffix). After the loop component is added to the model, it is necessary to define a series of maps with the required settings of map type, mapset, etc. @@ -460,7 +460,7 @@
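For comparison, a rough Python equivalent of what the modeled loop does for each map (the search pattern and the suffix are hypothetical; a running GRASS session with the MODIS maps imported is assumed):

    import grass.script as gs

    for ndvi_map in gs.list_strings(type="raster", pattern="MOD13Q1*"):
        name = ndvi_map.split("@")[0]
        # rescale the coded integer values to real NDVI values
        gs.run_command("r.mapcalc", expression=f"{name}_real = {name} * 0.0001", overwrite=True)
        # apply the predefined ndvi color table
        gs.run_command("r.colors", map=f"{name}_real", color="ndvi")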

    Defining loops

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/gmodeler/model.py b/gui/wxpython/gmodeler/model.py index 15c4eaa8cd4..1f85af66ded 100644 --- a/gui/wxpython/gmodeler/model.py +++ b/gui/wxpython/gmodeler/model.py @@ -100,12 +100,7 @@ def GetItems(self, objType=None): if not objType: return self.items - result = [] - for item in self.items: - if isinstance(item, objType): - result.append(item) - - return result + return [item for item in self.items if isinstance(item, objType)] def GetItem(self, aId, objType=None): """Get item of given id @@ -161,7 +156,7 @@ def ReorderItems(self, idxList): self.canvas.parent.DefineCondition(mo) def Normalize(self): - # check for inconsistecies + # check for inconsistencies for idx in range(1, len(self.items)): if not self.items[idx].GetBlock() and isinstance( self.items[idx - 1], ModelLoop @@ -325,7 +320,8 @@ def LoadModel(self, filename): try: gxmXml = ProcessModelFile(ET.parse(filename)) except Exception as e: - raise GException("{}".format(e)) + msg = "{}".format(e) + raise GException(msg) if self.canvas: win = self.canvas.parent @@ -525,7 +521,7 @@ def Validate(self): def _substituteFile(self, item, params=None, checkOnly=False): """Substitute variables in command file inputs - :param bool checkOnly: tuble - True to check variable, don't touch files + :param bool checkOnly: True to check variable, don't touch files :return: list of undefined variables """ @@ -682,11 +678,12 @@ def Run(self, log, onDone, parent=None): GError(parent=parent, message="\n".join(err)) return - err = [] - for key, item in params.items(): - for p in item["params"]: - if p.get("value", "") == "": - err.append((key, p.get("name", ""), p.get("description", ""))) + err = [ + (key, p.get("name", ""), p.get("description", "")) + for key, item in params.items() + for p in item["params"] + if p.get("value", "") == "" + ] if err: GError( parent=parent, @@ -770,7 +767,7 @@ def Run(self, log, onDone, parent=None): p["value"] = "" def DeleteIntermediateData(self, log): - """Detele intermediate data""" + """Delete intermediate data""" rast, vect, rast3d, msg = self.GetIntermediateData() if rast: @@ -989,11 +986,7 @@ def GetBlockId(self): :return: list of ids """ - ret = [] - for mo in self.inBlock: - ret.append(mo.GetId()) - - return ret + return [mo.GetId() for mo in self.inBlock] class ModelAction(ModelObject, ogl.DividedShape): @@ -1408,20 +1401,14 @@ def OnDraw(self, dc): def GetLog(self, string=True): """Get logging info""" - name = [] - for rel in self.GetRelations(): - name.append(rel.GetLabel()) + name = [rel.GetLabel() for rel in self.GetRelations()] if name: return "/".join(name) + "=" + self.value + " (" + self.prompt + ")" return self.value + " (" + self.prompt + ")" def GetLabel(self): """Get list of names""" - name = [] - for rel in self.GetRelations(): - name.append(rel.GetLabel()) - - return name + return [rel.GetLabel() for rel in self.GetRelations()] def GetPrompt(self): """Get prompt""" @@ -1527,9 +1514,7 @@ def _setPen(self): def SetLabel(self): """Update text""" self.ClearText() - name = [] - for rel in self.GetRelations(): - name.append(rel.GetLabel()) + name = [rel.GetLabel() for rel in self.GetRelations()] self.AddText("/".join(name)) if self.value: self.AddText(self.value) @@ -1550,7 +1535,8 @@ def GetDisplayCmd(self): elif self.prompt == "vector": cmd.append("d.vect") else: - raise GException("Unsupported display prompt: {}".format(self.prompt)) + msg = "Unsupported display prompt: {}".format(self.prompt) + raise GException(msg) cmd.append("map=" + self.value) @@ -1793,12 +1779,7 @@ 
def Update(self): def GetItems(self, items): """Get sorted items by id""" - result = [] - for item in items: - if item.GetId() in self.itemIds: - result.append(item) - - return result + return [item for item in items if item.GetId() in self.itemIds] def SetItems(self, items): """Set items (id)""" @@ -2672,7 +2653,7 @@ def _getParamName(self, parameter_name, item): @staticmethod def _getModuleNickname(item): return "{module_name}{module_id}".format( - module_name=re.sub("[^a-zA-Z]+", "", item.GetLabel()), + module_name=re.sub(r"[^a-zA-Z]+", "", item.GetLabel()), module_id=item.GetId(), ) @@ -3729,12 +3710,8 @@ def _createPage(self, name, params): def GetErrors(self): """Check for errors, get list of messages""" - errList = [] - for task in self.tasks: - errList += task.get_cmd_error() - - return errList + return [task.get_cmd_error() for task in self.tasks] def DeleteIntermediateData(self) -> bool: - """Check if to detele intermediate data""" + """Check if to delete intermediate data""" return bool(self.interData.IsShown() and self.interData.IsChecked()) diff --git a/gui/wxpython/gmodeler/toolbars.py b/gui/wxpython/gmodeler/toolbars.py index ca6a75b4804..4e7acf500b0 100644 --- a/gui/wxpython/gmodeler/toolbars.py +++ b/gui/wxpython/gmodeler/toolbars.py @@ -24,7 +24,7 @@ class ModelerToolbar(BaseToolbar): - """Graphical modeler toolbaro (see gmodeler.py)""" + """Graphical modeler toolbar (see gmodeler.py)""" def __init__(self, parent): BaseToolbar.__init__(self, parent) diff --git a/gui/wxpython/gui_core/dialogs.py b/gui/wxpython/gui_core/dialogs.py index 27ce4523a76..ca412808a57 100644 --- a/gui/wxpython/gui_core/dialogs.py +++ b/gui/wxpython/gui_core/dialogs.py @@ -1114,10 +1114,7 @@ def OnRemoveLayer(self, event): def GetLayers(self): """Get layers""" if self.edit_subg: - layers = [] - for maps, sel in self.subgmaps.items(): - if sel: - layers.append(maps) + layers = [maps for maps, sel in self.subgmaps.items() if sel] else: layers = self.gmaps[:] @@ -1342,8 +1339,7 @@ def GetSelectedGroup(self): def GetGroupLayers(self, group, subgroup=None): """Get layers in group""" - kwargs = {} - kwargs["group"] = group + kwargs = {"group": group} if subgroup: kwargs["subgroup"] = subgroup @@ -1467,7 +1463,7 @@ def _addApplyButton(self): """ def _fullyQualifiedNames(self): - """Adds CheckBox which determines is fully qualified names are retuned.""" + """Adds CheckBox which determines if fully qualified names are returned.""" self.fullyQualified = wx.CheckBox( parent=self, id=wx.ID_ANY, label=_("Use fully-qualified map names") ) @@ -2139,9 +2135,10 @@ def _layout(self): mainSizer.Add(btnSizer, proportion=0, flag=wx.EXPAND | wx.ALL, border=5) # show panel with the largest number of images and fit size - count = [] - for folder in os.listdir(self.symbolPath): - count.append(len(os.listdir(os.path.join(self.symbolPath, folder)))) + count = [ + len(os.listdir(os.path.join(self.symbolPath, folder))) + for folder in os.listdir(self.symbolPath) + ] index = count.index(max(count)) self.folderChoice.SetSelection(index) @@ -2196,9 +2193,7 @@ def _createSymbolPanels(self, parent): def _getSymbols(self, path): # we assume that images are in subfolders (1 level only) - imageList = [] - for image in os.listdir(path): - imageList.append(os.path.join(path, image)) + imageList = [os.path.join(path, image) for image in os.listdir(path)] return sorted(imageList) diff --git a/gui/wxpython/gui_core/forms.py b/gui/wxpython/gui_core/forms.py index 7172835f001..389ae6f2c30 100644 --- a/gui/wxpython/gui_core/forms.py +++ 
b/gui/wxpython/gui_core/forms.py @@ -196,11 +196,7 @@ def run(self): map = pMap.get("value", "") if pMap else None # avoid running db.describe several times - cparams = {} - cparams[map] = { - "dbInfo": None, - "layers": None, - } + cparams = {map: {"dbInfo": None, "layers": None}} # update reference widgets for uid in p["wxId-bind"]: @@ -586,8 +582,10 @@ def __init__( self.btn_cancel.Bind(wx.EVT_BUTTON, self.OnCancel) # bind closing to ESC and CTRL+Q self.Bind(wx.EVT_MENU, self.OnCancel, id=wx.ID_CANCEL) - accelTableList = [(wx.ACCEL_NORMAL, wx.WXK_ESCAPE, wx.ID_CANCEL)] - accelTableList.append((wx.ACCEL_CTRL, ord("Q"), wx.ID_CANCEL)) + accelTableList = [ + (wx.ACCEL_NORMAL, wx.WXK_ESCAPE, wx.ID_CANCEL), + (wx.ACCEL_CTRL, ord("Q"), wx.ID_CANCEL), + ] # TODO: bind Ctrl-t for tile windows here (trac #2004) if self.get_dcmd is not None: # A callback has been set up @@ -1802,10 +1800,7 @@ def __init__(self, parent, giface, task, id=wx.ID_ANY, frame=None, *args, **kwar value = self._getValue(p) if prompt == "layer": - if p.get("element", "layer") == "layer_all": - all = True - else: - all = False + all = bool(p.get("element", "layer") == "layer_all") if p.get("age", "old") == "old": win = gselect.LayerSelect( parent=which_panel, all=all, default=p["default"] @@ -2414,15 +2409,9 @@ def OnCheckItem(index=None, flag=None, event=None): pSqlWhere.append(p) # collect ids - pColumnIds = [] - for p in pColumn: - pColumnIds += p["wxId"] - pLayerIds = [] - for p in pLayer: - pLayerIds += p["wxId"] - pSqlWhereIds = [] - for p in pSqlWhere: - pSqlWhereIds += p["wxId"] + pColumnIds = [p["wxId"] for p in pColumn] + pLayerIds = [p["wxId"] for p in pLayer] + pSqlWhereIds = [p["wxId"] for p in pSqlWhere] # set wxId-bindings if pMap: @@ -2840,9 +2829,7 @@ def OnCheckBoxMulti(self, event): myIndex = p["wxId"].index(me) # Unpack current value list - currentValues = {} - for isThere in theParam.get("value", "").split(","): - currentValues[isThere] = 1 + currentValues = dict.fromkeys(theParam.get("value", "").split(","), 1) theValue = theParam["values"][myIndex] if event.IsChecked(): @@ -2851,10 +2838,7 @@ def OnCheckBoxMulti(self, event): del currentValues[theValue] # Keep the original order, so that some defaults may be recovered - currentValueList = [] - for v in theParam["values"]: - if v in currentValues: - currentValueList.append(v) + currentValueList = [v for v in theParam["values"] if v in currentValues] # Pack it back theParam["value"] = ",".join(currentValueList) diff --git a/gui/wxpython/gui_core/gselect.py b/gui/wxpython/gui_core/gselect.py index 67b8a1280e2..248d40de073 100644 --- a/gui/wxpython/gui_core/gselect.py +++ b/gui/wxpython/gui_core/gselect.py @@ -1328,8 +1328,7 @@ def _onSelection(self, event): def UpdateItems(self, location, dbase=None): """Update list of mapsets for given location - :param str dbase: path to GIS database (None to use currently - selected) + :param str dbase: path to GIS database (None to use currently selected) :param str location: name of location """ if dbase: @@ -1396,16 +1395,12 @@ def __init__( ftype = "ogr" if ogr else "gdal" - formats = [] - for f in GetFormats()[ftype][srcType].items(): - formats += f + formats = list(GetFormats()[ftype][srcType].items()) self.SetItems(formats) def GetExtension(self, name): """Get file extension by format name""" - formatToExt = {} - formatToExt.update(rasterFormatExtension) - formatToExt.update(vectorFormatExtension) + formatToExt = {**rasterFormatExtension, **vectorFormatExtension} return formatToExt.get(name, "") @@ -1950,9 
+1945,7 @@ def _layout(self): def _getExtension(self, name): """Get file extension by format name""" - formatToExt = {} - formatToExt.update(rasterFormatExtension) - formatToExt.update(vectorFormatExtension) + formatToExt = {**rasterFormatExtension, **vectorFormatExtension} return formatToExt.get(name, "") @@ -2222,8 +2215,7 @@ def hasRastSameProjAsLocation(dsn, table=None): :param str dsn: data source name :param str table: PG DB table name, default value is None - :return str: 1 if raster projection match location - projection else 0 + :return str: 1 if raster projection matches location projection, else 0 """ projectionMatch = "0" @@ -2521,10 +2513,8 @@ def _getPGDBtables(self, dsn): def _getPGDBTablesColumnsTypesSql(self, tables): """Get PostGIS DB tables columns data type SQL command - :param list tables: list of PG DB tables with - simple quotes ["'table'", ...] - :return str: SQL string for query all PG DB tables with - columns data types + :param list tables: list of PG DB tables with simple quotes ["'table'", ...] + :return str: SQL string for query all PG DB tables with columns data types """ return f""" SELECT diff --git a/gui/wxpython/gui_core/mapdisp.py b/gui/wxpython/gui_core/mapdisp.py index ea770defe8e..515900959c7 100644 --- a/gui/wxpython/gui_core/mapdisp.py +++ b/gui/wxpython/gui_core/mapdisp.py @@ -285,19 +285,23 @@ def GetProgressBar(self): def GetMap(self): """Returns current map (renderer) instance""" - raise NotImplementedError("GetMap") + msg = self.GetMap.__name__ + raise NotImplementedError(msg) def GetWindow(self): """Returns current map window""" - raise NotImplementedError("GetWindow") + msg = self.GetWindow.__name__ + raise NotImplementedError(msg) def GetWindows(self): """Returns list of map windows""" - raise NotImplementedError("GetWindows") + msg = self.GetWindows.__name__ + raise NotImplementedError(msg) def GetMapToolbar(self): """Returns toolbar with zooming tools""" - raise NotImplementedError("GetMapToolbar") + msg = self.GetMapToolbar.__name__ + raise NotImplementedError(msg) def GetToolbar(self, name): """Returns toolbar if exists and is active, else None.""" @@ -393,7 +397,8 @@ def GetToolbarNames(self): def AddToolbar(self): """Add defined toolbar to the window""" - raise NotImplementedError("AddToolbar") + msg = self.AddToolbar.__name__ + raise NotImplementedError(msg) def RemoveToolbar(self, name, destroy=False): """Removes defined toolbar from the window @@ -419,7 +424,8 @@ def IsPaneShown(self, name): def OnRender(self, event): """Re-render map composition (each map layer)""" - raise NotImplementedError("OnRender") + msg = self.OnRender.__name__ + raise NotImplementedError(msg) def OnEnableDisableRender(self, event): """Enable/disable auto-rendering map composition (each map layer)""" @@ -580,7 +586,7 @@ class DoubleMapPanel(MapPanelBase): It is expected that derived class will call _bindWindowsActivation() when both map windows will be initialized. - Drived class should have method GetMapToolbar() returns toolbar + Derived classes should have method GetMapToolbar() returns toolbar which has methods SetActiveMap() and Enable(). @note To access maps use getters only @@ -605,7 +611,7 @@ def __init__( r""" \a firstMap is set as active (by assign it to \c self.Map). - Derived class should assging to \c self.MapWindow to make one + Derived class should assigning to \c self.MapWindow to make one map window current by default. 
:param parent: gui parent diff --git a/gui/wxpython/gui_core/menu.py b/gui/wxpython/gui_core/menu.py index 34d6eeffd31..a512b6eea19 100644 --- a/gui/wxpython/gui_core/menu.py +++ b/gui/wxpython/gui_core/menu.py @@ -342,8 +342,7 @@ class RecentFilesMenu: written into the .recent_files file :param obj parent_menu: menu widget instance where be inserted recent files menu on the specified position - :param int pos: position (index) where insert recent files menu in - the parent menu + :param int pos: position (index) where insert recent files menu in the parent menu :param int history_len: the maximum number of file paths written into the .recent_files file to app name group """ diff --git a/gui/wxpython/gui_core/preferences.py b/gui/wxpython/gui_core/preferences.py index 85e5313da0e..73ad6f3d3bd 100644 --- a/gui/wxpython/gui_core/preferences.py +++ b/gui/wxpython/gui_core/preferences.py @@ -2186,8 +2186,7 @@ def OnSetOutputFont(self, event): dlg = wx.FontDialog(self, fontdata) - "FIXME: native font dialog does not initialize with current font" - + # FIXME: native font dialog does not initialize with current font if dlg.ShowModal() == wx.ID_OK: outdata = dlg.GetFontData() font = outdata.GetChosenFont() diff --git a/gui/wxpython/gui_core/query.py b/gui/wxpython/gui_core/query.py index 5e0f734fa6a..bf46df8688e 100644 --- a/gui/wxpython/gui_core/query.py +++ b/gui/wxpython/gui_core/query.py @@ -119,11 +119,10 @@ def ShowContextMenu(self, node): menu = Menu() texts = [] if len(nodes) > 1: - values = [] - for node in nodes: - values.append( - (node.label, node.data[self._colNames[1]] if node.data else "") - ) + values = [ + (node.label, node.data[self._colNames[1]] if node.data else "") + for node in nodes + ] col1 = "\n".join([val[1] for val in values if val[1]]) col2 = "\n".join([val[0] for val in values if val[0]]) table = "\n".join([val[0] + ": " + val[1] for val in values]) @@ -253,8 +252,7 @@ def PrepareQueryResults(coordinates, result): Adds coordinates, improves vector results tree structure. 
""" - data = [] - data.append({_("east, north"): ", ".join(map(str, coordinates))}) + data = [{_("east, north"): ", ".join(map(str, coordinates))}] for part in result: if "Map" in part: itemText = part["Map"] diff --git a/gui/wxpython/gui_core/toolbars.py b/gui/wxpython/gui_core/toolbars.py index d47a850a6a5..33ca987e142 100644 --- a/gui/wxpython/gui_core/toolbars.py +++ b/gui/wxpython/gui_core/toolbars.py @@ -18,6 +18,7 @@ import platform import os +from itertools import starmap import wx from wx.lib.agw import aui @@ -243,10 +244,7 @@ def EnableAll(self, enable=True): def _getToolbarData(self, data): """Define tool""" - retData = [] - for args in data: - retData.append(self._defineTool(*args)) - return retData + return list(starmap(self._defineTool, data)) def _defineTool( self, name=None, icon=None, handler=None, item=wx.ITEM_NORMAL, pos=-1 @@ -328,7 +326,6 @@ class BaseToolbar(ToolBar): Following code shows how to create new basic toolbar: - class MyToolbar(BaseToolbar): def __init__(self, parent): BaseToolbar.__init__(self, parent) diff --git a/gui/wxpython/gui_core/vselect.py b/gui/wxpython/gui_core/vselect.py index 16bb64b7a57..43c07a3455a 100644 --- a/gui/wxpython/gui_core/vselect.py +++ b/gui/wxpython/gui_core/vselect.py @@ -371,23 +371,21 @@ def OnExportMap(self, event): else: GError(_("Unable to create a new vector map.\n\nReason: %s") % err) - """ - def SetSelectedCat(self, cats): - # allows setting selected vector categories by list of cats (per line) - info = self.QuerySelectedMap() - if 'Category' not in info: - return - - for cat in cats.splitlines(): - tmpDict = {} - tmpDict['Category'] = cat - tmpDict['Map'] = info['Map'] - tmpDict['Layer'] = info['Layer'] - tmpDict['Type'] = '-' - self.AddVecInfo(tmpDict) - - self._draw() - """ + # def SetSelectedCat(self, cats): + # # allows setting selected vector categories by list of cats (per line) + # info = self.QuerySelectedMap() + # if "Category" not in info: + # return + # + # for cat in cats.splitlines(): + # tmpDict = {} + # tmpDict["Category"] = cat + # tmpDict["Map"] = info["Map"] + # tmpDict["Layer"] = info["Layer"] + # tmpDict["Type"] = "-" + # self.AddVecInfo(tmpDict) + # + # self._draw() class VectorSelectHighlighter: @@ -397,6 +395,9 @@ class VectorSelectHighlighter: """ def __init__(self, mapdisp, giface): + """ + :param mapdisp: Map display frame + """ self.qlayer = None self.mapdisp = mapdisp self.giface = giface diff --git a/gui/wxpython/gui_core/widgets.py b/gui/wxpython/gui_core/widgets.py index b787bf23037..40c9e6c5e19 100644 --- a/gui/wxpython/gui_core/widgets.py +++ b/gui/wxpython/gui_core/widgets.py @@ -1052,7 +1052,7 @@ def __init__(self, num_of_params): super().__init__() def _enableDisableBtn(self, enable): - """Enable/Disable buttomn + """Enable/Disable button :param bool enable: Enable/Disable btn """ diff --git a/gui/wxpython/gui_core/wrap.py b/gui/wxpython/gui_core/wrap.py index b971e5e6eab..3a0ca712694 100644 --- a/gui/wxpython/gui_core/wrap.py +++ b/gui/wxpython/gui_core/wrap.py @@ -618,17 +618,11 @@ class DragImage(wx.GenericDragImage if wxPythonPhoenix else wx.DragImage): """Wrapper around wx.DragImage to have more control over the widget on different platforms/wxpython versions""" - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - class PseudoDC(wx.adv.PseudoDC if wxPythonPhoenix else wx.PseudoDC): """Wrapper around wx.PseudoDC to have more control over the widget on different platforms/wxpython versions""" - def __init__(self, *args, **kwargs): - super().__init__(*args, 
**kwargs) - def DrawLinePoint(self, *args, **kwargs): args = convertToInt(argsOrKwargs=args, roundVal=True) kwargs = convertToInt(argsOrKwargs=kwargs, roundVal=True) @@ -671,9 +665,6 @@ class ClientDC(wx.ClientDC): """Wrapper around wx.ClientDC to have more control over the widget on different platforms/wxpython versions""" - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - def GetFullMultiLineTextExtent(self, string, font=None): if wxPythonPhoenix: return super().GetFullMultiLineTextExtent(string, font) diff --git a/gui/wxpython/history/browser.py b/gui/wxpython/history/browser.py index 03e39a884a3..9589cee3f0d 100644 --- a/gui/wxpython/history/browser.py +++ b/gui/wxpython/history/browser.py @@ -326,11 +326,11 @@ def _get_current_region(self): def _get_history_region(self): """Get computational region settings of executed command.""" - history_region = {} - for key, value in self.region_settings.items(): - if self._region_settings_filter(key): - history_region[key] = value - return history_region + return { + key: value + for key, value in self.region_settings.items() + if self._region_settings_filter(key) + } def OnUpdateRegion(self, event): """Set current region to the region of executed command.""" diff --git a/gui/wxpython/iclass/digit.py b/gui/wxpython/iclass/digit.py index 7442c24ab5d..9b9356cee02 100644 --- a/gui/wxpython/iclass/digit.py +++ b/gui/wxpython/iclass/digit.py @@ -17,16 +17,29 @@ """ import wx - +from core.gcmd import GWarning from vdigit.mapwindow import VDigitWindow from vdigit.wxdigit import IVDigit -from vdigit.wxdisplay import DisplayDriver, TYPE_AREA -from core.gcmd import GWarning +from vdigit.wxdisplay import TYPE_AREA, DisplayDriver try: - from grass.lib.gis import G_verbose, G_set_verbose - from grass.lib.vector import * - from grass.lib.vedit import * + from ctypes import pointer + + from grass.lib.gis import G_set_verbose, G_verbose + from grass.lib.vector import ( + Map_info, + Vect_build, + Vect_close, + Vect_copy_map_lines, + Vect_get_area_cat, + Vect_get_num_lines, + Vect_is_3d, + Vect_open_new, + Vect_open_tmp_new, + Vect_open_tmp_update, + Vect_open_update, + ) + from grass.lib.vedit import TYPE_CENTROIDIN, Vedit_delete_areas_cat except ImportError: pass diff --git a/gui/wxpython/iclass/frame.py b/gui/wxpython/iclass/frame.py index b19d8d7d3ac..4bb9e66b6c4 100644 --- a/gui/wxpython/iclass/frame.py +++ b/gui/wxpython/iclass/frame.py @@ -18,17 +18,38 @@ @author Anna Kratochvilova """ -import os import copy +import os import tempfile +from ctypes import byref, pointer import wx -from ctypes import * - try: - from grass.lib.imagery import * - from grass.lib.vector import * + from grass.lib.imagery import ( + I_free_group_ref, + I_free_signatures, + I_iclass_add_signature, + I_iclass_analysis, + I_iclass_create_raster, + I_iclass_free_statistics, + I_iclass_init_group, + I_iclass_init_signatures, + I_iclass_init_statistics, + I_iclass_statistics_set_nstd, + I_iclass_write_signatures, + I_init_group_ref, + I_init_signatures, + IClass_statistics, + Ref, + Signature, + ) + from grass.lib.vector import ( + Vect_area_alive, + Vect_get_map_box, + Vect_get_num_areas, + bound_box, + ) haveIClass = True errMsg = "" @@ -37,37 +58,35 @@ errMsg = _("Loading imagery lib failed.\n%s") % e import grass.script as gs - -from mapdisp import statusbar as sb -from mapdisp.main import StandaloneMapDisplayGrassInterface -from mapwin.buffered import BufferedMapWindow -from vdigit.toolbars import VDigitToolbar -from gui_core.mapdisp import 
DoubleMapPanel, FrameMixin from core import globalvar +from core.gcmd import GError, GMessage, RunCommand from core.render import Map -from core.gcmd import RunCommand, GMessage, GError +from dbmgr.vinfo import VectorDBInfo +from grass.pydispatch.signal import Signal from gui_core.dialogs import SetOpacityDialog +from gui_core.mapdisp import DoubleMapPanel, FrameMixin from gui_core.wrap import Menu -from dbmgr.vinfo import VectorDBInfo +from mapdisp import statusbar as sb +from mapdisp.main import StandaloneMapDisplayGrassInterface +from mapwin.buffered import BufferedMapWindow +from vdigit.toolbars import VDigitToolbar -from iclass.digit import IClassVDigitWindow, IClassVDigit -from iclass.toolbars import ( - IClassMapToolbar, - IClassMiscToolbar, - IClassToolbar, - IClassMapManagerToolbar, -) -from iclass.statistics import StatisticsData from iclass.dialogs import ( IClassCategoryManagerDialog, - IClassGroupDialog, - IClassSignatureFileDialog, IClassExportAreasDialog, + IClassGroupDialog, IClassMapDialog, + IClassSignatureFileDialog, ) +from iclass.digit import IClassVDigit, IClassVDigitWindow from iclass.plots import PlotPanel - -from grass.pydispatch.signal import Signal +from iclass.statistics import StatisticsData +from iclass.toolbars import ( + IClassMapManagerToolbar, + IClassMapToolbar, + IClassMiscToolbar, + IClassToolbar, +) class IClassMapPanel(DoubleMapPanel): @@ -1483,10 +1502,7 @@ def AddLayerRGB(self, cmd): :param cmd: d.rgb command as a list """ - name = [] - for param in cmd: - if "=" in param: - name.append(param.split("=")[1]) + name = [param.split("=")[1] for param in cmd if "=" in param] name = ",".join(name) self.map.AddLayer( ltype="rgb", diff --git a/gui/wxpython/iclass/g.gui.iclass.html b/gui/wxpython/iclass/g.gui.iclass.html index f1cf34ba897..3ce1bd580e6 100644 --- a/gui/wxpython/iclass/g.gui.iclass.html +++ b/gui/wxpython/iclass/g.gui.iclass.html @@ -82,7 +82,7 @@

    DESCRIPTION

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components, Interactive Scatter Plot Tool
    diff --git a/gui/wxpython/image2target/g.gui.image2target.html b/gui/wxpython/image2target/g.gui.image2target.html index af1aec5a55d..35b9c7023dd 100644 --- a/gui/wxpython/image2target/g.gui.image2target.html +++ b/gui/wxpython/image2target/g.gui.image2target.html @@ -26,11 +26,11 @@

    DESCRIPTION

    manipulate and analyze GCPs are provided in the toolbar. This panel can be moved out of the GCP manager window by either dragging with the caption or by clicking on the pin button on the right in the caption. - This panel can also be placed below the map displays by dragging. + This panel can also be placed below the map displays by dragging.
  • The two panels in the lower part are used for map and GCP display, the left pane showing a map from the source project and the right pane showing a reference map from the target project. Numbered Ground - Control Points are shown on both map displays. + Control Points are shown on both map displays.
  • Components of the GCP Manager

    @@ -49,7 +49,7 @@

    Components of the GCP Manager

    List of ground control points

    The list of Ground Control Points can be sorted by clicking on a column -header. Clicking on a cloumn header will sort the GCPs ascending, a +header. Clicking on a column header will sort the GCPs ascending, a second click on the same column will sort the GCPs descending. Overall RMS error and individual RMS errors of all points are often improved if the GCP with the highest RMS error is adjusted. Individual coordinates @@ -291,7 +291,7 @@

    GCP Map Display Statusbar

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/image2target/g.gui.image2target.py b/gui/wxpython/image2target/g.gui.image2target.py index e2704dc371b..f7370a71adf 100755 --- a/gui/wxpython/image2target/g.gui.image2target.py +++ b/gui/wxpython/image2target/g.gui.image2target.py @@ -32,7 +32,7 @@ """ -Module to run GCP management tool as stadalone application. +Module to run GCP management tool as standalone application. """ import os diff --git a/gui/wxpython/image2target/ii2t_manager.py b/gui/wxpython/image2target/ii2t_manager.py index 939c2fb0fe8..56adf91775b 100644 --- a/gui/wxpython/image2target/ii2t_manager.py +++ b/gui/wxpython/image2target/ii2t_manager.py @@ -23,7 +23,7 @@ @author Original author Michael Barton @author Original version improved by Martin Landa -@author Rewritten by Markus Metz redesign georectfier -> GCP Manage +@author Rewritten by Markus Metz redesign georectifier -> GCP Manage @author Support for GraphicsSet added by Stepan Turek (2012) @author port i.image.2target (v6) to version 7 in 2017 by Yann """ @@ -31,51 +31,54 @@ # TODO: i.ortho.transform has 6 appearances, check each of them and configure # TODO: i.ortho.transform looks for REF_POINTS/CONTROL_POINTS and not POINTS # TODO: CHECK CONTROL_POINTS format and create it for i.ortho.transform to use. - +from __future__ import annotations import os -import sys import shutil +import sys from copy import copy +from typing import TYPE_CHECKING import wx -from wx.lib.mixins.listctrl import ColumnSorterMixin, ListCtrlAutoWidthMixin import wx.lib.colourselect as csel - from core import globalvar +from wx.lib.mixins.listctrl import ColumnSorterMixin, ListCtrlAutoWidthMixin -if globalvar.wxPythonPhoenix: +if globalvar.wxPythonPhoenix or TYPE_CHECKING: from wx import adv as wiz else: from wx import wizard as wiz import grass.script as gs +# isort: split from core import utils +from core.gcmd import GError, GMessage, GWarning, RunCommand +from core.giface import Notification from core.render import Map -from gui_core.gselect import Select, LocationSelect, MapsetSelect -from gui_core.dialogs import GroupDialog -from gui_core.mapdisp import FrameMixin -from core.gcmd import RunCommand, GMessage, GError, GWarning from core.settings import UserSettings from gcp.mapdisplay import MapPanel -from core.giface import Notification +from gui_core.dialogs import GroupDialog +from gui_core.gselect import LocationSelect, MapsetSelect, Select +from gui_core.mapdisp import FrameMixin from gui_core.wrap import ( - SpinCtrl, + BitmapFromImage, Button, - StaticText, - StaticBox, CheckListBox, - TextCtrl, - Menu, - ListCtrl, - BitmapFromImage, CheckListCtrlMixin, + ListCtrl, + Menu, + SpinCtrl, + StaticBox, + StaticText, + TextCtrl, ) - from location_wizard.wizard import GridBagSizerTitledPage as TitledPage +if TYPE_CHECKING: + from wx.adv import WizardEvent + # # global variables # @@ -529,7 +532,7 @@ def OnMapset(self, event): if not wx.FindWindowById(wx.ID_FORWARD).IsEnabled(): wx.FindWindowById(wx.ID_FORWARD).Enable(True) - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: if event.GetDirection() and (self.xylocation == "" or self.xymapset == ""): GMessage( _( @@ -543,7 +546,7 @@ def OnPageChanging(self, event=None): self.parent.SetSrcEnv(self.xylocation, self.xymapset) - def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: if self.xylocation == "" or self.xymapset == "": wx.FindWindowById(wx.ID_FORWARD).Enable(False) else: @@ -690,7 +693,7 @@ def 
OnVGroup(self, event): def OnExtension(self, event): self.extension = self.ext_txt.GetValue() - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: if event.GetDirection() and self.xygroup == "": GMessage( _("You must select a valid image/map group in order to continue"), @@ -707,7 +710,7 @@ def OnPageChanging(self, event=None): event.Veto() return - def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: global maptype self.groupList = [] @@ -892,7 +895,7 @@ def OnTgtVectSelection(self, event): tgt_map["vector"] = self.tgtvectselection.GetValue() - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: global src_map, tgt_map if event.GetDirection() and (src_map == ""): @@ -904,7 +907,7 @@ def OnPageChanging(self, event=None): self.parent.SwitchEnv("target") - def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: global maptype, src_map, tgt_map self.srcselection.SetElementList(maptype) @@ -1003,6 +1006,7 @@ def __init__( Map=None, lmgr=None, ): + # pylint: disable=super-init-not-called; See InitMapDisplay() self.grwiz = grwiz # GR Wizard self._giface = giface @@ -1030,10 +1034,10 @@ def __init__( # register data structures for drawing GCP's # self.pointsToDrawTgt = self.TgtMapWindow.RegisterGraphicsToDraw( - graphicsType="point", setStatusFunc=self.SetGCPSatus + graphicsType="point", setStatusFunc=self.SetGCPStatus ) self.pointsToDrawSrc = self.SrcMapWindow.RegisterGraphicsToDraw( - graphicsType="point", setStatusFunc=self.SetGCPSatus + graphicsType="point", setStatusFunc=self.SetGCPStatus ) # connect to the map windows signals @@ -1387,13 +1391,12 @@ def SetSettings(self): font = self.GetFont() font.SetPointSize(int(spx) + 2) - textProp = {} - textProp["active"] = True - textProp["font"] = font + textProp = {"active": True, "font": font} + self.pointsToDrawSrc.SetPropertyVal("text", textProp) self.pointsToDrawTgt.SetPropertyVal("text", copy(textProp)) - def SetGCPSatus(self, item, itemIndex): + def SetGCPStatus(self, item, itemIndex): """Before GCP is drawn, decides it's colour and whether it will be drawn. """ @@ -1497,6 +1500,7 @@ def SetGCPData(self, coordtype, coord, mapdisp=None, confirm=False): ) # Get the elevation height from the map given by i.ortho.elev from subprocess import PIPE + from grass.pygrass.modules import Module rwhat = Module( @@ -2835,11 +2839,11 @@ def __init__( size=wx.DefaultSize, style=wx.DEFAULT_DIALOG_STYLE, ): - wx.Dialog.__init__(self, parent, id, title, pos, size, style) """ Dialog to set profile text options: font, title and font size, axis labels and font size """ + wx.Dialog.__init__(self, parent, id, title, pos, size, style) # # initialize variables # diff --git a/gui/wxpython/iscatt/controllers.py b/gui/wxpython/iscatt/controllers.py index 06ca5d0b243..f87569b2698 100644 --- a/gui/wxpython/iscatt/controllers.py +++ b/gui/wxpython/iscatt/controllers.py @@ -505,7 +505,7 @@ class PlotsRenderingManager: """Manages rendering of scatter plot. .. 
todo:: - still space for optimalization + still space for optimization """ def __init__(self, scatt_mgr, cats_mgr, core): @@ -1133,11 +1133,9 @@ def DeletAllCategories(self): def SetCategory(self, cat, stats): self.cats_mgr.setCategoryAttrs.disconnect(self.SetStatistics) - cats_attr = {} - - for attr in ["name", "color", "nstd"]: - if attr in stats: - cats_attr[attr] = stats[attr] + cats_attr = { + attr: stats[attr] for attr in ["name", "color", "nstd"] if attr in stats + } if cats_attr: self.cats_mgr.SetCategoryAttrs(cat, cats_attr) diff --git a/gui/wxpython/iscatt/core_c.py b/gui/wxpython/iscatt/core_c.py index a6e6ac5c8a0..93f90e6a2ee 100644 --- a/gui/wxpython/iscatt/core_c.py +++ b/gui/wxpython/iscatt/core_c.py @@ -19,7 +19,7 @@ import numpy as np try: - from grass.lib.gis import G_get_window + from grass.lib.gis import G_get_window, struct_Cell_head from grass.lib.imagery import ( SC_SCATT_CONDITIONS, SC_SCATT_DATA, @@ -35,10 +35,9 @@ I_sc_insert_scatt_data, I_scd_init_scatt_data, scdScattData, - struct_Cell_head, - struct_Range, struct_scCats, ) + from grass.lib.raster import struct_Range except ImportError as e: sys.stderr.write(_("Loading ctypes libs failed: %s") % e) diff --git a/gui/wxpython/iscatt/dialogs.py b/gui/wxpython/iscatt/dialogs.py index a2d054b64a4..4dd10aeb832 100644 --- a/gui/wxpython/iscatt/dialogs.py +++ b/gui/wxpython/iscatt/dialogs.py @@ -425,9 +425,7 @@ def __init__( gridSizer = wx.GridBagSizer(vgap=1, hgap=1) row = 0 - setts = {} - setts.update(self.colorsSetts) - setts.update(self.sizeSetts) + setts = {**self.colorsSetts, **self.sizeSetts} settsOrder = [ "sel_pol", diff --git a/gui/wxpython/iscatt/frame.py b/gui/wxpython/iscatt/frame.py index 3addbad2f06..542d8acc2c3 100644 --- a/gui/wxpython/iscatt/frame.py +++ b/gui/wxpython/iscatt/frame.py @@ -243,8 +243,8 @@ def SetBusy(self, busy): def CursorPlotMove(self, x, y, scatt_id): try: - x = int(round(x)) - y = int(round(y)) + x = round(x) + y = round(y) coords = True except TypeError: coords = False diff --git a/gui/wxpython/iscatt/iscatt_core.py b/gui/wxpython/iscatt/iscatt_core.py index 52cecbfb381..1d56ffe88c3 100644 --- a/gui/wxpython/iscatt/iscatt_core.py +++ b/gui/wxpython/iscatt/iscatt_core.py @@ -155,12 +155,12 @@ def UpdateCategoryWithPolygons(self, cat_id, scatts_pols, value): b1, b2 = idScattToidBands(scatt_id, len(self.an_data.GetBands())) b = self.scatts_dt.GetBandsInfo(scatt_id) - region = {} - region["s"] = b["b2"]["min"] - 0.5 - region["n"] = b["b2"]["max"] + 0.5 - - region["w"] = b["b1"]["min"] - 0.5 - region["e"] = b["b1"]["max"] + 0.5 + region = { + "s": b["b2"]["min"] - 0.5, + "n": b["b2"]["max"] + 0.5, + "w": b["b1"]["min"] - 0.5, + "e": b["b1"]["max"] + 0.5, + } arr = self.scatt_conds_dt.GetValuesArr(cat_id, scatt_id) arr = Rasterize(polygon=coords, rast=arr, region=region, value=value) @@ -214,15 +214,16 @@ def SetVectMap(self, vectMap): def SyncWithMap(self): # TODO possible optimization - bbox only of vertex and its two - # neighbours + # neighbors region = self.an_data.GetRegion() - bbox = {} - bbox["maxx"] = region["e"] - bbox["minx"] = region["w"] - bbox["maxy"] = region["n"] - bbox["miny"] = region["s"] + bbox = { + "maxx": region["e"], + "minx": region["w"], + "maxy": region["n"], + "miny": region["s"], + } updated_cats = [] @@ -237,7 +238,7 @@ def SyncWithMap(self): def EditedFeature(self, new_bboxs, new_areas_cats, old_bboxs, old_areas_cats): # TODO possible optimization - bbox only of vertex and its two - # neighbours + # neighbors bboxs = old_bboxs + new_bboxs areas_cats = 
old_areas_cats + new_areas_cats @@ -551,8 +552,7 @@ def GetScatt(self, scatt_id, cats_ids=None): class ScattPlotsData(ScattPlotsCondsData): - """Data structure for computed points (classes) in scatter plots.\ - """ + """Data structure for computed points (classes) in scatter plots.""" def __init__(self, an_data): self.cats_rasts = {} @@ -727,35 +727,36 @@ def GetCatsRasts(self): return cats_rasts -# not used, using iclass_perimeter algorithm instead of scipy convolve2d -""" -def RasterizePolygon(pol, height, min_h, width, min_w): - - # Joe Kington - # https://stackoverflow.com/questions/3654289/scipy-create-2d-polygon-mask - - #poly_verts = [(1,1), (1,4), (4,4),(4,1), (1,1)] - - nx = width - ny = height - - x, y = np.meshgrid(np.arange(-0.5 + min_w, nx + 0.5 + min_w, dtype=float), - np.arange(-0.5 + min_h, ny + 0.5 + min_h, dtype=float)) - x, y = x.flatten(), y.flatten() - - points = np.vstack((x,y)).T - - p = Path(pol) - grid = p.contains_points(points) - grid = grid.reshape((ny + 1, nx + 1)) - raster = np.zeros((height, width), dtype=np.uint8)#TODO bool - - #TODO shift by 0.5 - B = np.ones((2,2))/4 - raster = convolve2d(grid, B, 'valid') - - return raster -""" +# not used, using iclass_perimeter algorithm instead of scipy convolve2d + +# def RasterizePolygon(pol, height, min_h, width, min_w): +# +# # Joe Kington +# # https://stackoverflow.com/questions/3654289/scipy-create-2d-polygon-mask +# +# # poly_verts = [(1,1), (1,4), (4,4),(4,1), (1,1)] +# +# nx = width +# ny = height +# +# x, y = np.meshgrid( +# np.arange(-0.5 + min_w, nx + 0.5 + min_w, dtype=float), +# np.arange(-0.5 + min_h, ny + 0.5 + min_h, dtype=float), +# ) +# x, y = x.flatten(), y.flatten() +# +# points = np.vstack((x, y)).T +# +# p = Path(pol) +# grid = p.contains_points(points) +# grid = grid.reshape((ny + 1, nx + 1)) +# raster = np.zeros((height, width), dtype=np.uint8) # TODO bool +# +# # TODO shift by 0.5 +# B = np.ones((2, 2)) / 4 +# raster = convolve2d(grid, B, "valid") +# +# return raster def idScattToidBands(scatt_id, n_bands): diff --git a/gui/wxpython/iscatt/plots.py b/gui/wxpython/iscatt/plots.py index e4bc753c82e..1c081958fed 100644 --- a/gui/wxpython/iscatt/plots.py +++ b/gui/wxpython/iscatt/plots.py @@ -175,7 +175,7 @@ def OnRelease(self, event): self.canvas.draw() def OnPress(self, event): - "on button press we will see if the mouse is over us and store some data" + """on button press we will see if the mouse is over us and store some data""" if not event.inaxes: return if self.mode == "zoom_extend": @@ -189,7 +189,7 @@ def OnPress(self, event): self.zoom_rect_coords = None def _stopCategoryEdit(self): - "disconnect all the stored connection ids" + """disconnect all the stored connection ids""" if self.cidpress: self.canvas.mpl_disconnect(self.cidpress) @@ -235,7 +235,7 @@ def Plot(self, cats_order, scatts, ellipses, styles): img = imshow( self.axes, merged_img, - extent=[int(ceil(x)) for x in self.full_extend], + extent=[ceil(x) for x in self.full_extend], origin="lower", interpolation="nearest", aspect="equal", @@ -389,7 +389,7 @@ def OnCanvasLeave(self, event): self.cursorMove.emit(x=None, y=None, scatt_id=self.scatt_id) def PanMotion(self, event): - "on mouse movement" + """on mouse movement""" if self.mode != "pan": return if event.inaxes is None: @@ -513,30 +513,29 @@ def MergeImg(cats_order, scatts, styles, rend_dt, output_queue): else: MergeArrays(merged_img, rend_dt[cat_id]["dt"], styles[cat_id]["opacity"]) - """ - # c_img_a = np.memmap( - # grass.tempfile(), dtype="uint16", mode="w+", shape=shape 
- # ) - c_img_a = colored_cat.astype("uint16")[:, :, 3] * styles[cat_id]["opacity"] - - # TODO apply strides and there will be no need for loop - # b = as_strided( - # a, - # strides=(0, a.strides[3], a.strides[3], a.strides[3]), - # shape=(3, a.shape[0], a.shape[1]), - # ) - - for i in range(3): - merged_img[:, :, i] = ( - merged_img[:, :, i] * (255 - c_img_a) - + colored_cat[:, :, i] * c_img_a - ) / 255 - merged_img[:, :, 3] = ( - merged_img[:, :, 3] * (255 - c_img_a) + 255 * c_img_a - ) / 255 - - del c_img_a - """ + # # c_img_a = np.memmap( + # # grass.tempfile(), dtype="uint16", mode="w+", shape=shape + # # ) + # c_img_a = colored_cat.astype("uint16")[:,:, 3] * styles[cat_id]["opacity"] + # + # # TODO apply strides and there will be no need for loop + # # b = as_strided( + # # a, + # # strides=(0, a.strides[3], a.strides[3], a.strides[3]), + # # shape=(3, a.shape[0], a.shape[1]), + # # ) + # + # for i in range(3): + # merged_img[:, :, i] = ( + # merged_img[:, :, i] * (255 - c_img_a) + # + colored_cat[:, :, i] * c_img_a + # ) / 255 + # merged_img[:, :, 3] = ( + # merged_img[:, :, 3] * (255 - c_img_a) + 255 * c_img_a + # ) / 255 + # + # del c_img_a + _rendDtMemmapsToFiles(rend_dt) merged_img = {"dt": merged_img.filename, "sh": merged_img.shape} @@ -633,10 +632,11 @@ class PolygonDrawer: def __init__(self, ax, pol, empty_pol): if pol.figure is None: - raise RuntimeError( + msg = ( "You must first add the polygon to a figure or canvas before defining " "the interactor" ) + raise RuntimeError(msg) self.ax = ax self.canvas = pol.figure.canvas @@ -667,13 +667,14 @@ def __init__(self, ax, pol, empty_pol): self.it = 0 def _getPolygonStyle(self): - style = {} - style["sel_pol"] = UserSettings.Get( - group="scatt", key="selection", subkey="sel_pol" - ) - style["sel_pol_vertex"] = UserSettings.Get( - group="scatt", key="selection", subkey="sel_pol_vertex" - ) + style = { + "sel_pol": UserSettings.Get( + group="scatt", key="selection", subkey="sel_pol" + ), + "sel_pol_vertex": UserSettings.Get( + group="scatt", key="selection", subkey="sel_pol_vertex" + ), + } style["sel_pol"] = [i / 255.0 for i in style["sel_pol"]] style["sel_pol_vertex"] = [i / 255.0 for i in style["sel_pol_vertex"]] @@ -739,14 +740,14 @@ def DrawCallback(self, event): self.ax.draw_artist(self.line) def poly_changed(self, pol): - "this method is called whenever the polygon object is called" + """this method is called whenever the polygon object is called""" # only copy the artist props to the line (except visibility) vis = self.line.get_visible() Artist.update_from(self.line, pol) self.line.set_visible(vis) # don't use the pol visibility state def get_ind_under_point(self, event): - "get the index of the vertex under point if within threshold" + """get the index of the vertex under point if within threshold""" # display coords xy = np.asarray(self.pol.xy) @@ -779,7 +780,7 @@ def OnButtonPressed(self, event): self.moving_ver_idx = self.get_ind_under_point(event) def ButtonReleaseCallback(self, event): - "whenever a mouse button is released" + """whenever a mouse button is released""" if not self.showverts: return if event.button != 1: @@ -859,7 +860,7 @@ def _addVertex(self, event): self.Redraw() def motion_notify_callback(self, event): - "on mouse movement" + """on mouse movement""" if self.mode != "move_vertex": return if not self.showverts: @@ -912,7 +913,8 @@ class ModestImage(mi.AxesImage): def __init__(self, minx=0.0, miny=0.0, *args, **kwargs): if "extent" in kwargs and kwargs["extent"] is not None: - raise 
NotImplementedError("ModestImage does not support extents") + msg = f"{ModestImage.__name__} does not support extents" + raise NotImplementedError(msg) self._full_res = None self._sx, self._sy = None, None @@ -932,12 +934,14 @@ def set_data(self, A): self._A = A if self._A.dtype != np.uint8 and not np.can_cast(self._A.dtype, float): - raise TypeError("Image data can not convert to float") + msg = "Image data can not convert to float" + raise TypeError(msg) if self._A.ndim not in (2, 3) or ( self._A.ndim == 3 and self._A.shape[-1] not in (3, 4) ): - raise TypeError("Invalid dimensions for image data") + msg = "Invalid dimensions for image data" + raise TypeError(msg) self._imcache = None self._rgbacache = None diff --git a/gui/wxpython/iscatt/toolbars.py b/gui/wxpython/iscatt/toolbars.py index 7f813723282..6745d401823 100644 --- a/gui/wxpython/iscatt/toolbars.py +++ b/gui/wxpython/iscatt/toolbars.py @@ -26,8 +26,7 @@ def get_tool_name(tool_name, tool_name_type=tuple): """Get tool name :param str|tuple tool_name: tool name - :param type tool_name_type: tool name type with default - tuple type + :param type tool_name_type: tool name type with default tuple type :return str: tool name """ @@ -95,19 +94,19 @@ def _toolbarData(self): ( ("pan", icons["pan"].label), icons["pan"], - lambda event: self.SetPloltsMode(event, "pan"), + lambda event: self.SetPlotsMode(event, "pan"), wx.ITEM_CHECK, ), ( ("zoom", icons["zoomIn"].label), icons["zoomIn"], - lambda event: self.SetPloltsMode(event, "zoom"), + lambda event: self.SetPlotsMode(event, "zoom"), wx.ITEM_CHECK, ), ( ("zoom_extend", icons["zoomExtent"].label), icons["zoomExtent"], - lambda event: self.SetPloltsMode(event, "zoom_extend"), + lambda event: self.SetPlotsMode(event, "zoom_extend"), wx.ITEM_CHECK, ), (None,), @@ -145,7 +144,7 @@ def _toolbarData(self): def GetToolId(self, toolName): # TODO can be useful in base return vars(self)[toolName] - def SetPloltsMode(self, event, tool_name): + def SetPlotsMode(self, event, tool_name): self.scatt_mgr.modeSet.disconnect(self.ModeSet) if event.IsChecked(): for i_tool_data in self.controller.data: diff --git a/gui/wxpython/lmgr/frame.py b/gui/wxpython/lmgr/frame.py index 9879ce1d937..0fe98001674 100644 --- a/gui/wxpython/lmgr/frame.py +++ b/gui/wxpython/lmgr/frame.py @@ -578,8 +578,7 @@ def CanCloseDisplay(askIfSaveWorkspace): map display notebook layers tree page index """ - pgnum_dict = {} - pgnum_dict["layers"] = self.notebookLayers.GetPageIndex(page) + pgnum_dict = {"layers": self.notebookLayers.GetPageIndex(page)} name = self.notebookLayers.GetPageText(pgnum_dict["layers"]) caption = _("Close Map Display {}").format(name) if not askIfSaveWorkspace or ( @@ -687,8 +686,7 @@ def show_demo(): def AddNvizTools(self, firstTime): """Add nviz notebook page - :param firstTime: if a mapdisplay is starting 3D mode for the - first time + :param firstTime: if a mapdisplay is starting 3D mode for the first time """ Debug.msg(5, "GMFrame.AddNvizTools()") from nviz.main import haveNviz @@ -982,7 +980,7 @@ def RunDisplayCmd(self, command): """Handles display commands. 
:param command: command in a list - :return int: False if failed, True if succcess + :return int: False if failed, True if success """ if not self.currentPage: self.NewDisplay(show=True) @@ -1087,11 +1085,10 @@ def GetMapDisplay(self, onlyCurrent=True): return self.GetLayerTree().GetMapDisplay() return None # -> return list of all mapdisplays - mlist = [] - for idx in range(self.notebookLayers.GetPageCount()): - mlist.append(self.notebookLayers.GetPage(idx).maptree.GetMapDisplay()) - - return mlist + return [ + self.notebookLayers.GetPage(idx).maptree.GetMapDisplay() + for idx in range(self.notebookLayers.GetPageCount()) + ] def GetAllMapDisplays(self): """Get all (open) map displays""" @@ -1396,10 +1393,11 @@ def write_help(): # this is programmer's error # can be relaxed in future # but keep it strict unless needed otherwise - raise ValueError( - "OnChangeCWD cmd parameter must be list of" + msg = ( + f"{self.OnChangeCWD.__name__} cmd parameter must be list of" " length 1 or 2 and 'cd' as a first item" ) + raise ValueError(msg) if cmd and len(cmd) > 2: # this might be a user error write_beginning(command=cmd) @@ -1705,10 +1703,11 @@ def OnAnimationTool(self, event=None, cmd=None): tree = self.GetLayerTree() if tree: - rasters = [] - for layer in tree.GetSelectedLayers(checkedOnly=False): - if tree.GetLayerInfo(layer, key="type") == "raster": - rasters.append(tree.GetLayerInfo(layer, key="maplayer").GetName()) + rasters = [ + tree.GetLayerInfo(layer, key="maplayer").GetName() + for layer in tree.GetSelectedLayers(checkedOnly=False) + if tree.GetLayerInfo(layer, key="type") == "raster" + ] if len(rasters) >= 2: from core.layerlist import LayerList from animation.data import AnimLayer diff --git a/gui/wxpython/lmgr/giface.py b/gui/wxpython/lmgr/giface.py index 67669039d5d..065dfa1d411 100644 --- a/gui/wxpython/lmgr/giface.py +++ b/gui/wxpython/lmgr/giface.py @@ -56,7 +56,7 @@ def __init__(self, tree): def __len__(self): # The list constructor calls __len__ as an optimization if available, # causing a RecursionError - return len([layer for layer in self]) # noqa: C416 + return len([layer for layer in self]) # noqa: C416 # pylint: disable=R1721 def __iter__(self): """Iterates over the contents of the list.""" @@ -121,11 +121,11 @@ def CheckLayer(self, layer, checked=True): self._tree.CheckItem(layer._layer, checked=checked) def SelectLayer(self, layer, select=True): - "Select or unselect layer" + """Select or unselect layer""" self._tree.SelectItem(layer._layer, select) def ChangeLayer(self, layer, **kwargs): - "Change layer (cmd, ltype, opacity)" + """Change layer (cmd, ltype, opacity)""" if "cmd" in kwargs: layer._pydata[0]["cmd"] = kwargs["cmd"] layerName, found = GetLayerNameFromCmd(kwargs["cmd"], fullyQualified=True) diff --git a/gui/wxpython/lmgr/layertree.py b/gui/wxpython/lmgr/layertree.py index 350320d97a7..afed1cccc6f 100644 --- a/gui/wxpython/lmgr/layertree.py +++ b/gui/wxpython/lmgr/layertree.py @@ -1111,9 +1111,10 @@ def OnCopyMap(self, event): def OnHistogram(self, event): """Plot histogram for given raster map layer""" - rasterList = [] - for layer in self.GetSelectedLayers(): - rasterList.append(self.GetLayerInfo(layer, key="maplayer").GetName()) + rasterList = [ + self.GetLayerInfo(layer, key="maplayer").GetName() + for layer in self.GetSelectedLayers() + ] if not rasterList: GError( @@ -1151,11 +1152,12 @@ def OnUnivariateStats(self, event): def OnReportStats(self, event): """Print 2D statistics""" - rasters = [] # TODO: Implement self.GetSelectedLayers(ltype='raster') - for 
layer in self.GetSelectedLayers(): - if self.GetLayerInfo(layer, key="type") == "raster": - rasters.append(self.GetLayerInfo(layer, key="maplayer").GetName()) + rasters = [ + self.GetLayerInfo(layer, key="maplayer").GetName() + for layer in self.GetSelectedLayers() + if self.GetLayerInfo(layer, key="type") == "raster" + ] if rasters: self._giface.RunCmd( @@ -2256,9 +2258,7 @@ def ChangeLayer(self, item): win = self.FindWindowById(self.GetLayerInfo(item, key="ctrl")) if win.GetValue() is not None: cmd = win.GetValue().split(";") - cmdlist = [] - for c in cmd: - cmdlist.append(c.split(" ")) + cmdlist = [c.split(" ") for c in cmd] opac = 1.0 chk = self.IsItemChecked(item) hidden = not self.IsVisible(item) diff --git a/gui/wxpython/lmgr/workspace.py b/gui/wxpython/lmgr/workspace.py index b1e7aa400f0..dd83c1a6f22 100644 --- a/gui/wxpython/lmgr/workspace.py +++ b/gui/wxpython/lmgr/workspace.py @@ -46,7 +46,7 @@ def __init__(self, lmgr, giface): self._giface.workspaceChanged.connect(self.WorkspaceChanged) def WorkspaceChanged(self): - "Update window title" + """Update window title""" self.workspaceChanged = True def New(self): diff --git a/gui/wxpython/location_wizard/dialogs.py b/gui/wxpython/location_wizard/dialogs.py index 079fca1f6d4..74a59cca525 100644 --- a/gui/wxpython/location_wizard/dialogs.py +++ b/gui/wxpython/location_wizard/dialogs.py @@ -550,7 +550,7 @@ def OnValue(self, event): except ValueError as e: if len(event.GetString()) > 0 and event.GetString() != "-": - dlg = wx.MessageBox( + wx.MessageBox( parent=self, message=_("Invalid value: %s") % e, caption=_("Error"), diff --git a/gui/wxpython/location_wizard/wizard.py b/gui/wxpython/location_wizard/wizard.py index 9f3bf14c886..c12fb22860f 100644 --- a/gui/wxpython/location_wizard/wizard.py +++ b/gui/wxpython/location_wizard/wizard.py @@ -34,48 +34,49 @@ @author Hamish Bowman (planetary ellipsoids) """ -import os -import locale -import functools +from __future__ import annotations +import functools +import locale +import os from pathlib import Path +from typing import TYPE_CHECKING import wx import wx.lib.mixins.listctrl as listmix from core import globalvar -if globalvar.wxPythonPhoenix: +if globalvar.wxPythonPhoenix or TYPE_CHECKING: from wx import adv as wiz - from wx.adv import Wizard - from wx.adv import WizardPageSimple + from wx.adv import Wizard, WizardPageSimple else: from wx import wizard as wiz from wx.wizard import Wizard from wx.wizard import WizardPageSimple -import wx.lib.scrolledpanel as scrolled +import wx.lib.scrolledpanel as scrolled from core import utils +from core.gcmd import GError, GWarning, RunCommand from core.utils import cmp -from core.gcmd import RunCommand, GError, GWarning from gui_core.widgets import GenericMultiValidator from gui_core.wrap import ( - SpinCtrl, - SearchCtrl, - StaticText, - TextCtrl, Button, CheckBox, - StaticBox, - NewId, - ListCtrl, HyperlinkCtrl, + ListCtrl, + NewId, + SearchCtrl, + SpinCtrl, + StaticBox, + StaticText, + TextCtrl, ) from location_wizard.dialogs import SelectTransformDialog +from grass.exceptions import OpenError from grass.grassdb.checks import location_exists -from grass.script import decode from grass.script import core as grass -from grass.exceptions import OpenError +from grass.script import decode global coordsys, north, south, east, west, resolution, wizerror, translist @@ -84,6 +85,9 @@ else: search_cancel_evt = wx.EVT_SEARCHCTRL_CANCEL_BTN +if TYPE_CHECKING: + from wx.adv import WizardEvent + class TitledPage(WizardPageSimple): """Class to make wizard 
pages. Generic methods to make labels, @@ -331,7 +335,7 @@ def OnBrowse(self, event): dlg.Destroy() - def OnPageChanging(self, event=None): + def OnPageChanging(self, event: WizardEvent | None = None) -> None: self.location = self.tlocation.GetValue() self.grassdatabase = self.tgisdbase.GetLabel() self.locTitle = self.tlocTitle.GetValue() @@ -416,7 +420,7 @@ def __init__(self, wizard, parent): self.Bind(wx.EVT_RADIOBUTTON, self.SetVal, id=self.radioXy.GetId()) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: global coordsys if not coordsys: @@ -566,7 +570,7 @@ def __init__(self, wizard, parent): self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnPageChanging) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent) -> None: if event.GetDirection() and self.proj not in self.parent.projections.keys(): event.Veto() @@ -598,7 +602,7 @@ def OnText(self, event): self.projdesc = self.parent.projections[self.proj][0] nextButton.Enable() - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: if len(self.proj) == 0: # disable 'next' button by default wx.FindWindowById(wx.ID_FORWARD).Enable(False) @@ -892,7 +896,7 @@ def OnParamEntry(self, event): event.Skip() - def OnPageChange(self, event=None): + def OnPageChange(self, event: WizardEvent | None = None) -> None: """Go to next page""" if event.GetDirection(): self.p4projparams = "" @@ -914,7 +918,7 @@ def OnPageChange(self, event=None): " +" + param["proj4"] + "=" + str(param["value"]) ) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Page entered""" self.projdesc = self.parent.projections[self.parent.projpage.proj][0] if self.prjParamSizer is None: @@ -1059,9 +1063,9 @@ def __init__(self, wizard, parent): self.searchb.ShowCancelButton(True) # create list control for datum/elipsoid list - data = [] - for key in self.parent.datums.keys(): - data.append([key, self.parent.datums[key][0], self.parent.datums[key][1]]) + data = [ + [key, datum[0], datum[1]] for (key, datum) in self.parent.datums.items() + ] self.datumlist = ItemList( self, data=data, columns=[_("Code"), _("Ellipsoid"), _("Description")] ) @@ -1115,7 +1119,7 @@ def __init__(self, wizard, parent): # do page layout # self.DoLayout() - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent): self.proj4params = "" proj = self.parent.projpage.p4proj @@ -1159,7 +1163,7 @@ def OnPageChanging(self, event): self.ellipse ][1] - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: self.parent.datum_trans = None if event.GetDirection(): if len(self.datum) == 0: @@ -1258,10 +1262,10 @@ def __init__(self, wizard, parent): ) # create list control for ellipse list - data = [] # extract code, desc - for key in self.parent.ellipsoids.keys(): - data.append([key, self.parent.ellipsoids[key][0]]) + data = [ + [key, ellipsoid[0]] for (key, ellipsoid) in self.parent.ellipsoids.items() + ] self.ellipselist = ItemList( self, data=data, columns=[_("Code"), _("Description")] @@ -1330,7 +1334,7 @@ def __init__(self, wizard, parent): self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnPageChanging) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: if len(self.ellipse) == 0: # disable 'next' button by default 
wx.FindWindowById(wx.ID_FORWARD).Enable(False) @@ -1339,7 +1343,7 @@ def OnEnterPage(self, event): self.scope = "earth" event.Skip() - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent) -> None: if ( event.GetDirection() and self.ellipse not in self.parent.ellipsoids @@ -1357,7 +1361,7 @@ def OnPageChanging(self, event): # FIXME: index number doesn't translate when you've given a valid name # from the other list def OnText(self, event): - """Ellipspoid code changed""" + """Ellipsoid code changed""" self.ellipse = event.GetString() nextButton = wx.FindWindowById(wx.ID_FORWARD) if len(self.ellipse) == 0 or ( @@ -1465,7 +1469,7 @@ def __init__(self, wizard, parent): # do page layout # self.DoLayout() - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: if len(self.georeffile) == 0: # disable 'next' button by default wx.FindWindowById(wx.ID_FORWARD).Enable(False) @@ -1474,7 +1478,7 @@ def OnEnterPage(self, event): event.Skip() - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent) -> None: if event.GetDirection() and not os.path.isfile(self.georeffile): event.Veto() self.GetNext().SetPrev(self) @@ -1540,7 +1544,7 @@ def __init__(self, wizard, parent): self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnPageChanging) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: if len(self.wktstring) == 0: # disable 'next' button by default wx.FindWindowById(wx.ID_FORWARD).Enable(False) @@ -1549,7 +1553,7 @@ def OnEnterPage(self, event): event.Skip() - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent) -> None: if event.GetDirection() and not self.wktstring.strip(): event.Veto() self.GetNext().SetPrev(self) @@ -1641,7 +1645,7 @@ def __init__(self, wizard, parent): self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnPageChanging) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: self.parent.datum_trans = None if event.GetDirection(): if not self.epsgcode: @@ -1654,7 +1658,7 @@ def OnEnterPage(self, event): event.Skip() - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent): if event.GetDirection(): if not self.epsgcode: event.Veto() @@ -1749,10 +1753,11 @@ def OnBrowseCodes(self, event, search=None): self.epsglist.Populate([], update=True) return - data = [] - for code, val in self.epsgCodeDict.items(): - if code is not None: - data.append((code, val[0], val[1])) + data = [ + (code, val[0], val[1]) + for code, val in self.epsgCodeDict.items() + if code is not None + ] self.epsglist.Populate(data, update=True) @@ -1859,7 +1864,7 @@ def __init__(self, wizard, parent): self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnPageChanging) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: self.parent.datum_trans = None if event.GetDirection(): if not self.epsgcode: @@ -1873,7 +1878,7 @@ def OnEnterPage(self, event): event.Skip() - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent): if event.GetDirection(): if not self.epsgcode: event.Veto() @@ -2021,10 +2026,11 @@ def OnBrowseCodes(self, event, search=None): self.epsglist.Populate([], update=True) return - data = [] - for code, val in self.epsgCodeDict.items(): - if code is not None: - data.append((code, val[0], val[1])) + data = [ + (code, 
val[0], val[1]) + for code, val in self.epsgCodeDict.items() + if code is not None + ] self.epsglist.Populate(data, update=True) @@ -2070,14 +2076,14 @@ def __init__(self, wizard, parent): self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnPageChanging) self.Bind(wiz.EVT_WIZARD_PAGE_CHANGED, self.OnEnterPage) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: if len(self.customstring) == 0: # disable 'next' button by default wx.FindWindowById(wx.ID_FORWARD).Enable(False) else: wx.FindWindowById(wx.ID_FORWARD).Enable(True) - def OnPageChanging(self, event): + def OnPageChanging(self, event: WizardEvent): if event.GetDirection(): self.custom_dtrans_string = "" @@ -2268,7 +2274,7 @@ def _doLayout(self): self.sizer.AddGrowableRow(4, 1) self.sizer.AddGrowableRow(5, 5) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Insert values into text controls for summary of location creation options """ @@ -2283,10 +2289,7 @@ def OnEnterPage(self, event): # print coordsys,proj4string if coordsys in {"proj", "epsg", "iau", "wkt", "file"}: - extra_opts = {} - extra_opts["project"] = "project" - extra_opts["getErrorMsg"] = True - extra_opts["read"] = True + extra_opts = {"project": "project", "getErrorMsg": True, "read": True} if coordsys == "proj": if len(datum) > 0: @@ -2769,7 +2772,7 @@ def CreateProj4String(self): return "%s +no_defs" % proj4string - def OnHelp(self, event): + def OnHelp(self, event: WizardEvent) -> None: """'Help' button clicked""" # help text in lib/init/helptext.html diff --git a/gui/wxpython/main_window/frame.py b/gui/wxpython/main_window/frame.py index 3574e7c8951..7af480925a4 100644 --- a/gui/wxpython/main_window/frame.py +++ b/gui/wxpython/main_window/frame.py @@ -521,9 +521,11 @@ def CanCloseDisplay(askIfSaveWorkspace): map display notebook page index (single window mode) """ - pgnum_dict = {} - pgnum_dict["layers"] = self.notebookLayers.GetPageIndex(page) - pgnum_dict["mainnotebook"] = self.mainnotebook.GetPageIndex(mapdisplay) + pgnum_dict = { + "layers": self.notebookLayers.GetPageIndex(page), + "mainnotebook": self.mainnotebook.GetPageIndex(mapdisplay), + } + name = self.notebookLayers.GetPageText(pgnum_dict["layers"]) caption = _("Close Map Display {}").format(name) if not askIfSaveWorkspace or ( @@ -790,8 +792,7 @@ def show_demo(): def AddNvizTools(self, firstTime): """Add nviz notebook page - :param firstTime: if a mapdisplay is starting 3D mode for the - first time + :param firstTime: if a mapdisplay is starting 3D mode for the first time """ Debug.msg(5, "GMFrame.AddNvizTools()") from nviz.main import haveNviz @@ -1051,7 +1052,7 @@ def _closePageNoEvent(self, pgnum_dict, is_docked): notebook layers tree page index and "mainnotebook" key represent map display notebook page index (single window mode) - boolean is_docked: "True" means that map display is docked in map + :param boolean is_docked: "True" means that map display is docked in map display notebook, "False" means that map display is undocked to independent frame """ @@ -1120,7 +1121,7 @@ def RunDisplayCmd(self, command): """Handles display commands. 
:param command: command in a list - :return int: False if failed, True if succcess + :return int: False if failed, True if success """ if not self.currentPage: self.NewDisplay(show=True) @@ -1239,11 +1240,10 @@ def GetMapDisplay(self, onlyCurrent=True): return self.GetLayerTree().GetMapDisplay() return None # -> return list of all mapdisplays - mlist = [] - for idx in range(self.notebookLayers.GetPageCount()): - mlist.append(self.notebookLayers.GetPage(idx).maptree.GetMapDisplay()) - - return mlist + return [ + self.notebookLayers.GetPage(idx).maptree.GetMapDisplay() + for idx in range(self.notebookLayers.GetPageCount()) + ] def GetAllMapDisplays(self): """Get all (open) map displays""" @@ -1548,10 +1548,11 @@ def write_help(): # this is programmer's error # can be relaxed in future # but keep it strict unless needed otherwise - raise ValueError( - "OnChangeCWD cmd parameter must be list of" + msg = ( + f"{self.OnChangeCWD.__name__} cmd parameter must be a list of" " length 1 or 2 and 'cd' as a first item" ) + raise ValueError(msg) if cmd and len(cmd) > 2: # this might be a user error write_beginning(command=cmd) @@ -1856,10 +1857,11 @@ def OnAnimationTool(self, event=None, cmd=None): tree = self.GetLayerTree() if tree: - rasters = [] - for layer in tree.GetSelectedLayers(checkedOnly=False): - if tree.GetLayerInfo(layer, key="type") == "raster": - rasters.append(tree.GetLayerInfo(layer, key="maplayer").GetName()) + rasters = [ + tree.GetLayerInfo(layer, key="maplayer").GetName() + for layer in tree.GetSelectedLayers(checkedOnly=False) + if tree.GetLayerInfo(layer, key="type") == "raster" + ] if len(rasters) >= 2: from core.layerlist import LayerList from animation.data import AnimLayer diff --git a/gui/wxpython/mapdisp/frame.py b/gui/wxpython/mapdisp/frame.py index 3133341a334..0be010f3432 100644 --- a/gui/wxpython/mapdisp/frame.py +++ b/gui/wxpython/mapdisp/frame.py @@ -22,8 +22,11 @@ @author Stepan Turek (handlers support) """ -import os +from __future__ import annotations + import copy +import os +from typing import TYPE_CHECKING from core import globalvar import wx @@ -40,16 +43,16 @@ from gui_core.query import QueryDialog, PrepareQueryResults from mapwin.buffered import BufferedMapWindow from mapwin.decorations import ( - LegendController, - BarscaleController, ArrowController, + BarscaleController, DtextController, + LegendController, LegendVectController, ) from mapwin.analysis import ( - ProfileController, - MeasureDistanceController, MeasureAreaController, + MeasureDistanceController, + ProfileController, ) from gui_core.forms import GUI from core.giface import Notification @@ -59,9 +62,12 @@ from main_window.page import MainPageBase import grass.script as gs - from grass.pydispatch.signal import Signal +if TYPE_CHECKING: + import lmgr.frame + import main_window.frame + class MapPanel(SingleMapPanel, MainPageBase): """Main panel for map display window. 
Drawing takes place in @@ -76,7 +82,7 @@ def __init__( toolbars=["map"], statusbar=True, tree=None, - lmgr=None, + lmgr: main_window.frame.GMFrame | lmgr.frame.GMFrame | None = None, Map=None, auimgr=None, dockable=False, @@ -296,7 +302,7 @@ def GetMapWindow(self): def _addToolbarVDigit(self): """Add vector digitizer toolbar""" - from vdigit.main import haveVDigit, VDigit + from vdigit.main import VDigit, haveVDigit from vdigit.toolbars import VDigitToolbar if not haveVDigit: @@ -401,7 +407,7 @@ def _updateVDigitLayers(self, layer): def AddNviz(self): """Add 3D view mode window""" - from nviz.main import haveNviz, GLWindow, errorMsg + from nviz.main import GLWindow, errorMsg, haveNviz # check for GLCanvas and OpenGL if not haveNviz: @@ -1142,9 +1148,7 @@ def _queryHighlight(self, vectQuery): self._highlighter_layer.SetMap( vectQuery[0]["Map"] + "@" + vectQuery[0]["Mapset"] ) - tmp = [] - for i in vectQuery: - tmp.append(i["Category"]) + tmp = [i["Category"] for i in vectQuery] self._highlighter_layer.SetCats(tmp) self._highlighter_layer.DrawSelected() @@ -1158,7 +1162,9 @@ def OnQuery(self, event): # change the cursor self.MapWindow.SetNamedCursor("cross") - def AddTmpVectorMapLayer(self, name, cats, useId=False, addLayer=True): + def AddTmpVectorMapLayer( + self, name, cats, useId: bool = False, addLayer: bool = True + ): """Add temporal vector map layer to map composition :param name: name of map layer @@ -1277,11 +1283,11 @@ def Profile(self, rasters=None): def OnHistogramPyPlot(self, event): """Init PyPlot histogram display canvas and tools""" - raster = [] - - for layer in self._giface.GetLayerList().GetSelectedLayers(): - if layer.maplayer.GetType() == "raster": - raster.append(layer.maplayer.GetName()) + raster = [ + layer.maplayer.GetName() + for layer in self._giface.GetLayerList().GetSelectedLayers() + if layer.maplayer.GetType() == "raster" + ] from wxplot.histogram import HistogramPlotFrame @@ -1291,11 +1297,11 @@ def OnHistogramPyPlot(self, event): def OnScatterplot(self, event): """Init PyPlot scatterplot display canvas and tools""" - raster = [] - - for layer in self._giface.GetLayerList().GetSelectedLayers(): - if layer.maplayer.GetType() == "raster": - raster.append(layer.maplayer.GetName()) + raster = [ + layer.maplayer.GetName() + for layer in self._giface.GetLayerList().GetSelectedLayers() + if layer.maplayer.GetType() == "raster" + ] from wxplot.scatter import ScatterFrame @@ -1500,7 +1506,7 @@ def OnSetWindToRegion(self, event): self.MapWindow.SetRegion(zoomOnly=False) def OnSetExtentToWind(self, event): - """Set compulational region extent interactively""" + """Set computational region extent interactively""" self.MapWindow.SetModeDrawRegion() def OnSaveDisplayRegion(self, event): @@ -1601,7 +1607,7 @@ def _switchMapWindow(self, map_win): def AddRDigit(self): """Adds raster digitizer: creates toolbar and digitizer controller, binds events and signals.""" - from rdigit.controller import RDigitController, EVT_UPDATE_PROGRESS + from rdigit.controller import EVT_UPDATE_PROGRESS, RDigitController from rdigit.toolbars import RDigitToolbar self.rdigit = RDigitController(self._giface, mapWindow=self.GetMapWindow()) diff --git a/gui/wxpython/mapdisp/gprint.py b/gui/wxpython/mapdisp/gprint.py index 3172f58eab5..e95ca1003f7 100644 --- a/gui/wxpython/mapdisp/gprint.py +++ b/gui/wxpython/mapdisp/gprint.py @@ -26,21 +26,6 @@ def __init__(self, canvas): wx.Printout.__init__(self) self.canvas = canvas - def OnBeginDocument(self, start, end): - return super().OnBeginDocument(start, 
end) - - def OnEndDocument(self): - super().OnEndDocument() - - def OnBeginPrinting(self): - super().OnBeginPrinting() - - def OnEndPrinting(self): - super().OnEndPrinting() - - def OnPreparePrinting(self): - super().OnPreparePrinting() - def HasPage(self, page) -> bool: return page <= 2 diff --git a/gui/wxpython/mapdisp/statusbar.py b/gui/wxpython/mapdisp/statusbar.py index 8cf5c3b6ff7..7e60e762281 100644 --- a/gui/wxpython/mapdisp/statusbar.py +++ b/gui/wxpython/mapdisp/statusbar.py @@ -223,11 +223,13 @@ def Reposition(self): It should not be necessary to call it manually. """ - widgets = [] - for item in self.statusbarItems.values(): - widgets.append((item.GetPosition(), item.GetWidget())) - - widgets.append((1, self.progressbar.GetWidget())) + widgets = [ + *[ + (item.GetPosition(), item.GetWidget()) + for item in self.statusbarItems.values() + ], + (1, self.progressbar.GetWidget()), + ] for idx, win in widgets: if not win: @@ -756,7 +758,7 @@ def Show(self): self.SetValue(e.message) # TODO: remove these excepts, they just hide errors, solve problems # differently - except TypeError as e: + except TypeError: self.SetValue("") except AttributeError: # during initialization MapFrame has no MapWindow diff --git a/gui/wxpython/mapdisp/test_mapdisp.py b/gui/wxpython/mapdisp/test_mapdisp.py index 1494988fafb..d2265e235b7 100755 --- a/gui/wxpython/mapdisp/test_mapdisp.py +++ b/gui/wxpython/mapdisp/test_mapdisp.py @@ -44,7 +44,7 @@ # %end """ -Module to run test map window (BufferedWidnow) and map display (MapFrame). +Module to run test map window (BufferedMapWindow) and map display (MapFrame). @author Vaclav Petras """ @@ -267,10 +267,11 @@ def testMapWindowProfile(self, giface, map_): self.controller = ProfileController(giface, window) self.controller.Start() - rasters = [] - for layer in giface.GetLayerList().GetSelectedLayers(): - if layer.maplayer.GetType() == "raster": - rasters.append(layer.maplayer.GetName()) + rasters = [ + layer.maplayer.GetName() + for layer in giface.GetLayerList().GetSelectedLayers() + if layer.maplayer.GetType() == "raster" + ] from wxplot.profile import ProfileFrame diff --git a/gui/wxpython/mapswipe/g.gui.mapswipe.html b/gui/wxpython/mapswipe/g.gui.mapswipe.html index 58c2e28d264..105cf612c74 100644 --- a/gui/wxpython/mapswipe/g.gui.mapswipe.html +++ b/gui/wxpython/mapswipe/g.gui.mapswipe.html @@ -6,7 +6,7 @@

    DESCRIPTION

which allows the user to interactively compare two raster maps of the same area by revealing different parts of the raster maps. It is useful e.g. for comparing raster maps from different time periods.
-Map Swipe can be launched from the menu File -> Map Swipe.
+Map Swipe can be launched from the menu File -> Map Swipe.

Map Swipe allows you to:
@@ -32,14 +32,14 @@

    DESCRIPTION

MODIS image on March 13, 2011, shows a clear view of tsunami flooding along the coastline. Water, black and dark blue in these false-color images, still covers the ground as much as five kilometers (three miles) from the coast.
-Source: Earth Observatory/NASA
+Source: Earth Observatory/NASA

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
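The wizard-page hunks above (ii2t_manager.py and location_wizard/wizard.py) all apply the same typing pattern: postponed annotations plus a TYPE_CHECKING-only import of wx.adv.WizardEvent, so handlers can be annotated with WizardEvent | None without requiring wx.adv at runtime on classic wxPython. Below is a minimal sketch of that pattern; WizardEvent is the real wx.adv class imported in those hunks, while ExamplePage, its handler body, and the omission of the runtime Phoenix/classic import switch are illustrative simplifications only.

    # Sketch only: ExamplePage is hypothetical; WizardEvent is the wx.adv class
    # the hunks above import under TYPE_CHECKING.
    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        from wx.adv import WizardEvent  # needed for static type checking only


    class ExamplePage:
        def OnPageChanging(self, event: WizardEvent | None = None) -> None:
            # With postponed evaluation the annotation stays a string at runtime,
            # so wx.adv never has to be importable when this module executes.
            if event is not None and event.GetDirection():
                pass

The same gate used in the patches, `if globalvar.wxPythonPhoenix or TYPE_CHECKING:`, keeps the classic-wx fallback working at runtime while still giving type checkers the Phoenix names.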
    diff --git a/gui/wxpython/mapwin/base.py b/gui/wxpython/mapwin/base.py index c05cc8f840b..3a53599cb38 100644 --- a/gui/wxpython/mapwin/base.py +++ b/gui/wxpython/mapwin/base.py @@ -271,7 +271,6 @@ def OnMouseAction(self, event): ) event.Skip() - Emits mouseHandlerRegistered signal before handler is registered. :param event: one of mouse events diff --git a/gui/wxpython/mapwin/buffered.py b/gui/wxpython/mapwin/buffered.py index 16b6b329ef6..7e42ebc57c9 100644 --- a/gui/wxpython/mapwin/buffered.py +++ b/gui/wxpython/mapwin/buffered.py @@ -555,14 +555,14 @@ def TextBounds(self, textinfo, relcoords=False): boxh = math.fabs(math.sin(math.radians(rotation)) * w) + h boxw = math.fabs(math.cos(math.radians(rotation)) * w) + h - if rotation > 0 and rotation < 90: + if 0 < rotation < 90: bbox[1] -= boxh relCoords = (0, boxh) - elif rotation >= 90 and rotation < 180: + elif 90 <= rotation < 180: bbox[0] -= boxw bbox[1] -= boxh relCoords = (boxw, boxh) - elif rotation >= 180 and rotation < 270: + elif 180 <= rotation < 270: bbox[0] -= boxw relCoords = (boxw, 0) bbox[2] = boxw @@ -1062,16 +1062,13 @@ def DrawCompRegionExtent(self): dispReg = self.Map.GetCurrentRegion() reg = dispReg if utils.isInRegion(dispReg, compReg) else compReg - regionCoords = [] - regionCoords.extend( - ( - (reg["w"], reg["n"]), - (reg["e"], reg["n"]), - (reg["e"], reg["s"]), - (reg["w"], reg["s"]), - (reg["w"], reg["n"]), - ) - ) + regionCoords = [ + (reg["w"], reg["n"]), + (reg["e"], reg["n"]), + (reg["e"], reg["s"]), + (reg["w"], reg["s"]), + (reg["w"], reg["n"]), + ] # draw region extent self.polypen = wx.Pen( @@ -1214,8 +1211,7 @@ def DrawLines(self, pdc=None, polycoords=None): Set self.pline to wx.NEW_ID + 1 :param polycoords: list of polyline vertices, geographical - coordinates (if not given, self.polycoords - is used) + coordinates (if not given, self.polycoords is used) """ if not pdc: pdc = self.pdcTmp @@ -2102,7 +2098,7 @@ def DisplayToWind(self): self.UpdateMap(render=False) def SetRegion(self, zoomOnly=True): - """Set display extents/compulational region from named region + """Set display extents/computational region from named region file. :param zoomOnly: zoom to named region only (computational region is not saved) @@ -2110,7 +2106,7 @@ def SetRegion(self, zoomOnly=True): if zoomOnly: label = _("Zoom to saved region extents") else: - label = _("Set compulational region from named region") + label = _("Set computational region from named region") dlg = SavedRegion(parent=self, title=label, loadsave="load") if dlg.ShowModal() == wx.ID_CANCEL or not dlg.GetName(): @@ -2144,7 +2140,7 @@ def SetRegion(self, zoomOnly=True): self.UpdateMap() def SaveRegion(self, display=True): - """Save display extents/compulational region to named region + """Save display extents/computational region to named region file. 
:param display: True for display extends otherwise computational region diff --git a/gui/wxpython/mapwin/decorations.py b/gui/wxpython/mapwin/decorations.py index 9c643d17752..57da7ec2265 100644 --- a/gui/wxpython/mapwin/decorations.py +++ b/gui/wxpython/mapwin/decorations.py @@ -301,7 +301,7 @@ def GetPlacement(self, screensize): else: b, t, l, r = ( float(number) for number in param.split("=")[1].split(",") - ) # pylint: disable-msg=W0612 + ) # pylint: disable=W0612 x = int((l / 100.0) * screensize[0]) y = int((1 - t / 100.0) * screensize[1]) diff --git a/gui/wxpython/mapwin/graphics.py b/gui/wxpython/mapwin/graphics.py index 0bc94bae6d2..ad673fd24ce 100644 --- a/gui/wxpython/mapwin/graphics.py +++ b/gui/wxpython/mapwin/graphics.py @@ -200,6 +200,7 @@ def AddItem(self, coords, penName=None, label=None, hide=False): Could be 'point' or 'line' according to graphicsType. :param coords: list of east, north coordinates (double) of item. + Example: * point: [1023, 122] @@ -211,8 +212,7 @@ def AddItem(self, coords, penName=None, label=None, hide=False): relevant just for 'point' type. :type label: str :param hide: if it is True, the item is not drawn when self.Draw - is called. Hidden items are also counted in drawing - order. + is called. Hidden items are also counted in drawing order. :type hide: bool :return: (GraphicsSetItem) - added item reference """ @@ -363,7 +363,7 @@ def SetItemDrawOrder(self, item, drawNum): :return: True if order was changed :return: False if drawNum is out of range or item was not found """ - if drawNum < len(self.itemsList) and drawNum >= 0 and item in self.itemsList: + if 0 <= drawNum < len(self.itemsList) and item in self.itemsList: self.itemsList.insert( drawNum, self.itemsList.pop(self.itemsList.index(item)) ) @@ -388,7 +388,7 @@ def _clearId(self, drawid): """Clears old object before drawing new object.""" try: self.pdc.ClearId(drawid) - except: + except (wx.PyDeadObjectError, KeyError): pass diff --git a/gui/wxpython/modules/colorrules.py b/gui/wxpython/modules/colorrules.py index dcb34619b57..d5d74eaf08e 100644 --- a/gui/wxpython/modules/colorrules.py +++ b/gui/wxpython/modules/colorrules.py @@ -203,8 +203,7 @@ def OnRuleEnable(self, event): self.mainPanel.FindWindowById(id + 1000).Enable() self.mainPanel.FindWindowById(id + 2000).Enable() if self.mapType == "vector" and not self.parent.GetParent().colorTable: - vals = [] - vals.append(self.mainPanel.FindWindowById(id + 1000).GetValue()) + vals = [self.mainPanel.FindWindowById(id + 1000).GetValue()] try: vals.append(self.mainPanel.FindWindowById(id + 1 + 1000).GetValue()) except AttributeError: @@ -273,8 +272,7 @@ def SetRasterRule(self, num, val): def SetVectorRule(self, num, val): """Set vector rule""" - vals = [] - vals.append(val) + vals = [val] try: vals.append(self.mainPanel.FindWindowById(num + 1).GetValue()) except AttributeError: @@ -725,9 +723,7 @@ def ReadColorTable(self, ctable): self.rulesPanel.ruleslines[count]["value"] = value self.rulesPanel.ruleslines[count]["color"] = color self.rulesPanel.mainPanel.FindWindowById(count + 1000).SetValue(value) - rgb = [] - for c in color.split(":"): - rgb.append(int(c)) + rgb = [int(c) for c in color.split(":")] self.rulesPanel.mainPanel.FindWindowById(count + 2000).SetColour(rgb) # range try: diff --git a/gui/wxpython/modules/import_export.py b/gui/wxpython/modules/import_export.py index db4e3b517a2..c6e3b9c6d42 100644 --- a/gui/wxpython/modules/import_export.py +++ b/gui/wxpython/modules/import_export.py @@ -337,11 +337,9 @@ def AddLayers(self, 
returncode, cmd=None, userData=None): if self.importType == "gdal": nBands = int(userData.get("nbands", 1)) if userData else 1 - if UserSettings.Get(group="rasterLayer", key="opaque", subkey="enabled"): - nFlag = True - else: - nFlag = False - + nFlag = bool( + UserSettings.Get(group="rasterLayer", key="opaque", subkey="enabled") + ) for i in range(1, nBands + 1): nameOrig = name if nBands > 1: diff --git a/gui/wxpython/nviz/mapwindow.py b/gui/wxpython/nviz/mapwindow.py index b6609427895..cf0bd87b740 100644 --- a/gui/wxpython/nviz/mapwindow.py +++ b/gui/wxpython/nviz/mapwindow.py @@ -18,18 +18,20 @@ @author Anna Kratochvilova (Google SoC 2011) """ +from __future__ import annotations + +import copy +import math import os import sys import time -import copy -import math - from threading import Thread +from typing import TYPE_CHECKING import wx from wx.lib.newevent import NewEvent from wx import glcanvas -from wx.glcanvas import WX_GL_RGBA, WX_GL_DOUBLEBUFFER, WX_GL_DEPTH_SIZE +from wx.glcanvas import WX_GL_DEPTH_SIZE, WX_GL_DOUBLEBUFFER, WX_GL_RGBA import grass.script as gs from grass.pydispatch.signal import Signal @@ -45,6 +47,10 @@ from core.utils import str2rgb from core.giface import Notification +if TYPE_CHECKING: + import lmgr.frame + import main_window.frame + wxUpdateProperties, EVT_UPDATE_PROP = NewEvent() wxUpdateView, EVT_UPDATE_VIEW = NewEvent() wxUpdateLight, EVT_UPDATE_LIGHT = NewEvent() @@ -74,7 +80,16 @@ def GetDisplay(self): class GLWindow(MapWindowBase, glcanvas.GLCanvas): """OpenGL canvas for Map Display Window""" - def __init__(self, parent, giface, frame, Map, tree, lmgr, id=wx.ID_ANY): + def __init__( + self, + parent, + giface, + frame, + Map, + tree, + lmgr: main_window.frame.GMFrame | lmgr.frame.GMFrame, + id=wx.ID_ANY, + ) -> None: """All parameters except for id are mandatory. 
The todo is to remove them completely.""" self.parent = parent @@ -335,7 +350,7 @@ def ComputeFlyValues(self, mx, my): self.fly["value"][2] = -my * 100.0 * self.fly["interval"] / 1000.0 def ChangeFlySpeed(self, increase): - """Increase/decrease flight spped""" + """Increase/decrease flight speed""" if increase: self.fly["flySpeed"] += self.fly["flySpeedStep"] else: @@ -1759,8 +1774,7 @@ def LoadVector(self, item, points=None, append=True): """Load 2D or 3D vector map overlay :param item: layer item - :param points: True to load points, False to load lines, None - to load both + :param points: True to load points, False to load lines, None to load both :param bool append: append vector to layer list """ layer = self.tree.GetLayerInfo(item, key="maplayer") diff --git a/gui/wxpython/nviz/preferences.py b/gui/wxpython/nviz/preferences.py index 9bd4614a670..ebd7e5d66b5 100644 --- a/gui/wxpython/nviz/preferences.py +++ b/gui/wxpython/nviz/preferences.py @@ -258,7 +258,7 @@ def _createFlyPage(self, notebook): notebook.AddPage(page=panel, text=" %s " % _("Fly-through")) pageSizer = wx.BoxSizer(wx.VERTICAL) - # fly throuhg mode + # fly through mode box = StaticBox( parent=panel, id=wx.ID_ANY, label=" %s " % (_("Fly-through mode")) ) diff --git a/gui/wxpython/nviz/tools.py b/gui/wxpython/nviz/tools.py index 77f379ef019..49cbec8067e 100644 --- a/gui/wxpython/nviz/tools.py +++ b/gui/wxpython/nviz/tools.py @@ -296,10 +296,8 @@ def _resizeScrolledPanel(self, foldPanelBar, scrolledPanel, collapsed, expanded) :param obj foldPanelBar: FolPanelBar widget obj instance :param obj scrolledPanel: ScrolledPanel widget obj instance - :param int collapsed: number of collapsed panels of FoldPanelBar - widget - :param int expanded: number of expanded panels of FoldPanelBar - widget + :param int collapsed: number of collapsed panels of FoldPanelBar widget + :param int expanded: number of expanded panels of FoldPanelBar widget """ if expanded > 0: foldPanelBar.Expand(foldPanelBar.GetFoldPanel(0)) @@ -1938,9 +1936,10 @@ def _createVectorPage(self, parent): def GselectOnPopup(self, ltype, exclude=False): """Update gselect.Select() items""" - maps = [] - for layer in self.mapWindow.Map.GetListOfLayers(ltype=ltype, active=True): - maps.append(layer.GetName()) + maps = [ + layer.GetName() + for layer in self.mapWindow.Map.GetListOfLayers(ltype=ltype, active=True) + ] return maps, exclude def _createVolumePage(self, parent): @@ -3791,7 +3790,7 @@ def OnSurfaceModeAll(self, event): for name in self.mapWindow.GetLayerNames(type="raster"): data = self._getLayerPropertiesByName(name, mapType="raster") if not data: - continue # shouldy no happen + continue # should not happen data["surface"]["draw"]["all"] = True data["surface"]["draw"]["mode"] = { @@ -3835,10 +3834,7 @@ def _get3dRange(self, name): """Gelper func for getting range of 3d map""" ret = RunCommand("r3.info", read=True, flags="r", map=name) if ret: - range = [] - for value in ret.strip("\n").split("\n"): - range.append(float(value.split("=")[1])) - return range + return [float(value.split("=")[1]) for value in ret.strip("\n").split("\n")] return -1e6, 1e6 @@ -5185,11 +5181,9 @@ def UpdatePage(self, pageId): self.EnablePage("constant", True) elif pageId == "cplane": count = self._display.GetCPlanesCount() - choices = [ - _("None"), + choices = [_("None")] + [ + "%s %i" % (_("Plane"), plane + 1) for plane in range(count) ] - for plane in range(count): - choices.append("%s %i" % (_("Plane"), plane + 1)) 
self.FindWindowById(self.win["cplane"]["planes"]).SetItems(choices) current = 0 for i, cplane in enumerate(self.mapWindow.cplanes): diff --git a/gui/wxpython/nviz/workspace.py b/gui/wxpython/nviz/workspace.py index ac9e0582b2d..13df2ad981d 100644 --- a/gui/wxpython/nviz/workspace.py +++ b/gui/wxpython/nviz/workspace.py @@ -30,9 +30,7 @@ def __init__(self): def SetConstantDefaultProp(self): """Set default constant data properties""" - data = {} - for key, value in UserSettings.Get(group="nviz", key="constant").items(): - data[key] = value + data = dict(UserSettings.Get(group="nviz", key="constant").items()) color = ( str(data["color"][0]) + ":" @@ -137,10 +135,7 @@ def SetVolumeDefaultProp(self): group="nviz", key="volume", subkey=["draw", "mode"] ) desc = "isosurface" if sel == 0 else "slice" - data["draw"]["mode"] = { - "value": sel, - "desc": desc, - } + data["draw"]["mode"] = {"value": sel, "desc": desc} elif control == "box": box = UserSettings.Get( group="nviz", key="volume", subkey=["draw", "box"] @@ -184,10 +179,11 @@ def SetIsosurfaceDefaultProp(self): def SetSliceDefaultProp(self): """Set default slice properties""" - data = {} - data["position"] = copy.deepcopy( - UserSettings.Get(group="nviz", key="volume", subkey="slice_position") - ) + data = { + "position": copy.deepcopy( + UserSettings.Get(group="nviz", key="volume", subkey="slice_position") + ) + } data["position"]["update"] = None data["transp"] = copy.deepcopy( @@ -321,8 +317,7 @@ def GetDrawMode(self, mode=None, style=None, shade=None, string=False): :param mode: :param style: :param shade: - :param string: if True input parameters are strings otherwise - selections + :param string: if True input parameters are strings otherwise selections """ if not wxnviz: return None diff --git a/gui/wxpython/nviz/wxnviz.py b/gui/wxpython/nviz/wxnviz.py index d11e03d290b..63be18a69c1 100644 --- a/gui/wxpython/nviz/wxnviz.py +++ b/gui/wxpython/nviz/wxnviz.py @@ -34,7 +34,7 @@ "This module requires the NumPy module, which could not be " "imported. It probably is not installed (it's not part of the " "standard Python distribution). See the Numeric Python site " - "(http://numpy.scipy.org) for information on downloading source or " + "(https://numpy.org) for information on downloading source or " "binaries." 
) print("wxnviz.py: " + msg, file=sys.stderr) @@ -289,7 +289,8 @@ def print_progress(value): class Nviz: - def __init__(self, glog, gprogress): + + def __init__(self, glog, gprogress) -> None: """Initialize Nviz class instance :param glog: logging area @@ -310,7 +311,8 @@ def __init__(self, glog, gprogress): self.data = pointer(self.data_obj) self.color_obj = Colors() self.color = pointer(self.color_obj) - + self.width: int + self.height: int self.width = self.height = -1 self.showLight = False @@ -355,13 +357,13 @@ def GetLongDim(self): """Get longest dimension, used for initial size of north arrow""" return Nviz_get_longdim(self.data) - def SetViewDefault(self): + def SetViewDefault(self) -> tuple[float, float, float, float]: """Set default view (based on loaded data) :return: z-exag value, default, min and max height """ # determine z-exag - z_exag = Nviz_get_exag() + z_exag: float = Nviz_get_exag() Nviz_change_exag(self.data, z_exag) # determine height @@ -402,7 +404,7 @@ def SetView(self, x, y, height, persp, twist): twist, ) - def GetViewpointPosition(self): + def GetViewpointPosition(self) -> tuple[float, float, float]: x = c_double() y = c_double() h = c_double() @@ -411,7 +413,7 @@ def GetViewpointPosition(self): return (x.value, y.value, h.value) - def LookHere(self, x, y, scale=1): + def LookHere(self, x, y, scale: float = 1) -> None: """Look here feature :param x,y: screen coordinates """ @@ -435,12 +437,12 @@ def GetFocus(self): return x.value, y.value, z.value return -1, -1, -1 - def SetFocus(self, x, y, z): + def SetFocus(self, x: float, y: float, z: float) -> None: """Set focus""" Debug.msg(3, "Nviz::SetFocus()") Nviz_set_focus(self.data, x, y, z) - def GetViewdir(self): + def GetViewdir(self) -> tuple[float, float, float]: """Get viewdir""" Debug.msg(3, "Nviz::GetViewdir()") dir = (c_float * 3)() @@ -448,7 +450,7 @@ def GetViewdir(self): return dir[0], dir[1], dir[2] - def SetViewdir(self, x, y, z): + def SetViewdir(self, x: float, y: float, z: float) -> None: """Set viewdir""" Debug.msg(3, "Nviz::SetViewdir(): x=%f, y=%f, z=%f" % (x, y, z)) dir = (c_float * 3)() @@ -466,7 +468,7 @@ def SetZExag(self, z_exag): Debug.msg(3, "Nviz::SetZExag(): z_exag=%f", z_exag) return Nviz_change_exag(self.data, z_exag) - def Draw(self, quick, quick_mode): + def Draw(self, quick: bool, quick_mode: int) -> None: """Draw canvas Draw quick mode: @@ -487,12 +489,12 @@ def Draw(self, quick, quick_mode): else: Nviz_draw_all(self.data) - def EraseMap(self): + def EraseMap(self) -> None: """Erase map display (with background color)""" Debug.msg(1, "Nviz::EraseMap()") GS_clear(Nviz_get_bgcolor(self.data)) - def InitView(self): + def InitView(self) -> None: """Initialize view""" # initialize nviz data Nviz_init_data(self.data) @@ -508,14 +510,24 @@ def InitView(self): Debug.msg(1, "Nviz::InitView()") - def SetBgColor(self, color_str): + def SetBgColor(self, color_str: str) -> None: """Set background color :param str color_str: color string """ Nviz_set_bgcolor(self.data, Nviz_color_from_str(color_str)) - def SetLight(self, x, y, z, color, bright, ambient, w=0, lid=1): + def SetLight( + self, + x: float, + y: float, + z: float, + color, + bright: float, + ambient: float, + w: float = 0, + lid: int = 1, + ) -> None: """Change lighting settings :param x,y,z: position @@ -1048,7 +1060,7 @@ def SetWireColor(self, id, color_str): """ Debug.msg(3, "Nviz::SetWireColor(): id=%d, color=%s", id, color_str) - color = Nviz_color_from_str(color_str) + color: int = Nviz_color_from_str(color_str) if id > 0: if not 
GS_surf_exists(id): @@ -2001,11 +2013,11 @@ def SetVolumeDrawBox(self, id, ifBox): def GetCPlaneCurrent(self): return Nviz_get_current_cplane(self.data) - def GetCPlanesCount(self): + def GetCPlanesCount(self) -> int: """Returns number of cutting planes""" return Nviz_num_cplanes(self.data) - def GetCPlaneRotation(self): + def GetCPlaneRotation(self) -> tuple[float, float, float]: """Returns rotation parameters of current cutting plane""" x, y, z = c_float(), c_float(), c_float() @@ -2014,7 +2026,7 @@ def GetCPlaneRotation(self): return x.value, y.value, z.value - def GetCPlaneTranslation(self): + def GetCPlaneTranslation(self) -> tuple[float, float, float]: """Returns translation parameters of current cutting plane""" x, y, z = c_float(), c_float(), c_float() @@ -2023,7 +2035,7 @@ def GetCPlaneTranslation(self): return x.value, y.value, z.value - def SetCPlaneRotation(self, x, y, z): + def SetCPlaneRotation(self, x: float, y: float, z: float) -> None: """Set current clip plane rotation :param x,y,z: rotation parameters @@ -2032,7 +2044,7 @@ def SetCPlaneRotation(self, x, y, z): Nviz_set_cplane_rotation(self.data, current, x, y, z) Nviz_draw_cplane(self.data, -1, -1) - def SetCPlaneTranslation(self, x, y, z): + def SetCPlaneTranslation(self, x: float, y: float, z: float) -> None: """Set current clip plane translation :param x,y,z: translation parameters @@ -2067,18 +2079,18 @@ def UnselectCPlane(self, index): """ Nviz_off_cplane(self.data, index) - def SetFenceColor(self, index): - """Select current cutting plane + def SetFenceColor(self, type: int) -> None: + """Set appropriate fence color - :param index: type of fence - from 0 (off) to 4 + :param type: type of fence - from 0 (off) to 4 """ - Nviz_set_fence_color(self.data, index) + Nviz_set_fence_color(self.data, type) - def GetXYRange(self): + def GetXYRange(self) -> float: """Get xy range""" return Nviz_get_xyrange(self.data) - def GetZRange(self): + def GetZRange(self) -> tuple[float, float]: """Get z range""" min, max = c_float(), c_float() Nviz_get_zrange(self.data, byref(min), byref(max)) @@ -2105,12 +2117,12 @@ def SaveToFile(self, filename, width=20, height=20, itype="ppm"): self.ResizeWindow(widthOrig, heightOrig) - def DrawLightingModel(self): + def DrawLightingModel(self) -> None: """Draw lighting model""" if self.showLight: Nviz_draw_model(self.data) - def DrawFringe(self): + def DrawFringe(self) -> None: """Draw fringe""" Nviz_draw_fringe(self.data) @@ -2147,11 +2159,13 @@ def SetArrow(self, sx, sy, size, color): """ return Nviz_set_arrow(self.data, sx, sy, size, Nviz_color_from_str(color)) - def DeleteArrow(self): + def DeleteArrow(self) -> None: """Delete north arrow""" Nviz_delete_arrow(self.data) - def SetScalebar(self, id, sx, sy, size, color): + def SetScalebar( + self, id: int, sx: int, sy: int, size: float, color: str + ): # -> struct_scalebar_data | None: """Set scale bar from canvas coordinates :param sx,sy: canvas coordinates @@ -2163,11 +2177,11 @@ def SetScalebar(self, id, sx, sy, size, color): self.data, id, sx, sy, size, Nviz_color_from_str(color) ) - def DrawScalebar(self): + def DrawScalebar(self) -> None: """Draw scale bar""" - return Nviz_draw_scalebar(self.data) + Nviz_draw_scalebar(self.data) - def DeleteScalebar(self, id): + def DeleteScalebar(self, id: int) -> None: """Delete scalebar""" Nviz_delete_scalebar(self.data, id) @@ -2224,7 +2238,9 @@ def GetDistanceAlongSurface(self, sid, p1, p2, useExag=True): return d.value - def GetRotationParameters(self, dx, dy): + def GetRotationParameters( + self, dx: 
float, dy: float + ) -> tuple[float, float, float, float]: """Get rotation parameters (angle, x, y, z axes) :param dx,dy: difference from previous mouse drag event @@ -2248,7 +2264,7 @@ def GetRotationParameters(self, dx, dy): return angle, x, y, z - def Rotate(self, angle, x, y, z): + def Rotate(self, angle: float, x: float, y: float, z: float) -> None: """Set rotation parameters Rotate scene (difference from current state). @@ -2257,11 +2273,11 @@ def Rotate(self, angle, x, y, z): """ Nviz_set_rotation(angle, x, y, z) - def UnsetRotation(self): + def UnsetRotation(self) -> None: """Stop rotating the scene""" Nviz_unset_rotation() - def ResetRotation(self): + def ResetRotation(self) -> None: """Reset scene rotation""" Nviz_init_rotation() @@ -2288,7 +2304,7 @@ def FlyThrough(self, flyInfo, mode, exagInfo): """Fly through the scene :param flyInfo: fly parameters - :param mode: 0 or 1 for different fly behaviour + :param mode: 0 or 1 for different fly behavior :param exagInfo: parameters changing fly speed """ fly = (c_float * 3)() @@ -2335,13 +2351,13 @@ def __init__(self, filepath, overlayId, coords): else: self.textureId = self.Load() - def __del__(self): + def __del__(self) -> None: """Delete texture""" if self.textureId: Nviz_del_texture(self.textureId) gs.try_remove(self.path) - def Resize(self): + def Resize(self) -> None: """Resize image to match 2^n""" n = m = 1 while self.width > pow(2, n): @@ -2349,11 +2365,14 @@ def Resize(self): while self.height > pow(2, m): m += 1 self.image.Resize(size=(pow(2, n), pow(2, m)), pos=(0, 0)) - self.width = self.image.GetWidth() - self.height = self.image.GetHeight() + self.width: int = self.image.GetWidth() + self.height: int = self.image.GetHeight() + + def Load(self) -> int: + """Load image to texture - def Load(self): - """Load image to texture""" + :return: The texture id + """ bytesPerPixel = 4 if self.image.HasAlpha() else 3 bytes = bytesPerPixel * self.width * self.height rev_val = self.height - 1 @@ -2388,36 +2407,36 @@ def Load(self): return Nviz_load_image(im, self.width, self.height, self.image.HasAlpha()) - def Draw(self): + def Draw(self) -> None: """Draw texture as an image""" Nviz_draw_image( self.coords[0], self.coords[1], self.width, self.height, self.textureId ) - def HitTest(self, x, y, radius): + def HitTest(self, x: int, y: int, radius: int) -> bool: copy = Rect(self.coords[0], self.coords[1], self.orig_width, self.orig_height) copy.Inflate(radius, radius) return copy.ContainsXY(x, y) - def MoveTexture(self, dx, dy): + def MoveTexture(self, dx: int, dy: int) -> None: """Move texture on the screen""" self.coords[0] += dx self.coords[1] += dy - def SetCoords(self, coords): + def SetCoords(self, coords: tuple[int, int]) -> None: """Set coordinates""" dx = coords[0] - self.coords[0] dy = coords[1] - self.coords[1] self.MoveTexture(dx, dy) - def GetId(self): + def GetId(self) -> int: """Returns image id.""" return self.id - def SetActive(self, active=True): - self.active = active + def SetActive(self, active: bool = True) -> None: + self.active: bool = active - def IsActive(self): + def IsActive(self) -> bool: return self.active @@ -2440,11 +2459,11 @@ def GetCmd(self): """Returns overlay command.""" return self.cmd - def Corresponds(self, item): + def Corresponds(self, item) -> bool: return sorted(self.GetCmd()) == sorted(item.GetCmd()) -__all__ = [ +__all__: list[str] = [ "DM_FLAT", "DM_GOURAUD", "DM_GRID_SURF", diff --git a/gui/wxpython/photo2image/g.gui.photo2image.html b/gui/wxpython/photo2image/g.gui.photo2image.html index 
f958f9baf7e..af113100511 100644 --- a/gui/wxpython/photo2image/g.gui.photo2image.html +++ b/gui/wxpython/photo2image/g.gui.photo2image.html @@ -32,7 +32,7 @@

    Screenshot of g.gui.photo2image

    -Screenshot of g.gui.photo2image +Screenshot of g.gui.photo2image
    Figure: Screenshot of g.gui.photo2image
    @@ -40,7 +40,7 @@

    Screenshot of g.gui.photo2image

    For a detailed operation manual please read

    -wxGUI
    +wxGUI, wxGUI components
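
Note on the return-type annotations added to wxnviz.py above: those wrappers allocate ctypes buffers, let the GRASS C library fill them, and hand back plain Python floats, which is what makes a return type such as tuple[float, float, float] accurate. A minimal, self-contained sketch of that pattern (the real Nviz_get_viewdir() call is replaced by a stand-in assignment so the snippet runs without the C libraries):

```python
from ctypes import c_float


def get_viewdir() -> tuple[float, float, float]:
    """Illustrative only: fill a C float array and return plain Python floats."""
    direction = (c_float * 3)()  # same buffer type wxnviz.py passes to Nviz_get_viewdir()
    direction[0], direction[1], direction[2] = 0.0, 0.0, 1.0  # stand-in for the C call
    return (direction[0], direction[1], direction[2])


print(get_viewdir())  # (0.0, 0.0, 1.0) -- indexing a c_float array yields Python floats
```
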
    diff --git a/gui/wxpython/photo2image/g.gui.photo2image.py b/gui/wxpython/photo2image/g.gui.photo2image.py index fe153b236cc..443a43f2d81 100755 --- a/gui/wxpython/photo2image/g.gui.photo2image.py +++ b/gui/wxpython/photo2image/g.gui.photo2image.py @@ -67,7 +67,7 @@ # %end """ -Module to run GCP management tool as stadalone application. +Module to run GCP management tool as standalone application. """ import os import grass.script as gs diff --git a/gui/wxpython/photo2image/ip2i_manager.py b/gui/wxpython/photo2image/ip2i_manager.py index 0e6168a8edd..dc5713e961a 100644 --- a/gui/wxpython/photo2image/ip2i_manager.py +++ b/gui/wxpython/photo2image/ip2i_manager.py @@ -20,7 +20,7 @@ @author Original author Michael Barton @author Original version improved by Martin Landa -@author Rewritten by Markus Metz redesign georectfier -> GCP Manage +@author Rewritten by Markus Metz redesign georectifier -> GCP Manage @author Support for GraphicsSet added by Stepan Turek (2012) @author Yann modified: graphical replacement of i.photo.2image (was in v6 using Vask lib) @@ -308,6 +308,7 @@ def __init__( lmgr=None, camera=None, ): + # pylint: disable=super-init-not-called; See InitMapDisplay() self.grwiz = grwiz # GR Wizard self._giface = giface @@ -336,10 +337,10 @@ def __init__( # register data structures for drawing GCP's # self.pointsToDrawTgt = self.TgtMapWindow.RegisterGraphicsToDraw( - graphicsType="point", setStatusFunc=self.SetGCPSatus + graphicsType="point", setStatusFunc=self.SetGCPStatus ) self.pointsToDrawSrc = self.SrcMapWindow.RegisterGraphicsToDraw( - graphicsType="point", setStatusFunc=self.SetGCPSatus + graphicsType="point", setStatusFunc=self.SetGCPStatus ) # connect to the map windows signals @@ -426,14 +427,13 @@ def __init__( GMessage(_("A POINTS file exists, renaming it to POINTS_BAK")) # """Make a POINTS file """ - import re try: fc = open(self.file["camera"]) fc_count = 0 for line in fc: fc_count += 1 - if re.search("NUM", line): + if "NUM" in line: storeLine = fc_count numberOfFiducial = int(line.split()[-1]) dataFiducialX = [] @@ -772,13 +772,12 @@ def SetSettings(self): font = self.GetFont() font.SetPointSize(int(spx) + 2) - textProp = {} - textProp["active"] = True - textProp["font"] = font + textProp = {"active": True, "font": font} + self.pointsToDrawSrc.SetPropertyVal("text", textProp) self.pointsToDrawTgt.SetPropertyVal("text", copy(textProp)) - def SetGCPSatus(self, item, itemIndex): + def SetGCPStatus(self, item, itemIndex): """Before GCP is drawn, decides it's colour and whether it will be drawn. """ @@ -1033,7 +1032,7 @@ def ReloadGCPs(self, event): targetMapWin.UpdateMap(render=False) def OnFocus(self, event): - # TODO: it is here just to remove old or obsolete beavior of base class + # TODO: it is here just to remove old or obsolete behavior of base class # gcp/MapPanel? 
# self.grwiz.SwitchEnv('source') pass @@ -1145,7 +1144,7 @@ def OnGeorectDone(self, **kargs): """Print final message""" global maptype if maptype == "raster": - return + pass def OnSettings(self, event): """GCP Manager settings""" @@ -1979,11 +1978,11 @@ def __init__( size=wx.DefaultSize, style=wx.DEFAULT_DIALOG_STYLE, ): - wx.Dialog.__init__(self, parent, id, title, pos, size, style) """ Dialog to set profile text options: font, title and font size, axis labels and font size """ + wx.Dialog.__init__(self, parent, id, title, pos, size, style) # # initialize variables # diff --git a/gui/wxpython/psmap/dialogs.py b/gui/wxpython/psmap/dialogs.py index 428ae248c87..25319e21976 100644 --- a/gui/wxpython/psmap/dialogs.py +++ b/gui/wxpython/psmap/dialogs.py @@ -792,11 +792,12 @@ def OnChoice(self, event): currPaper = self.paperTable[self.getCtrl("Format").GetSelection()] currUnit = self.unitConv.findUnit(self.getCtrl("Units").GetStringSelection()) currOrientIdx = self.getCtrl("Orientation").GetSelection() - newSize = {} - for item in self.cat[3:]: - newSize[item] = self.unitConv.convert( + newSize = { + item: self.unitConv.convert( float(currPaper[item]), fromUnit="inch", toUnit=currUnit ) + for item in self.cat[3:] + } enable = True if currPaper["Format"] != _("custom"): @@ -5470,10 +5471,11 @@ def _positionPanel(self, notebook): ) sizerR = wx.StaticBoxSizer(box, wx.VERTICAL) flexSizer = wx.FlexGridSizer(rows=3, cols=3, hgap=5, vgap=5) - ref = [] - for row in ["upper", "center", "lower"]: - for col in ["left", "center", "right"]: - ref.append(row + " " + col) + ref = [ + row + " " + col + for row in ["upper", "center", "lower"] + for col in ["left", "center", "right"] + ] self.radio = [ RadioButton(panel, id=wx.ID_ANY, label="", style=wx.RB_GROUP, name=ref[0]) ] diff --git a/gui/wxpython/psmap/frame.py b/gui/wxpython/psmap/frame.py index 22194ece0c0..bb71fd856a5 100644 --- a/gui/wxpython/psmap/frame.py +++ b/gui/wxpython/psmap/frame.py @@ -1972,7 +1972,7 @@ def OnDragging(self, event): pdcType = "rect" lineCoords = None if r[2] < 2 or r[3] < 2: - # to avoid strange behaviour + # to avoid strange behavior return self.Draw( diff --git a/gui/wxpython/psmap/g.gui.psmap.html b/gui/wxpython/psmap/g.gui.psmap.html index 02fc0d9960f..068b2ae6724 100644 --- a/gui/wxpython/psmap/g.gui.psmap.html +++ b/gui/wxpython/psmap/g.gui.psmap.html @@ -18,9 +18,9 @@

    DESCRIPTION

    Possible output files:

      -
    • ps.map instructions file -
    • PostScript/EPS file -
    • PDF (using ps2pdf) +
    • ps.map instructions file
    • +
    • PostScript/EPS file
    • +
    • PDF (using ps2pdf)
    @@ -38,23 +38,23 @@

    DESCRIPTION

    Currently supported ps.map instructions:
      -
    • paper -
    • maploc -
    • scale -
    • border -
    • raster -
    • colortable -
    • vpoints -
    • vlines -
    • vareas -
    • vlegend -
    • text -
    • scalebar -
    • mapinfo -
    • point -
    • line -
    • rectangle -
    • labels +
    • paper
    • +
    • maploc
    • +
    • scale
    • +
    • border
    • +
    • raster
    • +
    • colortable
    • +
    • vpoints
    • +
    • vlines
    • +
    • vareas
    • +
    • vlegend
    • +
    • text
    • +
    • scalebar
    • +
    • mapinfo
    • +
    • point
    • +
    • line
    • +
    • rectangle
    • +
    • labels

    CARTOGRAPHIC COMPOSER TOOLBAR

    @@ -203,7 +203,7 @@

    CARTOGRAPHIC COMPOSER TOOLBAR

    SEE ALSO

    - wxGUI
    + wxGUI, wxGUI components
    diff --git a/gui/wxpython/psmap/instructions.py b/gui/wxpython/psmap/instructions.py index dcdfb907383..4528e0a0d93 100644 --- a/gui/wxpython/psmap/instructions.py +++ b/gui/wxpython/psmap/instructions.py @@ -140,10 +140,7 @@ def AddInstruction(self, instruction): def FindInstructionByType(self, type, list=False): """Find instruction(s) with the given type""" - inst = [] - for each in self.instruction: - if each.type == type: - inst.append(each) + inst = [each for each in self.instruction if each.type == type] if len(inst) == 1 and not list: return inst[0] return inst @@ -1676,8 +1673,7 @@ def __str__(self): def Read(self, instruction, text, **kwargs): """Read instruction and save information""" - instr = {} - instr["rLegend"] = True + instr = {"rLegend": True} for line in text: try: if line.startswith("where"): @@ -1840,8 +1836,7 @@ def __str__(self): def Read(self, instruction, text, **kwargs): """Read instruction and save information""" - instr = {} - instr["vLegend"] = True + instr = {"vLegend": True} for line in text: try: if line.startswith("where"): @@ -1906,8 +1901,7 @@ def __str__(self): def Read(self, instruction, text): """Read instruction and save information""" - instr = {} - instr["isRaster"] = True + instr = {"isRaster": True} try: map = text.split()[1] except IndexError: diff --git a/gui/wxpython/psmap/utils.py b/gui/wxpython/psmap/utils.py index 4e754fafd39..10b10f96297 100644 --- a/gui/wxpython/psmap/utils.py +++ b/gui/wxpython/psmap/utils.py @@ -16,8 +16,15 @@ @author Anna Kratochvilova """ +from __future__ import annotations + +from math import ceil, cos, floor, fmod, radians, sin +from typing import overload + import wx -from math import ceil, floor, sin, cos, pi +from core.gcmd import GError, RunCommand + +import grass.script as gs try: from PIL import Image as PILImage # noqa @@ -26,9 +33,6 @@ except ImportError: havePILImage = False -import grass.script as gs -from core.gcmd import RunCommand, GError - class Rect2D(wx.Rect2D): """Class representing rectangle with floating point values. 
@@ -145,7 +149,17 @@ def convert(self, value, fromUnit=None, toUnit=None): return float(value) / self._units[fromUnit]["val"] * self._units[toUnit]["val"] -def convertRGB(rgb): +@overload +def convertRGB(rgb: wx.Colour) -> str: + pass + + +@overload +def convertRGB(rgb: str) -> wx.Colour | None: + pass + + +def convertRGB(rgb: wx.Colour | str) -> str | wx.Colour | None: """Converts wx.Colour(r,g,b,a) to string 'r:g:b' or named color, or named color/r:g:b string to wx.Colour, depending on input""" # transform a wx.Colour tuple into an r:g:b string @@ -159,12 +173,10 @@ def convertRGB(rgb): return name return str(rgb.Red()) + ":" + str(rgb.Green()) + ":" + str(rgb.Blue()) # transform a GRASS named color or an r:g:b string into a wx.Colour tuple - color = ( - int(gs.parse_color(rgb)[0] * 255), - int(gs.parse_color(rgb)[1] * 255), - int(gs.parse_color(rgb)[2] * 255), - ) - color = wx.Colour(*color) + parsed_color = gs.parse_color(rgb) + if parsed_color is None: + return None + color = wx.Colour(*tuple(int(x * 255) for x in parsed_color)) if color.IsOk(): return color return None @@ -396,23 +408,27 @@ def getRasterType(map): return None -def BBoxAfterRotation(w, h, angle): - """Compute bounding box or rotated rectangle +def BBoxAfterRotation(w: float, h: float, angle: float) -> tuple[int, int]: + """Compute the bounding box of a rotated rectangle :param w: rectangle width :param h: rectangle height :param angle: angle (0, 360) in degrees """ - angleRad = angle / 180.0 * pi - ct = cos(angleRad) - st = sin(angleRad) - - hct = h * ct - wct = w * ct - hst = h * st - wst = w * st + + angle = fmod(angle, 360) + angleRad: float = radians(angle) + ct: float = cos(angleRad) + st: float = sin(angleRad) + + hct: float = h * ct + wct: float = w * ct + hst: float = h * st + wst: float = w * st y = x = 0 + if angle == 0: + return (ceil(w), ceil(h)) if 0 < angle <= 90: y_min = y y_max = y + hct + wst @@ -433,7 +449,10 @@ def BBoxAfterRotation(w, h, angle): y_max = y + hct x_min = x x_max = x + wct - hst + else: + msg = "The angle argument should be between 0 and 360 degrees" + raise ValueError(msg) - width = int(ceil(abs(x_max) + abs(x_min))) - height = int(ceil(abs(y_max) + abs(y_min))) - return width, height + width: int = ceil(abs(x_max) + abs(x_min)) + height: int = ceil(abs(y_max) + abs(y_min)) + return (width, height) diff --git a/gui/wxpython/rdigit/dialogs.py b/gui/wxpython/rdigit/dialogs.py index fa9f57b8253..d0aa967a909 100644 --- a/gui/wxpython/rdigit/dialogs.py +++ b/gui/wxpython/rdigit/dialogs.py @@ -91,7 +91,7 @@ def OnBackgroundMap(self, event): ret = grast.raster_info(value) self._typeChoice.SetStringSelection(ret["datatype"]) except CalledModuleError: - return + pass def OnOK(self, event): mapName = self.GetMapName() diff --git a/gui/wxpython/rdigit/g.gui.rdigit.html b/gui/wxpython/rdigit/g.gui.rdigit.html index becb12affd7..7705ef8c7be 100644 --- a/gui/wxpython/rdigit/g.gui.rdigit.html +++ b/gui/wxpython/rdigit/g.gui.rdigit.html @@ -74,9 +74,9 @@

    EXAMPLES

    SEE ALSO

    - wxGUI
    - wxGUI components,
    - r.in.poly (backend of digitizer),
    + wxGUI, + wxGUI components, + r.in.poly (backend of digitizer), g.gui.vdigit
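
Aside on the convertRGB() hunk in psmap/utils.py above: it layers typing.overload stubs over a single runtime implementation, so type checkers know that a wx.Colour argument yields a string and a string argument yields a wx.Colour (or None). A minimal sketch of the same pattern with made-up names; the stubs exist only for the type checker and only the final definition runs:

```python
from __future__ import annotations

from typing import overload


@overload
def to_str(value: int) -> str: ...
@overload
def to_str(value: None) -> None: ...


def to_str(value: int | None) -> str | None:
    # Single runtime implementation; the @overload stubs above only narrow types.
    return None if value is None else str(value)


print(to_str(3), to_str(None))  # 3 None
```
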
    diff --git a/gui/wxpython/rdigit/g.gui.rdigit.py b/gui/wxpython/rdigit/g.gui.rdigit.py index ae8d9ed41d8..5b38d858835 100755 --- a/gui/wxpython/rdigit/g.gui.rdigit.py +++ b/gui/wxpython/rdigit/g.gui.rdigit.py @@ -154,7 +154,7 @@ def _addLayer(self, name, ltype="raster"): :param str name: map name :param str ltype: layer type """ - mapLayer = self._mapObj.AddLayer( + self._mapObj.AddLayer( ltype=ltype, name=name, command=["d.rast", "map={}".format(name)], diff --git a/gui/wxpython/rdigit/toolbars.py b/gui/wxpython/rdigit/toolbars.py index d3d9aed5eb9..ba7e8e7e6bc 100644 --- a/gui/wxpython/rdigit/toolbars.py +++ b/gui/wxpython/rdigit/toolbars.py @@ -207,7 +207,7 @@ def _cellValueChanged(self): value = float(value) self._controller.SetCellValue(value) except ValueError: - return + pass def _widthValueChanged(self): value = self._widthValue.GetValue() @@ -216,7 +216,6 @@ def _widthValueChanged(self): self._controller.SetWidthValue(value) except ValueError: self._controller.SetWidthValue(0) - return def _changeDrawColor(self): color = self._color.GetColour() diff --git a/gui/wxpython/rlisetup/frame.py b/gui/wxpython/rlisetup/frame.py index 3905fa9b09d..dd93e607cea 100644 --- a/gui/wxpython/rlisetup/frame.py +++ b/gui/wxpython/rlisetup/frame.py @@ -213,12 +213,13 @@ def _layout(self): def ListFiles(self): """Check the configuration files inside the path""" # list of configuration file - listfiles = [] # return all the configuration files in self.rlipath, check if there are # link or directory and doesn't add them - for rli_conf in os.listdir(self.rlipath): - if os.path.isfile(os.path.join(self.rlipath, rli_conf)): - listfiles.append(rli_conf) + listfiles = [ + rli_conf + for rli_conf in os.listdir(self.rlipath) + if os.path.isfile(os.path.join(self.rlipath, rli_conf)) + ] return sorted(listfiles) def OnClose(self, event): diff --git a/gui/wxpython/rlisetup/functions.py b/gui/wxpython/rlisetup/functions.py index fc9d3c3d202..6a6eec30515 100644 --- a/gui/wxpython/rlisetup/functions.py +++ b/gui/wxpython/rlisetup/functions.py @@ -12,15 +12,15 @@ class SamplingType: - """ " + """ KMVWINC = samplingtype=moving, regionbox=keyboard, shape=circle KMVWINR = samplingtype moving, regionbox=keyboard, shape=rectangle MMVWINC = samplingtype=moving, regionbox=mouse, shape=circle MMVWINR = samplingtype moving, regionbox=mouse, shape=rectangle - KUNITSC = samplingtype=units, regionbox=keyboard, shape=cirlce + KUNITSC = samplingtype=units, regionbox=keyboard, shape=circle KUNITSR = samplingtype=units, regionbox=keyboard, shape=rectangle - MUNITSC = samplingtype=units, regionbox=mouse, shape=cirlce + MUNITSC = samplingtype=units, regionbox=mouse, shape=circle MUNITSR = samplingtype=units, regionbox=mouse, shape=rectangle """ @@ -141,7 +141,7 @@ def sampleAreaVector( vect=vect.split("@")[0], rast=rast.split("@")[0] ) rast_name = "{pref}{cat}".format(pref=outpref, cat=cat) - # check if raster already axist + # check if raster already exists if ( len(grass.list_strings("raster", pattern=rast_name, mapset=".")) == 1 diff --git a/gui/wxpython/rlisetup/g.gui.rlisetup.html b/gui/wxpython/rlisetup/g.gui.rlisetup.html index 1dc786e0b10..cda5d236ccd 100644 --- a/gui/wxpython/rlisetup/g.gui.rlisetup.html +++ b/gui/wxpython/rlisetup/g.gui.rlisetup.html @@ -61,14 +61,14 @@

    Usage details

    and other notes (disposition of sample areas etc). --> Configuration files are saved in the folder - C:\Users\userxy\AppData\Roaming\GRASS8\r.li\ (MS-Windows) or - $HOME/.r.li/ (GNU/Linux) (the file name can be + C:\Users\userxy\AppData\Roaming\GRASS8\r.li\ (MS-Windows) or + $HOME/.r.li/ (GNU/Linux) (the file name can be defined by the user). The output or an analysis can either be a new raster map (in case of using a "moving window" analysis) or be an ASCII text file (when not performing a "moving window" analysis) containing the result. Such text file will be saved in the folder - C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ - (MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). + C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ + (MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).
    All dimensions are percentages of raster rows or columns. @@ -78,76 +78,76 @@

    Usage details

    1. Choose file name and maps to use for setting: -
        -
      • Name for new configuration file(required): the name - of new configuration file
      • -
      • Raster map name to use to select areas (required): +
          +
        • Name for new configuration file(required): the name + of new configuration file
        • +
        • Raster map name to use to select areas (required): the name of raster map used for selecting sampling areas
        • -
        • Vector map to overlay (optional): name of a +
        • Vector map to overlay (optional): name of a vector map used for selecting sampling areas
        • -
        +
    2. Set the sampling frame. The sample frame is a rectangular area which contains all the areas to analyze. It can be defined in three ways: -
        -
      • Whole map layer: the sample frame is the whole map
      • -
      • Keyboard setting: the user enters the coordinates in - cells of upper left corner of sampling frame and its length in - rows and columns.
      • -
      • Draw the sample frame: the user draws the sample frame - on map using mouse.
      • -
      +
        +
      • Whole map layer: the sample frame is the whole map
      • +
      • Keyboard setting: the user enters the coordinates, in + cells, of the upper left corner of the sampling frame and its length in + rows and columns.
      • +
      • Draw the sample frame: the user draws the sample frame + on the map using the mouse.
      • +
    3. Set the sample areas. The sample areas are simply the areas to analyze. They can be defined in five ways (see the picture below): -
        -
      • Whole map layer: the sample area is the whole sample - frame
      • -
      • Regions: the user enters the number of areas and then - draws them using mouse.
      • -
      • Sample units: they are areas of rectangular or circular - shape. The user can define them using keyboard or mouse. -
          -
        • keyboard: the user define the shape of sample unists and - their disposition: -
            -
          • Random non overlapping: the user specifies - the number of sample units and they are placed in a - random way at runtime. It is guaranteed that the - areas do not intersect themselves.
          • -
          • Systematic contiguous: the defined sample - is placed covering the sample frame, side by side - across rows.
          • -
          • Systematic non contiguous: the same as above, - but here ever rectangle is spaced from another by - a specified number of cells.
          • -
          • Stratified random: the sample frame is - divided in n strats of rows and m strats of columns - (n and m are given by user), then the specified - number of sample areas are placed in a random way, - one for every m*n areas defined by strats.
          • -
          • Centered over sites: the sample areas - are placed into sample frame centering them on points - in site file.
          • -
          -
        • -
        • mouse: the user chooses the shape and then draws the - specified number of sample areas on map.
        • -
        +
          +
        • Whole map layer: the sample area is the whole sample + frame
        • +
        • Regions: the user enters the number of areas and then + draws them using the mouse.
        • +
        • Sample units: they are areas of rectangular or circular + shape. The user can define them using keyboard or mouse. +
            +
          • keyboard: the user defines the shape of sample units and + their disposition: +
              +
            • Random non overlapping: the user specifies + the number of sample units and they are placed in a + random way at runtime. It is guaranteed that the + areas do not intersect each other.
            • +
            • Systematic contiguous: the defined sample + is placed covering the sample frame, side by side + across rows.
            • +
            • Systematic non contiguous: the same as above, + but here every rectangle is spaced from another by + a specified number of cells.
            • +
            • Stratified random: the sample frame is + divided into n strata of rows and m strata of columns + (n and m are given by the user), then the specified + number of sample areas are placed in a random way, + one for every m*n areas defined by the strata.
            • +
            • Centered over sites: the sample areas + are placed into the sample frame, centered on the points + in the site file.
            • +
            +
          • +
          • mouse: the user chooses the shape and then draws the + specified number of sample areas on the map.
          • +
        • -
        • Moving Window: the user defines a rectangular or - circular area, it is moved over all the raster increasing only - of a cell for every move(in columns if possible, if not in rows). - It produces a new raster containing the result of all analysis.
        • -
        • Select areas from the overlaid vector map: - the sample areas are defined by the vector map selected above. - For every cat in vector map, the procedure prompts the - user if they want to include it as sample area. - The resulting configuration file can be used only with the - specified raster map, and the procedure can be used only if - whole map layer is selected as sampling frame.
        • -
        +
      • Moving Window: the user defines a rectangular or + circular area; it is moved over the whole raster, shifting by one + cell at every move (across columns if possible, otherwise across rows). + It produces a new raster containing the results of all analyses.
      • +
      • Select areas from the overlaid vector map: + the sample areas are defined by the vector map selected above. + For every category in the vector map, the procedure asks the + user whether to include it as a sample area. + The resulting configuration file can be used only with the + specified raster map, and the procedure can be used only if + the whole map layer is selected as the sampling frame.
      • +
    @@ -165,7 +165,7 @@

    NOTES

    Screenshots of the wizard window frames:
    - +
     g.gui.rlisetup: First frame of wizard for selecting existing configuration files or creating a new one @@ -315,7 +315,7 @@

    Whole region analysis

    r.li.shannon input=lsat7_2000_40 conf=whole_region output=lsat7_2000_40_shannon -The result is the new text file "forests_p_dens7" (stored in folder $HOME/.r.li/output/. +The result is the new text file "forests_p_dens7" (stored in the folder $HOME/.r.li/output/).
    See the respective modules for further examples. @@ -323,12 +323,12 @@

    REFERENCES

    McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

    SEE ALSO

    -r.li - package overview
    +r.li (package overview), r.li.daemon
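
As a usage note for the g.gui.rlisetup documentation above: the whole-region r.li.shannon example can also be run through grass.script. A hedged sketch, assuming a GRASS session where the lsat7_2000_40 raster and the wizard-generated "whole_region" configuration file exist:

```python
import grass.script as gs

# Python equivalent of the r.li.shannon command shown in the EXAMPLES above.
gs.run_command(
    "r.li.shannon",
    input="lsat7_2000_40",
    conf="whole_region",
    output="lsat7_2000_40_shannon",
)
```
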

    diff --git a/gui/wxpython/rlisetup/sampling_frame.py b/gui/wxpython/rlisetup/sampling_frame.py index a379ef8c09c..bc6f71f16c5 100644 --- a/gui/wxpython/rlisetup/sampling_frame.py +++ b/gui/wxpython/rlisetup/sampling_frame.py @@ -315,7 +315,6 @@ def _onToolChanged(self): def _radiusDrawn(self, x, y): """When drawing finished, get region values""" mouse = self.mapWindow.mouse - item = self._registeredGraphics.GetItem(0) p1 = mouse["begin"] p2 = mouse["end"] dist, (north, east) = self.mapWindow.Distance(p1, p2, False) @@ -328,9 +327,9 @@ def _radiusDrawn(self, x, y): circle.point[0], circle.point[1], circle.radius ) self._registeredGraphics.Draw() - self.createCricle(circle) + self.createCircle(circle) - def createCricle(self, c): + def createCircle(self, c): dlg = wx.TextEntryDialog( None, "Name of sample circle region", @@ -432,7 +431,7 @@ def _rectangleDrawn(self): dlg.Destroy() elif self.samplingtype != SamplingType.WHOLE: - """When drawing finished, get region values""" + # When drawing finished, get region values self.sampleFrameChanged.emit(region=region) diff --git a/gui/wxpython/rlisetup/wizard.py b/gui/wxpython/rlisetup/wizard.py index 6fb7988159d..fb23affbd25 100644 --- a/gui/wxpython/rlisetup/wizard.py +++ b/gui/wxpython/rlisetup/wizard.py @@ -4,8 +4,7 @@ @brief GUI per r.li.setup module Classes: - - RLiSetupFrame (first frame to show existing conf file and choose some - operation) + - RLiSetupFrame (first frame to show existing conf file and choose some operation) - RLIWizard (the main wizard) - FirstPage (first page of wizard, choose name of conf file, raster, vector, sampling region) @@ -20,28 +19,33 @@ @author Luca Delucchi """ +from __future__ import annotations + import os +from typing import TYPE_CHECKING import wx from core.globalvar import wxPythonPhoenix -if wxPythonPhoenix: +if wxPythonPhoenix or TYPE_CHECKING: from wx import adv as wiz from wx.adv import Wizard else: from wx import wizard as wiz from wx.wizard import Wizard -import wx.lib.scrolledpanel as scrolled +import wx.lib.scrolledpanel as scrolled +from core.gcmd import GError, GMessage, RunCommand from gui_core import gselect from gui_core.wrap import Button, StaticText, TextCtrl from location_wizard.wizard import GridBagSizerTitledPage as TitledPage from rlisetup.functions import checkValue, retRLiPath from rlisetup.sampling_frame import RLiSetupMapPanel + +from grass.exceptions import CalledModuleError from grass.script import core as grass from grass.script import raster as grast from grass.script import vector as gvect -from grass.exceptions import CalledModuleError from .functions import ( SamplingType, @@ -50,7 +54,9 @@ obtainCategories, sampleAreaVector, ) -from core.gcmd import GError, GMessage, RunCommand + +if TYPE_CHECKING: + from wx.adv import WizardEvent class RLIWizard: @@ -360,7 +366,7 @@ def _write_area(self, fil): fil.write("SAMPLEAREA -1|-1|%r|%r" % (rl, cl)) fil.write("|%s" % self.msAreaList[0].raster) fil.write("\nMOVINGWINDOW\n") - # KUNITSC = samplingtype=units, regionbox=keyboard, shape=cirlce + # KUNITSC = samplingtype=units, regionbox=keyboard, shape=circle # KUNITSR = samplingtype=units, regionbox=keyboard, shape=rectangle elif samtype in {SamplingType.KUNITSC, SamplingType.KUNITSR}: if samtype == SamplingType.KUNITSC: @@ -374,13 +380,13 @@ def _write_area(self, fil): fil.write("SAMPLEAREA -1|-1|%r|%r\n" % (rl, cl)) if self.units.distrtype == "non_overlapping": fil.write("RANDOMNONOVERLAPPING %s\n" % self.units.distr1) - elif self.units.distrtype == "systematic_contiguos": + elif 
self.units.distrtype == "systematic_contiguous": fil.write("SYSTEMATICCONTIGUOUS\n") elif self.units.distrtype == "stratified_random": fil.write( "STRATIFIEDRANDOM %s|%s\n" % (self.units.distr1, self.units.distr2) ) - elif self.units.distrtype == "systematic_noncontiguos": + elif self.units.distrtype == "systematic_noncontiguous": fil.write("SYSTEMATICNONCONTIGUOUS %s\n" % self.units.distr1) elif self.units.distrtype == "centered_oversites": fil.write("") @@ -388,7 +394,7 @@ def _write_area(self, fil): # elif self.samplingareapage.samplingtype == SamplingType.UNITS and # self.samplingareapage.regionbox=='mouse': - # MUNITSC = samplingtype=units, regionbox=mouse, shape=cirlce + # MUNITSC = samplingtype=units, regionbox=mouse, shape=circle # MUNITSR = samplingtype=units, regionbox=mouse, shape=rectangle elif self.samplingareapage.samplingtype in { SamplingType.MUNITSR, @@ -687,12 +693,12 @@ def OnVector(self, event): def OnLayer(self, event): try: self.vectorlayer = self.vectlayer.GetValue() - except: + except AttributeError: self.vectorlayer = None next = wx.FindWindowById(wx.ID_FORWARD) next.Enable(self.CheckInput()) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Sets the default values, for the entire map""" next = wx.FindWindowById(wx.ID_FORWARD) next.Enable(self.CheckInput()) @@ -721,9 +727,7 @@ def CheckVector(self, vector): ) return False, [] if links > 0: - layers = [] - for i in range(1, links + 1): - layers.append(str(i)) + layers = [str(i) for i in range(1, links + 1)] return True, layers return False, [] @@ -735,7 +739,7 @@ def CheckInput(self): """ return bool(self.conf_name and bool(self.rast and bool(self.VectorEnabled))) - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" next = wx.FindWindowById(wx.ID_FORWARD) next.Enable(self.CheckInput()) @@ -749,7 +753,6 @@ def OnExitPage(self, event=None): elif self.region == "draw": self.SetNext(self.parent.drawsampleframepage) self.parent.samplingareapage.SetPrev(self.parent.drawsampleframepage) - return class KeyboardPage(TitledPage): @@ -891,7 +894,7 @@ def OnEnterPage(self, event): self.ColLentxt.SetValue(self.col_len) self.RowLentxt.SetValue(self.row_len) - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" if ( self.row_len == "" @@ -923,7 +926,7 @@ def SampleFrameChanged(self, region): else: wx.FindWindowById(wx.ID_FORWARD).Enable(False) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Function during entering""" if self.mapPanel is None: self.mapPanel = RLiSetupMapPanel(self, samplingType="drawFrame") @@ -951,7 +954,7 @@ def OnEnterPage(self, event): render=True, ) - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" if event.GetDirection(): self.SetNext(self.parent.samplingareapage) @@ -1071,7 +1074,7 @@ def OnNumRegions(self, event): else: wx.FindWindowById(wx.ID_FORWARD).Enable(False) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Insert values into text controls for summary of location creation options """ @@ -1230,7 +1233,7 @@ def afterRegionDrawn(self, marea): ) wx.FindWindowById(wx.ID_FORWARD).Enable(False) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Function during entering""" if self.parent.samplingareapage.samplingtype == 
SamplingType.WHOLE: self.title.SetLabel(_("Draw moving windows region")) @@ -1265,12 +1268,12 @@ def OnEnterPage(self, event): render=True, ) - # def OnExitPage(self, event=None): - # Function during exiting - # print event.GetDirection() - # if event.GetDirection(): - # self.SetNext(self.parent.samplingareapage) - # self.parent.samplingareapage.SetPrev(self) + # def OnExitPage(self, event: WizardEvent | None = None) -> None: + # """Function during exiting""" + # print(event.GetDirection()) + # if event.GetDirection(): + # self.SetNext(self.parent.samplingareapage) + # self.parent.samplingareapage.SetPrev(self) class SampleUnitsKeyPage(TitledPage): @@ -1400,14 +1403,14 @@ def __init__(self, wizard, parent): # self.Bind(wiz.EVT_WIZARD_PAGE_CHANGING, self.OnExitPage) self.OnType(None) - def OnEnterPage(self, event=None): + def OnEnterPage(self, event: WizardEvent | None = None) -> None: """Function during entering""" # This is an hack to force the user to choose Rectangle or Circle self.typeBox.SetSelection(2), self.typeBox.ShowItem(2, False) self.panelSizer.Layout() - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" if event.GetDirection(): self.SetNext(self.parent.summarypage) @@ -1434,7 +1437,7 @@ def OnDistr(self, event): self.panelSizer.Hide(self.distr2Txt) self.panelSizer.Layout() elif chosen == 1: - self.distrtype = "systematic_contiguos" + self.distrtype = "systematic_contiguous" self.distr1Label.SetLabel("") self.distr2Label.SetLabel("") self.panelSizer.Hide(self.distr1Txt) @@ -1448,7 +1451,7 @@ def OnDistr(self, event): self.panelSizer.Show(self.distr2Txt) self.panelSizer.Layout() elif chosen == 3: - self.distrtype = "systematic_noncontiguos" + self.distrtype = "systematic_noncontiguous" self.distr1Label.SetLabel(_("Insert distance between units")) self.panelSizer.Show(self.distr1Txt) self.distr2Label.SetLabel("") @@ -1545,7 +1548,7 @@ def __init__(self, wizard, parent): self.heightTxt.Bind(wx.EVT_TEXT, self.OnHeight) wx.FindWindowById(wx.ID_FORWARD).Enable(False) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: # This is an hack to force the user to choose Rectangle or Circle # self.typeBox.SetSelection(2), # self.typeBox.ShowItem(2, False) @@ -1634,7 +1637,7 @@ def __init__(self, wizard, parent): self.OnType(None) self.regionNumTxt.SetValue("") - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Function during entering""" if self.numregions: wx.FindWindowById(wx.ID_FORWARD).Enable(True) @@ -1693,7 +1696,7 @@ def OnNumRegions(self, event): else: wx.FindWindowById(wx.ID_FORWARD).Enable(False) - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" if event.GetDirection(): self.SetNext(self.drawsampleunitspage) @@ -1730,7 +1733,7 @@ def SampleFrameChanged(self, region): ) wx.FindWindowById(wx.ID_FORWARD).Enable(False) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Function during entering""" if self.parent.samplingareapage.samplingtype in { @@ -1777,9 +1780,8 @@ def OnEnterPage(self, event): render=True, ) - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" - # if event.GetDirection(): # self.SetNext(self.parent.samplingareapage) # self.parent.samplingareapage.SetPrev(self) @@ -1889,7 +1891,7 @@ def newCat(self): 
self.map_.DeleteLayer(layer) self.areaText.SetLabel("Is this area (cat={n}) ok?".format(n=cat)) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Function during entering: draw the raster map and the first vector feature""" if self.mapPanel is None: @@ -1929,7 +1931,7 @@ def OnEnterPage(self, event): ) self.newCat() - def OnExitPage(self, event=None): + def OnExitPage(self, event: WizardEvent | None = None) -> None: """Function during exiting""" grass.del_temp_region() @@ -2136,7 +2138,7 @@ def __init__(self, wizard, parent): flag=wx.ALIGN_LEFT | wx.ALIGN_CENTER_VERTICAL | wx.ALL, ) - def OnEnterPage(self, event): + def OnEnterPage(self, event: WizardEvent) -> None: """Insert values into text controls for summary of location creation options """ @@ -2182,7 +2184,7 @@ def OnEnterPage(self, event): if self.parent.units.distrtype == "non_overlapping": self.unitsmorelabel.SetLabel(_("Number sampling units:")) self.unitsmoretxt.SetLabel(self.parent.units.distr1) - elif self.parent.units.distrtype == "systematic_noncontiguos": + elif self.parent.units.distrtype == "systematic_noncontiguous": self.unitsmorelabel.SetLabel(_("Distance between units:")) self.unitsmoretxt.SetLabel(self.parent.units.distr1) elif self.parent.units.distrtype == "stratified_random": diff --git a/gui/wxpython/startup/locdownload.py b/gui/wxpython/startup/locdownload.py index ccbeaa8fb0c..fd2f174df14 100644 --- a/gui/wxpython/startup/locdownload.py +++ b/gui/wxpython/startup/locdownload.py @@ -32,10 +32,13 @@ set_gui_path() +# flake8: noqa: E402 from core.debug import Debug from core.gthread import gThread from gui_core.wrap import Button, StaticText +# flakes8: qa + # TODO: labels (and descriptions) translatable? LOCATIONS = [ @@ -93,7 +96,7 @@ def write(self, string): heigth = self._get_heigth(string) wx.CallAfter(self.out.SetLabel, string) self._resize(heigth) - except: + except wx.PyDeadObjectError: # window closed -> PyDeadObjectError pass @@ -228,9 +231,7 @@ def __init__(self, parent, database, locations=LOCATIONS): parent=self, label=_("Select sample project to download:") ) - choices = [] - for item in self.locations: - choices.append(item["label"]) + choices = [item["label"] for item in self.locations] self.choice = wx.Choice(parent=self, choices=choices) self.choice.Bind(wx.EVT_CHOICE, self.OnChangeChoice) diff --git a/gui/wxpython/timeline/frame.py b/gui/wxpython/timeline/frame.py index c5c54982ed3..68a30a0b54b 100644 --- a/gui/wxpython/timeline/frame.py +++ b/gui/wxpython/timeline/frame.py @@ -264,7 +264,6 @@ def _getData(self, timeseries): def _draw3dFigure(self): """Draws 3d view (spatio-temporal extents). - Only for matplotlib versions >= 1.0.0. Earlier versions cannot draw time ticks and alpha and it has a slightly different API. @@ -640,7 +639,6 @@ class DataCursor: """A simple data cursor widget that displays the x,y location of a matplotlib artist when it is selected. - Source: https://stackoverflow.com/questions/4652439/ is-there-a-matplotlib-equivalent-of-matlabs-datacursormode/4674445 """ diff --git a/gui/wxpython/timeline/g.gui.timeline.html b/gui/wxpython/timeline/g.gui.timeline.html index a9d07affdf5..443e07417b4 100644 --- a/gui/wxpython/timeline/g.gui.timeline.html +++ b/gui/wxpython/timeline/g.gui.timeline.html @@ -26,8 +26,8 @@

    NOTES

    SEE ALSO

    - Temporal data processing
    - wxGUI
    + Temporal data processing, + wxGUI, wxGUI components
    diff --git a/gui/wxpython/timeline/g.gui.timeline.py b/gui/wxpython/timeline/g.gui.timeline.py index 161c6832a92..3265636a3b0 100755 --- a/gui/wxpython/timeline/g.gui.timeline.py +++ b/gui/wxpython/timeline/g.gui.timeline.py @@ -50,7 +50,7 @@ def main(): from timeline.frame import TimelineFrame except ImportError as e: # TODO: why do we need this special check here, the reason of error - # is wrong intallation or something, no need to report this to the + # is wrong installation or something, no need to report this to the # user in a nice way gs.fatal(str(e)) diff --git a/gui/wxpython/tools/build_modules_xml.py b/gui/wxpython/tools/build_modules_xml.py index a97bdeb4627..2a69ce6a696 100644 --- a/gui/wxpython/tools/build_modules_xml.py +++ b/gui/wxpython/tools/build_modules_xml.py @@ -70,7 +70,7 @@ def parse_modules(fd): def get_module_metadata(name): - """ + """Gets the module's metadata for a given module name >>> get_module_metadata("g.region") ('Manages the boundary definitions for the geographic region.', ['general', 'settings']) diff --git a/gui/wxpython/tools/update_menudata.py b/gui/wxpython/tools/update_menudata.py index 08897e6147d..18c542c71f2 100644 --- a/gui/wxpython/tools/update_menudata.py +++ b/gui/wxpython/tools/update_menudata.py @@ -71,9 +71,7 @@ def updateData(data, modules): if node.tag != "menuitem": continue - item = {} - for child in node: - item[child.tag] = child.text + item = {child.tag: child.text for child in node} if "command" not in item: continue diff --git a/gui/wxpython/tplot/frame.py b/gui/wxpython/tplot/frame.py index db798081c51..831d06e93a8 100755 --- a/gui/wxpython/tplot/frame.py +++ b/gui/wxpython/tplot/frame.py @@ -85,7 +85,7 @@ def check_version(*version) -> bool: versionInstalled.append(v) except ValueError: versionInstalled.append(0) - return not versionInstalled < list(version) + return versionInstalled >= list(version) def findBetween(s, first, last): @@ -212,7 +212,7 @@ def _layout(self): self.coorval = gselect.CoordinatesSelect( parent=self.controlPanelRaster, giface=self._giface ) - except: + except NotImplementedError: self.coorval = TextCtrl( parent=self.controlPanelRaster, id=wx.ID_ANY, @@ -281,7 +281,7 @@ def _layout(self): self.cats = gselect.VectorCategorySelect( parent=self.controlPanelVector, giface=self._giface ) - except: + except NotImplementedError: self.cats = TextCtrl( parent=self.controlPanelVector, id=wx.ID_ANY, @@ -760,8 +760,7 @@ def _writeCSV(self, x, y): with open(self.csvpath, "w", newline="") as fi: writer = csv.writer(fi) if self.header: - head = ["Time"] - head.extend(self.yticksNames) + head = ["Time", *self.yticksNames] writer.writerow(head) writer.writerows(zipped) @@ -1029,10 +1028,10 @@ def OnRedraw(self, event=None): try: getcoors = self.coorval.coordsField.GetValue() - except: + except AttributeError: try: getcoors = self.coorval.GetValue() - except: + except AttributeError: getcoors = None if getcoors and getcoors != "": try: @@ -1257,7 +1256,7 @@ def SetDatasets( return try: self.coorval.coordsField.SetValue(",".join(coors)) - except: + except AttributeError: self.coorval.SetValue(",".join(coors)) if self.datasetsV: vdatas = ",".join(f"{x[0]}@{x[1]}" for x in self.datasetsV) @@ -1337,10 +1336,11 @@ def AddDataset(self, yranges, xranges, datasetName): self.data[datasetName][xranges[i]] = yranges[i] def GetInformation(self, x): - values = {} - for key, value in self.data.items(): - if value[x]: - values[key] = [self.convert(x), value[x]] + values = { + key: [self.convert(x), value[x]] + for key, value in 
self.data.items() + if value[x] + } if len(values) == 0: return None @@ -1372,7 +1372,6 @@ class DataCursor: """A simple data cursor widget that displays the x,y location of a matplotlib artist when it is selected. - Source: https://stackoverflow.com/questions/4652439/ is-there-a-matplotlib-equivalent-of-matlabs-datacursormode/4674445 """ @@ -1448,7 +1447,7 @@ def __call__(self, event): """Intended to be called through "mpl_connect".""" # Rather than trying to interpolate, just display the clicked coords # This will only be called if it's within "tolerance", anyway. - x, y = event.mouseevent.xdata, event.mouseevent.ydata + x = event.mouseevent.xdata annotation = self.annotations[event.artist.axes] if x is not None: if not self.display_all: @@ -1460,7 +1459,7 @@ def __call__(self, event): for a in event.artist.get_xdata(): try: d = self.convert(a) - except: + except (IndexError, ValueError): d = a xData.append(d) x = xData[np.argmin(abs(xData - x))] diff --git a/gui/wxpython/tplot/g.gui.tplot.html b/gui/wxpython/tplot/g.gui.tplot.html index f5293668ba9..10ddafa6245 100644 --- a/gui/wxpython/tplot/g.gui.tplot.html +++ b/gui/wxpython/tplot/g.gui.tplot.html @@ -20,7 +20,7 @@

    DESCRIPTION

  • add title to the plot, and
  • export the time series values to a CSV file (the x axis data is in date-time string format; if you want to use it for calculating a simple regression model in the - R environment, + R environment, LibreOffice etc., you will obtain a different calculated formula
    y = a + b*x
    because these software packages use a reference date other than @@ -65,8 +65,8 @@

    NOTES

    SEE ALSO

    - Temporal data processing
    - wxGUI
    + Temporal data processing, + wxGUI, wxGUI components
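
A small illustration of the reference-date caveat described in the g.gui.tplot page above (made-up data, not produced by the module): shifting the numeric time origin used for the exported date strings changes the fitted intercept a of y = a + b*x but not the slope b.

```python
from datetime import date

import numpy as np

dates = [date(2020, 1, 1), date(2020, 1, 2), date(2020, 1, 3), date(2020, 1, 4)]
y = np.array([1.0, 2.1, 2.9, 4.2])

x_ordinal = np.array([d.toordinal() for d in dates], dtype=float)  # days since 0001-01-01
x_epoch = x_ordinal - date(1970, 1, 1).toordinal()                 # days since 1970-01-01

b1, a1 = np.polyfit(x_ordinal, y, 1)  # slope, intercept
b2, a2 = np.polyfit(x_epoch, y, 1)
print(np.isclose(b1, b2), np.isclose(a1, a2))  # True False
```
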
    diff --git a/gui/wxpython/vdigit/dialogs.py b/gui/wxpython/vdigit/dialogs.py index 2e7333cf122..27d34601459 100644 --- a/gui/wxpython/vdigit/dialogs.py +++ b/gui/wxpython/vdigit/dialogs.py @@ -75,9 +75,7 @@ def __init__( for layer in cats[line].keys(): self.cats[line][layer] = list(cats[line][layer]) - layers = [] - for layer in self.digit.GetLayers(): - layers.append(str(layer)) + layers = [str(layer) for layer in self.digit.GetLayers()] # make copy of cats (used for 'reload') self.cats_orig = copy.deepcopy(self.cats) @@ -114,9 +112,7 @@ def __init__( self.fidText.SetLabel(str(self.fid)) else: self.fidText.Show(False) - choices = [] - for fid in self.cats.keys(): - choices.append(str(fid)) + choices = [str(fid) for fid in self.cats.keys()] self.fidMulti.SetItems(choices) self.fidMulti.SetSelection(0) @@ -387,7 +383,7 @@ def OnReload(self, event): # restore original list self.cats = copy.deepcopy(self.cats_orig) - # polulate list + # populate list self.itemDataMap = self.list.Populate(self.cats[self.fid], update=True) event.Skip() @@ -529,7 +525,7 @@ def UpdateDialog(self, query=None, cats=None): # make copy of cats (used for 'reload') self.cats_orig = copy.deepcopy(self.cats) - # polulate list + # populate list self.fid = list(self.cats.keys())[0] self.itemDataMap = self.list.Populate(self.cats[self.fid], update=True) @@ -546,9 +542,7 @@ def UpdateDialog(self, query=None, cats=None): else: self.fidText.Show(False) self.fidMulti.Show(True) - choices = [] - for fid in self.cats.keys(): - choices.append(str(fid)) + choices = [str(fid) for fid in self.cats.keys()] self.fidMulti.SetItems(choices) self.fidMulti.SetSelection(0) diff --git a/gui/wxpython/vdigit/g.gui.vdigit.html b/gui/wxpython/vdigit/g.gui.vdigit.html index 7b0c7165063..766c0209270 100644 --- a/gui/wxpython/vdigit/g.gui.vdigit.html +++ b/gui/wxpython/vdigit/g.gui.vdigit.html @@ -159,57 +159,57 @@

    DIGITIZER TOOLBAR

    • Break selected lines/boundaries at intersection
      Split - given vector line or boundary into two lines on given position - (based on v.clean, - tool=break).
    • + given vector line or boundary into two lines on given position + (based on v.clean, + tool=break).
    • Connect two selected lines/boundaries
      Connect selected - lines or boundaries, the first given line is connected to the - second one. The second line is broken if necessary on each intersection. - The lines are connected only if distance between them is not greater - than snapping threshold value.
    • + lines or boundaries, the first given line is connected to the + second one. The second line is broken if necessary on each intersection. + The lines are connected only if distance between them is not greater + than snapping threshold value.
    • Copy categories
      Copy category settings of - selected vector feature to other vector - features. Layer/category pairs of source vector features are - appended to the target feature category settings. Existing - layer/category pairs are not removed from category settings of - the target features.
    • + selected vector feature to other vector + features. Layer/category pairs of source vector features are + appended to the target feature category settings. Existing + layer/category pairs are not removed from category settings of + the target features.
    • Copy features from (background) vector map
      Make identical copy of - selected vector features. If a background vector map has been - selected from the Layer Manager, copy features from background - vector map, not from the currently modified vector map.
    • + selected vector features. If a background vector map has been + selected from the Layer Manager, copy features from background + vector map, not from the currently modified vector map.
    • Copy attributes
      Duplicate attributes settings of - selected vector feature to other vector features. New - category(ies) is appended to the target feature category - settings and attributes duplicated based on category settings - of source vector features. Existing layer/category pairs are - not removed from category settings of the target - features.
    • + selected vector feature to other vector features. New + category(ies) is appended to the target feature category + settings and attributes duplicated based on category settings + of source vector features. Existing layer/category pairs are + not removed from category settings of the target + features.
    • Feature type conversion
      Change feature type of selected - geometry features. Points are converted to centroids, - centroids to points, lines to boundaries and boundaries to - lines.
    • + geometry features. Points are converted to centroids, + centroids to points, lines to boundaries and boundaries to + lines.
    • Flip selected lines/boundaries
      Flip direction of - selected linear features (lines or boundaries).
    • + selected linear features (lines or boundaries).
    • Merge selected lines/boundaries
      Merge (at least two) - selected vector lines or boundaries. The geometry of the - merged vector lines can be changed. If the second line from - two selected lines is in opposite direction to the first, it - will be flipped. See also - module v.build.polylines.
    • + selected vector lines or boundaries. The geometry of the + merged vector lines can be changed. If the second line from + two selected lines is in opposite direction to the first, it + will be flipped. See also + module v.build.polylines.
    • Snap selected lines/boundaries (only to nodes)
      Snap - vector features in given threshold. See also - module v.clean. Note that - this tool supports only snapping to nodes. Snapping to vector - features from background vector map is not currently - supported.
    • + vector features in given threshold. See also + module v.clean. Note that + this tool supports only snapping to nodes. Snapping to vector + features from background vector map is not currently + supported.
    • Split line/boundary
      Split selected line or boundary on given position.
    • @@ -218,7 +218,7 @@

      DIGITIZER TOOLBAR

      min/max length value (linear features or dangles).
    • Z-bulk labeling of 3D lines
      Assign z coordinate values to 3D - vector lines in bounding box. This is useful for labeling contour lines.
    • + vector lines in bounding box. This is useful for labeling contour lines.
    @@ -248,9 +248,9 @@

    DIGITIZER TOOLBAR

    NOTES

    Mouse button functions:
    -
    Left - select or deselect features
    -
    Control+Left - cancel action or undo vertex when digitizing lines
    -
    Right - confirm action
    +
    Left - select or deselect features
    +
    Control+Left - cancel action or undo vertex when digitizing lines
    +
    Right - confirm action

    @@ -288,8 +288,8 @@

    REFERENCES

    SEE ALSO

    -wxGUI
    -wxGUI components + wxGUI, + wxGUI components
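
For readers of the g.gui.vdigit page above: several digitizer tools are documented as thin front-ends over existing modules (v.clean tool=break, v.build.polylines). A hypothetical scripted equivalent of the break operation, with placeholder map names:

```python
import grass.script as gs

# Break lines at intersections outside the digitizer; the page notes the
# "Break selected lines/boundaries" tool is based on v.clean tool=break.
gs.run_command(
    "v.clean",
    input="roads",         # placeholder input vector map
    output="roads_clean",  # placeholder output vector map
    tool="break",
)
```
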

    diff --git a/gui/wxpython/vdigit/g.gui.vdigit.py b/gui/wxpython/vdigit/g.gui.vdigit.py index be476a03ee1..c4b4f93ae7e 100644 --- a/gui/wxpython/vdigit/g.gui.vdigit.py +++ b/gui/wxpython/vdigit/g.gui.vdigit.py @@ -84,7 +84,7 @@ def __init__(self, parent, vectorMap): ) self._initShortcuts() - # this giface issue not solved yet, we must set mapframe aferwards + # this giface issue not solved yet, we must set mapframe afterwards self._giface._mapframe = self # load vector map mapLayer = self.GetMap().AddLayer( diff --git a/gui/wxpython/vdigit/mapwindow.py b/gui/wxpython/vdigit/mapwindow.py index db9ebf57caf..08a86857c12 100644 --- a/gui/wxpython/vdigit/mapwindow.py +++ b/gui/wxpython/vdigit/mapwindow.py @@ -294,7 +294,7 @@ def OnLeftDownAddLine(self, event): return if self.toolbar.GetAction("type") in {"point", "centroid"}: - # add new point / centroiud + # add new point / centroid east, north = self.Pixel2Cell(self.mouse["begin"]) nfeat, fids = self.digit.AddFeature( self.toolbar.GetAction("type"), [(east, north)] @@ -690,7 +690,7 @@ def OnLeftDownUndo(self, event): def _onLeftDown(self, event): """Left mouse button donw - vector digitizer various actions""" try: - mapLayer = self.toolbar.GetLayer().GetName() + self.toolbar.GetLayer().GetName() except: GMessage(parent=self, message=_("No vector map selected for editing.")) event.Skip() diff --git a/gui/wxpython/vdigit/preferences.py b/gui/wxpython/vdigit/preferences.py index b8fd9096510..949b4e5ccae 100644 --- a/gui/wxpython/vdigit/preferences.py +++ b/gui/wxpython/vdigit/preferences.py @@ -525,7 +525,6 @@ def _createAttributesPage(self, notebook): # settings flexSizer = wx.FlexGridSizer(cols=2, hgap=3, vgap=3) flexSizer.AddGrowableCol(0) - settings = ((_("Layer"), 1), (_("Category"), 1), (_("Mode"), _("Next to use"))) # layer text = StaticText(parent=panel, id=wx.ID_ANY, label=_("Layer")) self.layer = SpinCtrl(parent=panel, id=wx.ID_ANY, min=1, max=1e3) @@ -990,8 +989,8 @@ def UpdateSettings(self): tree.SetLayerInfo(item, key="vdigit", value={"geomAttr": {}}) if checked: # enable - _type = key if key == "area" else "length" - unitsKey = Units.GetUnitsKey(_type, unitsIdx) + type_ = key if key == "area" else "length" + unitsKey = Units.GetUnitsKey(type_, unitsIdx) tree.GetLayerInfo(item, key="vdigit")["geomAttr"][key] = { "column": column, "units": unitsKey, diff --git a/gui/wxpython/vdigit/toolbars.py b/gui/wxpython/vdigit/toolbars.py index 09833758882..61b79235193 100644 --- a/gui/wxpython/vdigit/toolbars.py +++ b/gui/wxpython/vdigit/toolbars.py @@ -446,7 +446,7 @@ def _noVMapOpenForEditingErrDlg(self): return True def OnTool(self, event): - """Tool selected -> untoggles previusly selected tool in + """Tool selected -> untoggles previously selected tool in toolbar""" Debug.msg( 3, @@ -478,7 +478,7 @@ def OnTool(self, event): event.Skip() def OnAddPoint(self, event): - """Add point to the vector map Laier""" + """Add point to the vector map layer""" Debug.msg(2, "VDigitToolbar.OnAddPoint()") self.action = {"desc": "addLine", "type": "point", "id": self.addPoint} self.MapWindow.mouse["box"] = "point" @@ -705,7 +705,6 @@ def OnHelp(self, event): def OnAdditionalToolMenu(self, event): """Menu for additional tools""" - point = wx.GetMousePosition() toolMenu = Menu() for label, itype, handler, desc in ( diff --git a/gui/wxpython/vdigit/wxdigit.py b/gui/wxpython/vdigit/wxdigit.py index 10cac8710b5..b4b7b0dfd7b 100644 --- a/gui/wxpython/vdigit/wxdigit.py +++ b/gui/wxpython/vdigit/wxdigit.py @@ -339,9 +339,6 @@ def _breakLineAtIntersection(self, 
line, pointsLine): if Vect_read_line(self.poMapInfo, self.poPoints, None, line) < 0: self._error.ReadLine(line) return -1 - points = self.poPoints - else: - points = pointsLine listLine = Vect_new_boxlist(0) listRef = Vect_new_list() @@ -667,7 +664,7 @@ def _getLineAreaBboxCats(self, ln_id): ltype = Vect_read_line(self.poMapInfo, None, None, ln_id) if ltype == GV_CENTROID: - # TODO centroid opttimization, can be edited also its area -> it + # TODO centroid optimization, can be edited also its area -> it # will appear two times in new_ lists return self._getCentroidAreaBboxCats(ln_id) return [self._getBbox(ln_id)], [self._getLineAreasCategories(ln_id)] @@ -692,8 +689,7 @@ def _getaAreaBboxCats(self, area): :param area: area id :return: list of categories :func:`_getLineAreasCategories` and - list of bboxes :func:`_getBbox` of area boundary - features + list of bboxes :func:`_getBbox` of area boundary features """ po_b_list = Vect_new_list() Vect_get_area_boundaries(self.poMapInfo, area, po_b_list) @@ -826,18 +822,13 @@ def _createBbox(self, points): return bbox def _convertGeom(self, poPoints): - """Helper function convert geom from ctypes line_pts to python - list + """Helper function convert geom from ctypes line_pts to Python list :return: coords in python list [(x, y),...] """ Points = poPoints.contents - pts_geom = [] - for j in range(Points.n_points): - pts_geom.append((Points.x[j], Points.y[j])) - - return pts_geom + return [(Points.x[j], Points.y[j]) for j in range(Points.n_points)] def MoveSelectedLines(self, move): """Move selected features @@ -1095,7 +1086,6 @@ def EditLine(self, line, coords): self.poMapInfo, line, ltype, self.poPoints, self.poCats ) if newline > 0 and self.emit_signals: - new_geom = [self._getBbox(newline)] new_areas_cats = [self._getLineAreasCategories(newline)] if newline > 0 and self._settings["breakLines"]: @@ -1124,8 +1114,6 @@ def FlipLine(self): if not self._checkMap(): return -1 - nlines = Vect_get_num_lines(self.poMapInfo) - poList = self._display.GetSelectedIList() ret = Vedit_flip_lines(self.poMapInfo, poList) Vect_destroy_list(poList) diff --git a/gui/wxpython/vdigit/wxdisplay.py b/gui/wxpython/vdigit/wxdisplay.py index 39662afb19f..6679e5c5814 100644 --- a/gui/wxpython/vdigit/wxdisplay.py +++ b/gui/wxpython/vdigit/wxdisplay.py @@ -373,6 +373,8 @@ def _definePen(self, rtype): :return: pen, brush """ + + key = None if rtype == TYPE_POINT: key = "point" elif rtype == TYPE_LINE: @@ -561,7 +563,7 @@ def _validLine(self, line) -> bool: :return: True valid feature id :return: False invalid """ - return bool(line > 0 and line <= Vect_get_num_lines(self.poMapInfo)) + return bool(0 < line <= Vect_get_num_lines(self.poMapInfo)) def SelectLinesByBox(self, bbox, ltype=None, drawSeg=False, poMapInfo=None): """Select vector objects by given bounding box @@ -712,7 +714,7 @@ def SelectLineByPoint(self, point, ltype=None, poMapInfo=None): pz = c_double() if not self._validLine(lineNearest): return {"line": -1, "point": None} - ftype = Vect_read_line(poMapInfo, self.poPoints, self.poCats, lineNearest) + Vect_read_line(poMapInfo, self.poPoints, self.poCats, lineNearest) Vect_line_distance( self.poPoints, point[0], @@ -856,7 +858,7 @@ def GetSelectedVertex(self, pos): if not self._validLine(line): return -1 - ftype = Vect_read_line(self.poMapInfo, self.poPoints, self.poCats, line) + Vect_read_line(self.poMapInfo, self.poPoints, self.poCats, line) minDist = 0.0 Gid = -1 @@ -911,7 +913,7 @@ def GetRegionSelected(self): for line in self.selected["ids"]: area = 
Vect_get_centroid_area(self.poMapInfo, line) - if area > 0 and area <= nareas: + if 0 < area <= nareas: if not Vect_get_area_box(self.poMapInfo, area, byref(lineBox)): continue else: # noqa: PLR5501 diff --git a/gui/wxpython/vnet/dialogs.py b/gui/wxpython/vnet/dialogs.py index 2979429a572..ca928a6b164 100644 --- a/gui/wxpython/vnet/dialogs.py +++ b/gui/wxpython/vnet/dialogs.py @@ -581,13 +581,10 @@ def _updateResultDbMgrPage(self): def OnPageChanged(self, event): """Tab switched""" if event.GetEventObject() == self.notebook: - dbMgrIndxs = [] - dbMgrIndxs.extend( - ( - self.notebook.GetPageIndexByName("inputDbMgr"), - self.notebook.GetPageIndexByName("resultDbMgr"), - ) - ) + dbMgrIndxs = [ + self.notebook.GetPageIndexByName("inputDbMgr"), + self.notebook.GetPageIndexByName("resultDbMgr"), + ] if self.notebook.GetSelection() in dbMgrIndxs: self.stBar.AddStatusItem( text=_("Loading tables..."), @@ -834,9 +831,7 @@ def OnNLayerSel(self, event): self._setInputData() def _setInputData(self): - params = {} - for k, v in self.inputData.items(): - params[k] = v.GetValue() + params = {k: v.GetValue() for k, v in self.inputData.items()} flags = {} self.vnet_mgr.SetParams(params, flags) @@ -1218,7 +1213,7 @@ def OnItemSelected(self, event): self.pts_data.SetSelected(self.selectedkey) def OnCheckItem(self, index, flag): - "flag is True if the item was checked, False if unchecked" + """flag is True if the item was checked, False if unchecked""" key = self.GetItemData(index) if self.pts_data.GetPointData(key)["use"] != flag: self.pts_data.SetPointData(key, {"use": flag}) @@ -1258,11 +1253,13 @@ def __init__( rules = RunCommand("v.colors", read=True, flags="l") - settsLabels = {} - - settsLabels["color_table"] = StaticText( - parent=self, id=wx.ID_ANY, label=_("Color table style %s:") % "(v.net.flow)" - ) + settsLabels = { + "color_table": StaticText( + parent=self, + id=wx.ID_ANY, + label=_("Color table style %s:") % "(v.net.flow)", + ) + } self.settings["color_table"] = ComboBox( parent=self, id=wx.ID_ANY, @@ -1651,11 +1648,7 @@ def InputSel(self): self._updateInputDbMgrPage(show=True) def GetData(self): - params = {} - for param, sel in self.inputData.items(): - params[param] = sel.GetValue() - - return params + return {param: sel.GetValue() for param, sel in self.inputData.items()} class OutputVectorDialog(wx.Dialog): diff --git a/gui/wxpython/vnet/toolbars.py b/gui/wxpython/vnet/toolbars.py index d31313091fe..4472d718064 100644 --- a/gui/wxpython/vnet/toolbars.py +++ b/gui/wxpython/vnet/toolbars.py @@ -208,10 +208,10 @@ def __init__(self, parent, vnet_mgr): self.vnet_mgr = vnet_mgr self.InitToolbar(self._toolbarData()) - choices = [] - - for moduleName in self.vnet_mgr.GetAnalyses(): - choices.append(self.vnet_mgr.GetAnalysisProperties(moduleName)["label"]) + choices = [ + self.vnet_mgr.GetAnalysisProperties(moduleName)["label"] + for moduleName in self.vnet_mgr.GetAnalyses() + ] self.anChoice = ComboBox( parent=self, diff --git a/gui/wxpython/vnet/vnet_core.py b/gui/wxpython/vnet/vnet_core.py index f55550e65e2..b019256bb75 100644 --- a/gui/wxpython/vnet/vnet_core.py +++ b/gui/wxpython/vnet/vnet_core.py @@ -419,9 +419,11 @@ def _vnetPathRunAn(self, analysis, output, params, flags, catPts): cats = self.data.GetAnalysisProperties()["cmdParams"]["cats"] # Creates part of cmd fro analysis - cmdParams = [analysis] - cmdParams.extend(self._setInputParams(analysis, params, flags)) - cmdParams.append("output=" + output) + cmdParams = [ + analysis, + *self._setInputParams(analysis, params, flags), + 
"output=" + output, + ] cmdPts = [] for cat in cats: @@ -513,9 +515,11 @@ def _onDone(self, event): def _runTurnsAn(self, analysis, output, params, flags, catPts): # Creates part of cmd fro analysis - cmdParams = [analysis] - cmdParams.extend(self._setInputParams(analysis, params, flags)) - cmdParams.append("output=" + output) + cmdParams = [ + analysis, + *self._setInputParams(analysis, params, flags), + "output=" + output, + ] cats = {} for cat_name, pts_coor in catPts.items(): @@ -622,9 +626,11 @@ def _runAn(self, analysis, output, params, flags, catPts): """Called for all v.net.* analysis (except v.net.path)""" # Creates part of cmd fro analysis - cmdParams = [analysis] - cmdParams.extend(self._setInputParams(analysis, params, flags)) - cmdParams.append("output=" + output) + cmdParams = [ + analysis, + *self._setInputParams(analysis, params, flags), + "output=" + output, + ] cats = self.data.GetAnalysisProperties()["cmdParams"]["cats"] @@ -765,9 +771,7 @@ def _getPtByCat(self, analysis): anProps = self.data.GetAnalysisProperties() cats = anProps["cmdParams"]["cats"] - ptByCats = {} - for cat in anProps["cmdParams"]["cats"]: - ptByCats[cat[0]] = [] + ptByCats = {cat[0]: [] for cat in anProps["cmdParams"]["cats"]} for i in range(self.pts_data.GetPointsCount()): pt_data = self.pts_data.GetPointData(i) @@ -893,13 +897,13 @@ def _updateHistStepData(self, histStepData): ptDataHist = histStepData["points"]["pt" + str(iPt)] e, n = ptDataHist["coords"] - pt_data = {"e": e, "n": n} - - pt_data["type"] = int(ptDataHist["catIdx"]) - - pt_data["topology"] = ptDataHist["topology"] - - pt_data["use"] = ptDataHist["checked"] + pt_data = { + "e": e, + "n": n, + "type": int(ptDataHist["catIdx"]), + "topology": ptDataHist["topology"], + "use": ptDataHist["checked"], + } pts.append(pt_data) @@ -1000,7 +1004,7 @@ def NewTmpVectMapToHist(self, prefMapName): def AddTmpMapAnalysisMsg(mapName, tmp_maps): # TODO - """Wraped AddTmpVectMap""" + """Wraps AddTmpVectMap""" msg = _( "Temporary map %s already exists.\n" + "Do you want to continue in analysis and overwrite it?" 
diff --git a/gui/wxpython/vnet/vnet_data.py b/gui/wxpython/vnet/vnet_data.py index 2074e037350..9dd1bff4f66 100644 --- a/gui/wxpython/vnet/vnet_data.py +++ b/gui/wxpython/vnet/vnet_data.py @@ -427,11 +427,7 @@ def _ptDataToList(self, pt_data): return pt_list_data def _ptListDataToPtData(self, pt_list_data): - pt_data = {} - for i, val in enumerate(pt_list_data): - pt_data[self.cols["name"][i]] = val - - return pt_data + return {self.cols["name"][i]: val for i, val in enumerate(pt_list_data)} def _usePoint(self, pt_id, use): """Item is checked/unchecked""" @@ -557,8 +553,7 @@ def OnMapClickHandler(self, event): def GetColumns(self, only_relevant=True): cols_data = deepcopy(self.cols) - hidden_cols = [] - hidden_cols.extend((self.cols["name"].index("e"), self.cols["name"].index("n"))) + hidden_cols = [self.cols["name"].index("e"), self.cols["name"].index("n")] analysis, valid = self.an_params.GetParam("analysis") if only_relevant and len(self.an_data[analysis]["cmdParams"]["cats"]) <= 1: @@ -1347,11 +1342,7 @@ def __init__(self): ] def GetData(self): - data = [] - for ival in self.turn_data: - data.append(ival[1:]) - - return data + return [ival[1:] for ival in self.turn_data] def GetValue(self, line, col): return self.turn_data[line][col] @@ -1364,7 +1355,7 @@ def SetValue(self, value, line, col): self.turn_data[line][col] = value def SetUTurns(self, value): - """Checked if checeBox is checed""" + """Checked if checkBox is checked""" self.useUTurns = value def AppendRow(self, values): @@ -1450,4 +1441,4 @@ def IsInInterval(self, from_angle, to_angle, angle) -> bool: if angle < from_angle: angle = math.pi * 2 + angle - return bool(angle > from_angle and angle < to_angle) + return bool(from_angle < angle < to_angle) diff --git a/gui/wxpython/vnet/widgets.py b/gui/wxpython/vnet/widgets.py index 71fcd0e0739..d8926f96fae 100644 --- a/gui/wxpython/vnet/widgets.py +++ b/gui/wxpython/vnet/widgets.py @@ -14,7 +14,7 @@ @author Original author Michael Barton @author Original version improved by Martin Landa -@author Rewritten by Markus Metz redesign georectfier -> GCP Manage +@author Rewritten by Markus Metz redesign georectifier -> GCP Manage @author Stepan Turek (Created PointsList from GCPList) (GSoC 2012, mentor: Martin Landa) """ @@ -511,7 +511,7 @@ def ShowColumn(self, colName, pos): :return: True if column was shown :return: False if position is not valid or column is not hidden """ - if pos < 0 and pos >= self.self.GetColumnCount(): + if pos < 0 or pos >= self.GetColumnCount(): return False if colName in self.hiddenCols: col = self.hiddenCols[colName] diff --git a/gui/wxpython/web_services/cap_interface.py b/gui/wxpython/web_services/cap_interface.py index 38cc7c3c72f..91b2b396dfd 100644 --- a/gui/wxpython/web_services/cap_interface.py +++ b/gui/wxpython/web_services/cap_interface.py @@ -109,11 +109,7 @@ def GetFormats(self): get_map_node = request_node.find(self.xml_ns.Ns("GetMap")) format_nodes = get_map_node.findall(self.xml_ns.Ns("Format")) - formats = [] - for node in format_nodes: - formats.append(node.text) - - return formats + return [node.text for node in format_nodes] class WMSLayer(LayerBase): @@ -265,10 +261,10 @@ def GetLayerData(self, param): return styles if param == "format": - formats = [] - for frmt in self.layer_node.findall(self.xml_ns.NsWmts("Format")): - formats.append(frmt.text.strip()) - return formats + return [ + frmt.text.strip() + for frmt in self.layer_node.findall(self.xml_ns.NsWmts("Format")) + ] if param == "srs": return self.projs diff --git 
a/gui/wxpython/web_services/dialogs.py b/gui/wxpython/web_services/dialogs.py index e47400cae70..309d4b8387c 100644 --- a/gui/wxpython/web_services/dialogs.py +++ b/gui/wxpython/web_services/dialogs.py @@ -178,7 +178,7 @@ def _doLayout(self): border=5, ) - # connectin settings + # connection settings settingsSizer = wx.StaticBoxSizer(self.settingsBox, wx.VERTICAL) serverSizer = wx.FlexGridSizer(cols=3, vgap=5, hgap=5) @@ -336,11 +336,10 @@ def OnClose(self, event): event.Skip() def _getCapFiles(self): - ws_cap_files = {} - for v in self.ws_panels.values(): - ws_cap_files[v["panel"].GetWebService()] = v["panel"].GetCapFile() - - return ws_cap_files + return { + v["panel"].GetWebService(): v["panel"].GetCapFile() + for v in self.ws_panels.values() + } def OnServer(self, event): """Server settings edited""" @@ -416,12 +415,9 @@ def _getConnectedWS(self): :return: list of found web services on server (identified as keys in self.ws_panels) """ - conn_ws = [] - for ws, data in self.ws_panels.items(): - if data["panel"].IsConnected(): - conn_ws.append(ws) - - return conn_ws + return [ + ws for ws, data in self.ws_panels.items() if data["panel"].IsConnected() + ] def UpdateDialogAfterConnection(self): """Update dialog after all web service panels downloaded and parsed @@ -748,12 +744,9 @@ def LoadCapFiles(self, ws_cap_files, cmd): ) def _getServerConnFromCmd(self, cmd): - """Get url/server/passwod from cmd tuple""" + """Get url/server/password from cmd tuple""" conn = {"url": "", "username": "", "password": ""} - - for k in conn.keys(): - if k in cmd[1]: - conn[k] = cmd[1][k] + conn |= {k: cmd[1][k] for k in conn.keys() if k in cmd[1]} return conn def _apply(self): diff --git a/gui/wxpython/web_services/widgets.py b/gui/wxpython/web_services/widgets.py index e54bee5603f..29a1c99014a 100644 --- a/gui/wxpython/web_services/widgets.py +++ b/gui/wxpython/web_services/widgets.py @@ -1,7 +1,7 @@ """ @package web_services.widgets -@brief Widgets for web services (WMS, WMTS, NasaOnEarh) +@brief Widgets for web services (WMS, WMTS, NasaOnEarth) List of classes: - widgets::WSPanel @@ -60,15 +60,14 @@ ) import grass.script as gs +from grass.pydispatch.signal import Signal rinwms_path = os.path.join(os.getenv("GISBASE"), "etc", "r.in.wms") if rinwms_path not in sys.path: sys.path.append(rinwms_path) -from wms_base import WMSDriversInfo -from srs import Srs - -from grass.pydispatch.signal import Signal +from wms_base import WMSDriversInfo # noqa:E402 +from srs import Srs # noqa:E402 class WSPanel(wx.Panel): @@ -251,14 +250,14 @@ def _advancedSettsPage(self): ) labels = {} - self.l_odrder_list = None + self.l_order_list = None if "WMS" in self.ws: labels["l_order"] = StaticBox( parent=adv_setts_panel, id=wx.ID_ANY, label=_("Order of layers in raster"), ) - self.l_odrder_list = wx.ListBox( + self.l_order_list = wx.ListBox( adv_setts_panel, id=wx.ID_ANY, choices=[], @@ -351,7 +350,7 @@ def _advancedSettsPage(self): gridSizer = wx.GridBagSizer(hgap=3, vgap=3) gridSizer.Add( - self.l_odrder_list, + self.l_order_list, pos=(0, 0), span=(4, 1), flag=wx.ALIGN_CENTER_VERTICAL | wx.EXPAND, @@ -427,8 +426,8 @@ def _advancedSettsPage(self): def OnUp(self, event): """Move selected layer up""" - if self.l_odrder_list.GetSelections(): - pos = self.l_odrder_list.GetSelection() + if self.l_order_list.GetSelections(): + pos = self.l_order_list.GetSelection() if pos: self.sel_layers.insert(pos - 1, self.sel_layers.pop(pos)) if pos > 0: @@ -438,8 +437,8 @@ def OnUp(self, event): def OnDown(self, event): """Move selected to down""" 
- if self.l_odrder_list.GetSelections(): - pos = self.l_odrder_list.GetSelection() + if self.l_order_list.GetSelections(): + pos = self.l_order_list.GetSelection() if pos != len(self.sel_layers) - 1: self.sel_layers.insert(pos + 1, self.sel_layers.pop(pos)) if pos < len(self.sel_layers) - 1: @@ -461,14 +460,14 @@ def getlayercaption(layer): return cap layer_capts = [getlayercaption(sel_layer) for sel_layer in self.sel_layers] - self.l_odrder_list.Set(layer_capts) - if self.l_odrder_list.IsEmpty(): + self.l_order_list.Set(layer_capts) + if self.l_order_list.IsEmpty(): self.enableButtons(False) else: self.enableButtons(True) if selected is not None: - self.l_odrder_list.SetSelection(selected) - self.l_odrder_list.EnsureVisible(selected) + self.l_order_list.SetSelection(selected) + self.l_order_list.EnsureVisible(selected) def OnTransparent(self, event): checked = event.IsChecked() @@ -515,10 +514,7 @@ def _prepareForNewConn(self, url, username, password): self.conn = {"url": url, "password": password, "username": username} - conn_cmd = [] - for k, v in self.conn.items(): - if v: - conn_cmd.append("%s=%s" % (k, v)) + conn_cmd = ["%s=%s" % (k, v) for k, v in self.conn.items() if v] self.ws_cmdl = self.ws_drvs[self.ws]["cmd"] + conn_cmd @@ -750,7 +746,6 @@ def OnListSelChanged(self, event): self.projs_list = [] projs_list = [] - intersect_proj = [] first = True for curr in curr_sel_ls: layer_projs = curr["cap_intf_l"].GetLayerData("srs") @@ -841,7 +836,7 @@ def _getFormats(self, layer=None): """Get formats WMS has formats defined generally for whole cap. - In WMTS and NASA OnEarh formats are defined for layer. + In WMTS and NASA OnEarth formats are defined for layer. """ formats_label = [] if layer is None: @@ -920,7 +915,7 @@ def __init__(self, parent, web_service, style, pos=wx.DefaultPosition): def LoadData(self, cap=None): """Load data into list""" - # detete first all items + # delete first all items self.DeleteAllItems() if not cap: diff --git a/gui/wxpython/wxplot/base.py b/gui/wxpython/wxplot/base.py index 7dfb444bd1e..057894c0250 100755 --- a/gui/wxpython/wxplot/base.py +++ b/gui/wxpython/wxplot/base.py @@ -1,7 +1,7 @@ """ @package wxplot.base -@brief Base classes for iinteractive plotting using PyPlot +@brief Base classes for interactive plotting using PyPlot Classes: - base::PlotIcons @@ -30,6 +30,7 @@ from gui_core.wrap import Menu import grass.script as gs +from grass.exceptions import CalledModuleError PlotIcons = { "draw": MetaIcon(img="show", label=_("Draw/re-draw plot")), @@ -204,7 +205,7 @@ def InitRasterOpts(self, rasterList, plottype): try: ret = gs.raster_info(r) - except: + except CalledModuleError: continue # if r.info cannot parse map, skip it @@ -270,7 +271,7 @@ def InitRasterPairs(self, rasterList, plottype): ret0 = gs.raster_info(rpair[0]) ret1 = gs.raster_info(rpair[1]) - except: + except (IndexError, CalledModuleError): continue # if r.info cannot parse map, skip it @@ -510,7 +511,6 @@ def OnMotion(self, event): def PlotOptionsMenu(self, event): """Popup menu for plot and text options""" - point = wx.GetMousePosition() popt = Menu() # Add items to the menu settext = wx.MenuItem(popt, wx.ID_ANY, _("Text settings")) @@ -608,7 +608,6 @@ def PlotOptions(self, event): def PrintMenu(self, event): """Print options and output menu""" - point = wx.GetMousePosition() printmenu = Menu() for title, handler in ( (_("Page setup"), self.OnPageSetup), diff --git a/gui/wxpython/wxplot/dialogs.py b/gui/wxpython/wxplot/dialogs.py index bae305a1817..cdb74c64a9e 100755 --- 
a/gui/wxpython/wxplot/dialogs.py +++ b/gui/wxpython/wxplot/dialogs.py @@ -986,9 +986,7 @@ def _do_layout(self): gridSizer = wx.GridBagSizer(vgap=5, hgap=5) row = 0 - choicelist = [] - for i in self.rasterList: - choicelist.append(str(i)) + choicelist = [str(i) for i in self.rasterList] self.mapchoice = Choice( parent=self, id=wx.ID_ANY, size=(300, -1), choices=choicelist diff --git a/gui/wxpython/wxplot/histogram.py b/gui/wxpython/wxplot/histogram.py index ef252f0b052..b65ebd03192 100644 --- a/gui/wxpython/wxplot/histogram.py +++ b/gui/wxpython/wxplot/histogram.py @@ -97,10 +97,7 @@ def OnCreateHist(self, event): create a list of cell value and count/percent/area pairs. This is passed to plot to create a line graph of the histogram. """ - try: - self.SetCursor(StockCursor(wx.CURSOR_ARROW)) - except: - pass + self.SetCursor(StockCursor(wx.CURSOR_ARROW)) self.SetGraphStyle() wx.BeginBusyCursor() diff --git a/gui/wxpython/wxplot/profile.py b/gui/wxpython/wxplot/profile.py index 240290e5b7b..e0994f1a8a5 100644 --- a/gui/wxpython/wxplot/profile.py +++ b/gui/wxpython/wxplot/profile.py @@ -193,7 +193,7 @@ def SetupProfile(self): # title of window self.ptitle = _("Profile of") - # Initialize lattitude-longitude geodesic distance calculation + # Initialize latitude-longitude geodesic distance calculation if self._is_lat_lon_proj and haveCtypes: gislib.G_begin_distance_calculations() @@ -250,7 +250,7 @@ def SetupProfile(self): # delete extra first segment point try: self.seglist.pop(0) - except: + except IndexError: pass # @@ -291,7 +291,6 @@ def CreateDatalist(self, raster, coords): # freezing with large, high resolution maps region = gs.region() curr_res = min(float(region["nsres"]), float(region["ewres"])) - transect_rec = 0 if self.transect_length / curr_res > 500: transect_res = self.transect_length / 500 else: @@ -486,7 +485,7 @@ def OnStats(self, event): statstr += "median: %f\n" % np.median(a) statstr += "distance along transect: %f\n\n" % self.transect_length message.append(statstr) - except: + except (ValueError, TypeError, KeyError, IndexError): pass stats = PlotStatsFrame(self, id=wx.ID_ANY, message=message, title=title) diff --git a/imagery/i.aster.toar/i.aster.toar.html b/imagery/i.aster.toar/i.aster.toar.html index 7442ce91766..aaaf287e200 100644 --- a/imagery/i.aster.toar/i.aster.toar.html +++ b/imagery/i.aster.toar/i.aster.toar.html @@ -9,9 +9,9 @@

    DESCRIPTION

    The order of input bands is

• VNIR: 1,2,3N,3B
• SWIR: 4,5,6,7,8,9
• TIR: 10,11,12,13,14
    in one comma-separated list. @@ -38,7 +38,7 @@
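Assuming per-band DN rasters named ast.1 ... ast.14 (placeholder names, not from this manual), a minimal Python sketch of assembling that comma-separated list could look like this:

import grass.script as gs

# Sketch only: band raster names are assumptions; check the module manual
# for the remaining options before running.
bands = [
    "ast.1", "ast.2", "ast.3N", "ast.3B",                  # VNIR: 1, 2, 3N, 3B
    "ast.4", "ast.5", "ast.6", "ast.7", "ast.8", "ast.9",  # SWIR: 4-9
    "ast.10", "ast.11", "ast.12", "ast.13", "ast.14",      # TIR: 10-14
]
# grass.script joins the list into one comma-separated input value
gs.run_command("i.aster.toar", input=bands, output="ast_toar")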

    SEE ALSO

    ASTER sensor data download: -ASTER: Advanced Spaceborne Thermal Emission and Reflection Radiometer +ASTER: Advanced Spaceborne Thermal Emission and Reflection Radiometer

    AUTHOR

    diff --git a/imagery/i.atcorr/create_iwave.py b/imagery/i.atcorr/create_iwave.py index 21faa21ac2c..827b8be54c4 100644 --- a/imagery/i.atcorr/create_iwave.py +++ b/imagery/i.atcorr/create_iwave.py @@ -59,20 +59,15 @@ def read_input(csvfile): first column is wavelength values are those of the discrete band filter functions """ - infile = open(csvfile) + with open(csvfile) as infile: + # get number of bands and band names + bands = infile.readline().strip().split(",")[1:] - # get number of bands and band names - bands = infile.readline().split(",") - bands.remove(bands[0]) - bands[-1] = bands[-1].strip() - print(" > Number of bands found: %d" % len(bands)) - infile.close() + print(f" > Number of bands found: {len(bands)}") # create converter dictionary for import # fix nodata or \n - conv = {} - for b in range(len(bands)): - conv[b + 1] = lambda s: float(s or 0) + conv = {b + 1: lambda s: float(s or 0) for b in range(len(bands))} values = np.loadtxt(csvfile, delimiter=",", skiprows=1, converters=conv) @@ -87,10 +82,8 @@ def interpolate_band(values, step=2.5): and min, max wl values values must be numpy array with 2 columns """ - # removing nodata and invalid values - w = values[:, 1] >= 0 - values_clean = values[w] + values_clean = values[values[:, 1] >= 0] wavelengths = values_clean[:, 0] # 1st column of input array responses = values_clean[:, 1] # 2nd column @@ -184,25 +177,25 @@ def pretty_print(filter_f): Create pretty string out of filter function 8 values per line, with spaces, commas and all the rest """ - pstring = "" + pstring = [] for i in range(len(filter_f) + 1): if i % 8 == 0: if i != 0: value_wo_leading_zero = ("%.4f" % (filter_f[i - 1])).lstrip("0") - pstring += value_wo_leading_zero - if i > 1 and i < len(filter_f): - pstring += ", " - if i != 1: + pstring.append(value_wo_leading_zero) + if i > 1: + if i < len(filter_f): + pstring.append(", ") # trim the trailing whitespace at the end of line - pstring = pstring.rstrip() - pstring += "\n " + pstring[-1] = pstring[-1].rstrip() + pstring.append("\n ") else: value_wo_leading_zero = ("%.4f" % (filter_f[i - 1])).lstrip("0") - pstring += value_wo_leading_zero + pstring.append(value_wo_leading_zero) if i < len(filter_f): - pstring += ", " + pstring.append(", ") # trim starting \n and trailing , - return pstring.lstrip("\n").rstrip(", ") + return "".join(pstring).lstrip("\n").rstrip(", ") def write_cpp(bands, values, sensor, folder): @@ -212,6 +205,24 @@ def write_cpp(bands, values, sensor, folder): needs other functions: interpolate_bands, pretty_print """ + def get_min_wavelength(c, rthresh, fi): + """Get minimum wavelength rounded by threshold. + + :param fi: filter function + """ + while c > 0 and fi[c - 1] > rthresh: + c -= 1 + return np.ceil(li[0] * 1000 + (2.5 * c)) + + def get_max_wavelength(c, rthresh, fi): + """Get maximum wavelength rounded by threshold. 
+ + :param fi: filter function + """ + while c < len(fi) - 1 and fi[c + 1] > rthresh: + c += 1 + return np.floor(li[0] * 1000 + (2.5 * c)) + # keep in sync with IWave::parse() rthresh = 0.01 print(" > Response peaks from interpolation to 2.5 nm steps:") @@ -225,17 +236,8 @@ def write_cpp(bands, values, sensor, folder): li = limits # Get wavelength range for spectral response in band maxresponse_idx = np.argmax(fi) - # Get minimum wavelength with spectral response - c = maxresponse_idx - while c > 0 and fi[c - 1] > rthresh: - c -= 1 - min_wavelength = np.ceil(li[0] * 1000 + (2.5 * c)) - # Get maximum wavelength with spectral response - c = maxresponse_idx - while c < len(fi) - 1 and fi[c + 1] > rthresh: - c += 1 - max_wavelength = np.floor(li[0] * 1000 + (2.5 * c)) - print(" %s (%inm - %inm)" % (bands[0], min_wavelength, max_wavelength)) + min_wavelength = get_min_wavelength(maxresponse_idx, rthresh, fi) + max_wavelength = get_max_wavelength(maxresponse_idx, rthresh, fi) else: filter_f = [] @@ -247,29 +249,17 @@ def write_cpp(bands, values, sensor, folder): # Get wavelength range for spectral response in band maxresponse_idx = np.argmax(fi) - # Get minimum wavelength with spectral response - c = maxresponse_idx - while c > 0 and fi[c - 1] > rthresh: - c -= 1 - min_wavelength = np.ceil(li[0] * 1000 + (2.5 * c)) - # Get maximum wavelength with spectral response - c = maxresponse_idx - while c < len(fi) - 1 and fi[c + 1] > rthresh: - c += 1 - max_wavelength = np.floor(li[0] * 1000 + (2.5 * c)) + min_wavelength = get_min_wavelength(maxresponse_idx, rthresh, fi) + max_wavelength = get_max_wavelength(maxresponse_idx, rthresh, fi) print(" %s (%inm - %inm)" % (bands[b], min_wavelength, max_wavelength)) # writing... outfile = open(os.path.join(folder, sensor + "_cpp_template.txt"), "w") outfile.write("/* Following filter function created using create_iwave.py */\n\n") - if len(bands) == 1: - outfile.write("void IWave::%s()\n{\n\n" % (sensor.lower())) - else: - outfile.write("void IWave::%s(int iwa)\n{\n\n" % (sensor.lower())) - # single band case if len(bands) == 1: + outfile.write("void IWave::%s()\n{\n\n" % (sensor.lower())) outfile.write(" /* %s of %s */\n" % (bands[0], sensor)) outfile.write(" static const float sr[%i] = {" % (len(filter_f))) filter_text = pretty_print(filter_f) @@ -295,6 +285,7 @@ def write_cpp(bands, values, sensor, folder): outfile.write("}\n") else: # more than 1 band + outfile.write("void IWave::%s(int iwa)\n{\n\n" % (sensor.lower())) # writing bands for b in range(len(bands)): outfile.write(" /* %s of %s */\n" % (bands[b], sensor)) @@ -305,9 +296,8 @@ def write_cpp(bands, values, sensor, folder): outfile.write(filter_text + "\n };\n\t\n") # writing band limits - for b in range(len(bands)): - inf = ", ".join(["%.4f" % i[0] for i in limits]) - sup = ", ".join(["%.4f" % i[1] for i in limits]) + inf = ", ".join(["%.4f" % i[0] for i in limits]) + sup = ", ".join(["%.4f" % i[1] for i in limits]) outfile.write(" static const float wli[%i] = {%s};\n" % (len(bands), inf)) outfile.write(" static const float wls[%i] = {%s};\n" % (len(bands), sup)) diff --git a/imagery/i.atcorr/i.atcorr.html b/imagery/i.atcorr/i.atcorr.html index 0b905d87aad..8365c6294cf 100644 --- a/imagery/i.atcorr/i.atcorr.html +++ b/imagery/i.atcorr/i.atcorr.html @@ -837,26 +837,26 @@

    Atmospheric correction of a Sentinel-2 band

    particular scene and band. To create a 6S file, we need to obtain the following information:
• geometrical conditions,
• month, day, decimal hours in GMT, decimal longitude and latitude of measurement,
• atmospheric model,
• aerosol model,
• visibility or aerosol optical depth,
• mean target elevation above sea level,
• sensor height, and
• sensor band.
1. Geometrical conditions

      For Sentinel-2A, the geometrical conditions take the value 25 and for -Sentinel-2B, the geometrical conditions value is 26 (See table A). +

      For Sentinel-2A, the geometrical conditions take the value 25 and for +Sentinel-2B, the geometrical conditions value is 26 (See table A). Our scene comes from the Sentinel-2A mission (the file name begins with S2A_...).

    2. Day, time, longitude and latitude of measurement

      Day and time of the measurement are hidden in the filename (i.e., the -second datum in the file name with format YYYYMMDDTHHMMSS), +second datum in the file name with format YYYYMMDDTHHMMSS), and are also noted in the metadata file, which is included in the downloaded scene (file with .xml extension). Our sample scene was taken on October 28th (20161028) at 15:54:02 (155402). Note @@ -873,20 +873,20 @@

      Atmospheric correction of a Sentinel-2 band

      The longitude and latitude of the centre are stored in ll_clon -and ll_clat. In our case, ll_clon=-78.691 and -ll_clat=35.749. +and ll_clat. In our case, ll_clon=-78.691 and +ll_clat=35.749.

    3. Atmospheric model

      We can choose between various atmospheric models as defined at the -beginning of this manual. For North Carolina, we can choose 2 - -midlatitude summer. +beginning of this manual. For North Carolina, we can choose 2 - +midlatitude summer.

    4. Aerosol model

      We can also choose between various aerosol models as defined at the -beginning of this manual. For North Carolina, we can choose 1 - -continental model. +beginning of this manual. For North Carolina, we can choose 1 - +continental model.

    5. Visibility or Aerosol Optical Depth @@ -909,13 +909,13 @@

      Atmospheric correction of a Sentinel-2 band

      The mean elevation is stored in mean. In our case, -mean=110. In the 6S file it will be displayed in [-km], -i.e., -0.110. +mean=110. In the 6S file it will be displayed in [-km], +i.e., -0.110.

    6. Sensor height

      Since the sensor is on board a satellite, the sensor height will be -set to -1000. +set to -1000.

    7. Sensor band @@ -926,7 +926,7 @@

      Atmospheric correction of a Sentinel-2 band

      Finally, here is what the 6S file would look like for Band 02 of our scene. In order to use it in the i.atcorr module, we can save -it in a text file, for example params_B02.txt. +it in a text file, for example params_B02.txt.

       25
       10 28 15.901 -78.691 35.749
      @@ -944,17 +944,17 @@ 

      Atmospheric correction of a Sentinel-2 band

      B02 of our Sentinel 2 scene. We have to specify the following parameters:
• input = raster band to be processed,
• parameters = path to 6S file created in the previous step (we could also enter the values directly),
• output = name for the output corrected raster band,
• range = from 1 to the QUANTIFICATION_VALUE stored in the metadata file. It is 10000 for both Sentinel-2A and Sentinel-2B.
• rescale = the output range of values for the corrected bands. This is up to the user to choose, for example: 0-255, 0-1, 1-10000.

      If the data is available, the following parameters can be specified as well:

• elevation = raster of digital elevation model,
• visibility = raster of visibility model.

(A minimal i.atcorr call with these options is sketched below.)
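As a rough sketch only (the manual's own command follows below), the options listed above could be combined from Python like this; the map names, file path, and value ranges are placeholders:

import grass.script as gs

# Hedged sketch, not the manual's example command.
gs.run_command(
    "i.atcorr",
    input="B02",                  # raster band to be processed
    parameters="params_B02.txt",  # 6S file from the previous step
    output="B02_corrected",       # corrected output band
    range=(1, 10000),             # 1 .. QUANTIFICATION_VALUE
    rescale=(0, 255),             # chosen output range
    elevation="elevation",        # optional, if available
)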

      Finally, this is how the command would look like to apply atmospheric @@ -1007,7 +1007,7 @@

      Atmospheric correction of a Landsat-7 band

      If the overpass time is unknown, use the -NASA LaRC Satellite Overpass Predictor. +NASA LaRC Satellite Overpass Predictor.

      Convert digital numbers (DN) to radiance at top-of-atmosphere (TOA)

      @@ -1039,7 +1039,7 @@

      Convert digital numbers (DN) to radiance at top-of-atmosphere (TOA)

      and maximal DN value, and they are reported in the metadata file of each image. High gain or low gain is also reported in the metadata file of each satellite image. For Landsat ETM+, the minimal DN value (QCALMIN) is 1 -(see Landsat handbook, chapter 11), +(see Landsat handbook, chapter 11), and the maximal DN value (QCALMAX) is 255. QCAL is the DN value for every separate pixel in the Landsat image.

      We extract the coefficients and apply them in order to obtain the @@ -1072,7 +1072,7 @@

      Create the parameters file for i.atcorr

      defining geometrical and atmospherical conditions of the satellite overpass. -Here we create a control file icnd_lsat4.txt for band 4 (NIR), +Here we create a control file icnd_lsat4.txt for band 4 (NIR), based on metadata. For the overpass time, we need to define decimal hours: 10:42:07 NC local time = 10.70 decimal hours (decimal minutes: 42 * 100 / 60) which is 15.70 GMT. @@ -1098,7 +1098,7 @@

      Create the parameters file for i.atcorr

      Note that the altitude value from 'icnd_lsat4.txt' file is read at the beginning to compute the initial transform. Therefore, it is necessary to provide a value that might be the mean value of the elevation model -(r.univar elevation). For the atmospheric correction per se, the +(r.univar elevation). For the atmospheric correction per se, the elevation values from the raster map are used.

      Note that the process is computationally intensive. Note also, that i.atcorr reports solar elevation angle above horizon rather @@ -1115,20 +1115,20 @@

      REFERENCES

      • Vermote, E.F., Tanre, D., Deuze, J.L., Herman, M., and Morcrette, J.J., 1997, Second simulation of the satellite signal in the solar spectrum, 6S: An -overview., IEEE Trans. Geosc. and Remote Sens. 35(3):675-686. +overview., IEEE Trans. Geosc. and Remote Sens. 35(3):675-686.
• 6S Manual: PDF1, PDF2, and PDF3
      • RapidEye sensors have been provided by RapidEye AG, Germany
      • Barsi, J.A., Markham, B.L. and Pedelty, J.A., 2011, The operational land imager: spectral response and spectral uniformity., Proc. SPIE 8153, -81530G; doi:10.1117/12.895438 +81530G; doi:10.1117/12.895438

      SEE ALSO

      @@ -1164,10 +1164,10 @@

      AUTHORS

      RapidEye sensors addition 11/2010:
      Peter Löwe, Anne Ghisla -

      VGT1 and VGT2 sensors addition from 6SV-1.1 sources, addition 07/2011: +

      VGT1 and VGT2 sensors addition from 6SV-1.1 sources, addition 07/2011:
      Alfredo Alessandrini, Anne Ghisla -

      Added Landsat 8 from NASA sources, addition 05/2014: +

      Added Landsat 8 from NASA sources, addition 05/2014:
      Nikolaos Ves

      Geoeye1 addition 7/2015: diff --git a/imagery/i.biomass/i.biomass.html b/imagery/i.biomass/i.biomass.html index 563fffa4f8f..152ec3131aa 100644 --- a/imagery/i.biomass/i.biomass.html +++ b/imagery/i.biomass/i.biomass.html @@ -4,12 +4,12 @@

      DESCRIPTION

      Input:
• fPAR, the modified Photosynthetic Active Radiation for crops.
• Light Use Efficiency [0.0-1.0], in Uzbekistan cotton is at 1.9 most of the time.
• Latitude [0.0-90.0], from r.latlong.
• DOY [1-366].
• Transmissivity of the atmosphere single-way [0.0-1.0], mostly around 0.7+ in clear sky.
• Water availability [0.0-1.0], possibly using direct output from i.eb.evapfr.

      NOTES

      diff --git a/imagery/i.cluster/i.cluster.html b/imagery/i.cluster/i.cluster.html index 8b775c54d80..e7cff674606 100644 --- a/imagery/i.cluster/i.cluster.html +++ b/imagery/i.cluster/i.cluster.html @@ -18,8 +18,8 @@

      DESCRIPTION

      -
      - +
      +
      @@ -207,7 +207,7 @@

      Parameters:


      Default: 17 - +
      reportfile=name
      The reportfile is an optional parameter which contains diff --git a/imagery/i.eb.eta/i.eb.eta.html b/imagery/i.eb.eta/i.eb.eta.html index 7915c7eba1a..0cfc9841801 100644 --- a/imagery/i.eb.eta/i.eb.eta.html +++ b/imagery/i.eb.eta/i.eb.eta.html @@ -9,10 +9,10 @@

      NOTES

      Full ETa processing will need those:
• i.vi, i.albedo, r.latlong, i.emissivity
• i.evapo.potrad (GRASS Addon)
• i.eb.netrad, i.eb.soilheatflux, i.eb.hsebal01
• i.eb.evapfr, i.eb.eta
      (for time integration: i.evapo.time_integration) diff --git a/imagery/i.eb.hsebal01/i.eb.hsebal01.html b/imagery/i.eb.hsebal01/i.eb.hsebal01.html index 4501af13c3c..962c6273f67 100644 --- a/imagery/i.eb.hsebal01/i.eb.hsebal01.html +++ b/imagery/i.eb.hsebal01/i.eb.hsebal01.html @@ -8,10 +8,10 @@

      DESCRIPTION

      Full process will need those:
• i.vi, i.albedo, r.latlong, i.emissivity
• i.evapo.potrad (GRASS Addon)
• i.eb.netrad, i.eb.soilheatflux, i.eb.hsebal01
• i.eb.evapfr, i.eb.eta
      (for time integration: i.evapo.time_integration) @@ -24,12 +24,12 @@

      DESCRIPTION

      NOTES

• z0m can be calculated by i.eb.z0m or i.eb.z0m0 (GRASS Addons).
• ea can be calculated with standard meteorological data.
  eoTmin=0.6108*EXP(17.27*Tmin/(Tmin+237.3))
  eoTmax=0.6108*EXP(17.27*Tmax/(Tmax+237.3))
  ea=(RH/100)/((eoTmin+eoTmax)/2)
• t0dem = surface temperature + (altitude * 0.627 / 100)

      REFERENCES

      diff --git a/imagery/i.eb.netrad/i.eb.netrad.html b/imagery/i.eb.netrad/i.eb.netrad.html index c6cba3ad484..d4d96b7d966 100644 --- a/imagery/i.eb.netrad/i.eb.netrad.html +++ b/imagery/i.eb.netrad/i.eb.netrad.html @@ -14,7 +14,7 @@

      NOTES

      In the old methods, dT was taken as flat images (dT=5.0), if you don't have a dT map from ground data, you would want to try something in this line, this is to calculate atmospherical energy balance. In the same way, a standard tsw is used -in those equations. Refer to r_net.c for that and for other non-used equations, +in those equations. Refer to r_net.c for that and for other non-used equations, but stored in there for further research convenience.

      TODO

      diff --git a/imagery/i.emissivity/i.emissivity.html b/imagery/i.emissivity/i.emissivity.html index e6419e8f776..0b44b664442 100644 --- a/imagery/i.emissivity/i.emissivity.html +++ b/imagery/i.emissivity/i.emissivity.html @@ -29,7 +29,7 @@

      REFERENCES

• Rubio, E., V. Caselles, and C. Badenas, 1997. Emissivity measurements of several soils and vegetation types in the 8-14 µm wave band: Analysis of two field methods. Remote Sensing of Environment 59(3): 490-521.
SEE ALSO

      diff --git a/imagery/i.evapo.pm/i.evapo.pm.html b/imagery/i.evapo.pm/i.evapo.pm.html index b9f5f66d8e9..efa25bfed44 100644 --- a/imagery/i.evapo.pm/i.evapo.pm.html +++ b/imagery/i.evapo.pm/i.evapo.pm.html @@ -18,9 +18,9 @@

      DESCRIPTION

Land and water surfaces are identified by Vh:

• where Vh > 0 vegetation is present and evapotranspiration is calculated;
• where Vh = 0 bare ground is present and evapotranspiration is calculated;
• where Vh < 0 water surface is present and evaporation is calculated (see the sketch below).
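A hedged one-line illustration of that rule with r.mapcalc (the map names vh and surface_class are assumptions; i.evapo.pm applies the rule internally):

import grass.script as gs

# Sketch: label pixels by the Vh sign convention described above
# (1 = vegetation, 2 = bare ground, 3 = water surface).
gs.mapcalc("surface_class = if(vh > 0, 1, if(vh == 0, 2, 3))")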

      For more details on the algorithms see [1,2,3]. @@ -46,7 +46,7 @@

      NOTES

      REFERENCES

      [1] Cannata M., 2006. - GIS embedded approach for Free & Open Source Hydrological Modelling. PhD thesis, Department of Geodesy and Geomatics, Polytechnic of Milan, Italy. + GIS embedded approach for Free & Open Source Hydrological Modelling. PhD thesis, Department of Geodesy and Geomatics, Polytechnic of Milan, Italy.

      [2] Allen, R.G., L.S. Pereira, D. Raes, and M. Smith. 1998. Crop Evapotranspiration: Guidelines for computing crop water requirements. @@ -73,9 +73,8 @@

      AUTHORS

Original version of program: The HydroFOSS project, 2006, IST-SUPSI. (http://istgis.ist.supsi.ch:8001/geomatica/index.php?id=1)
Massimiliano Cannata, Scuola Universitaria Professionale della Svizzera Italiana - Istituto Scienze della Terra
Maria A. Brovelli, Politecnico di Milano - Polo regionale di Como

      Contact: Massimiliano Cannata diff --git a/imagery/i.evapo.pt/i.evapo.pt.html b/imagery/i.evapo.pt/i.evapo.pt.html index cd890e425ee..10887a5bb50 100644 --- a/imagery/i.evapo.pt/i.evapo.pt.html +++ b/imagery/i.evapo.pt/i.evapo.pt.html @@ -12,13 +12,13 @@

      NOTES

      Alpha values:

      • 1.32 for estimates from vegetated areas as a result of the increase in -surface roughness (Morton, 1983; Brutsaert and Stricker, 1979) +surface roughness (Morton, 1983; Brutsaert and Stricker, 1979)
      • 1.26 is applicable in humid climates (De Bruin and Keijman, 1979; Stewart and Rouse, 1976; Shuttleworth and Calder, 1979), and temperate -hardwood swamps (Munro, 1979) +hardwood swamps (Munro, 1979)
      • 1.74 has been recommended for estimating potential evapotranspiration in more arid regions (ASCE, 1990). This worked well in Greece with University -of Thessaloniki. +of Thessaloniki.
      Alpha values extracted from: diff --git a/imagery/i.evapo.time/i.evapo.time.html b/imagery/i.evapo.time/i.evapo.time.html index c0aa1bdac15..15b5bb17b34 100644 --- a/imagery/i.evapo.time/i.evapo.time.html +++ b/imagery/i.evapo.time/i.evapo.time.html @@ -5,17 +5,17 @@

      DESCRIPTION

      Inputs:
• ETa images
• ETa images DOY (Day of Year)
• ETo images
• ETo DOYmin as a single value
      Method:
1. each ETa pixel is divided by the same-day ETo and becomes ETrF
2. each ETrF pixel is multiplied by the ETo sum for the representative days
3. sum all n temporal [ETrF*ETo_sum] pixels to make a summed(ET) in [DOYmin;DOYmax] (see the sketch below)
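A hedged sketch of those three steps with r.mapcalc from Python; the map names (eta.N, eto.N, etosum.N) and the number of time steps are assumptions for illustration, not the module's internal implementation:

import grass.script as gs

# Sketch only: assumed map names and three acquisition dates.
steps = 3
for i in range(1, steps + 1):
    # 1. ETrF = ETa / same-day ETo
    gs.mapcalc(f"etrf.{i} = eta.{i} / eto.{i}")
    # 2. weight ETrF by the ETo sum of its representative days
    gs.mapcalc(f"et_part.{i} = etrf.{i} * etosum.{i}")

# 3. temporal sum over [DOYmin;DOYmax]
gs.mapcalc("et_sum = " + " + ".join(f"et_part.{i}" for i in range(1, steps + 1)))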
      representative days calculation: @@ -35,8 +35,8 @@

      NOTES

      n=0 for ETo_val in Eto[1] Eto[2] ... do - r.mapcalc "eto$n = $ETo_val" - `expr n = n + 1` + r.mapcalc "eto$n = $ETo_val" + `expr n = n + 1` done diff --git a/imagery/i.fft/i.fft.html b/imagery/i.fft/i.fft.html index 8eb63a9c8ae..26782835c5c 100644 --- a/imagery/i.fft/i.fft.html +++ b/imagery/i.fft/i.fft.html @@ -42,10 +42,10 @@

      REFERENCES

• M. Frigo and S. G. Johnson (1998): "FFTW: An Adaptive Software Architecture for the FFT". See www.FFTW.org: FFTW is a C subroutine library for computing the Discrete Fourier Transform (DFT) in one or more dimensions, of both real and complex data, and of arbitrary input size.
• John A. Richards, 1986. Remote Sensing Digital Image Analysis, Springer-Verlag.
• Personal communication, between program author and Ali R. Vali, Space Research Center, University of Texas, Austin, 1990.
SEE ALSO

      diff --git a/imagery/i.gensig/i.gensig.html b/imagery/i.gensig/i.gensig.html index 97db7777447..b878fc41f73 100644 --- a/imagery/i.gensig/i.gensig.html +++ b/imagery/i.gensig/i.gensig.html @@ -60,7 +60,7 @@

      Parameters

      image.

      -

      subgroup=name +
      subgroup=name
      subgroup containing image files

      @@ -108,13 +108,13 @@

      NOTES

        -
• Line 1: version number (currently always 1)
• Line 2: text label
• Line 3: Space separated list of semantic labels
• Line 4: text label of class
• Line 5: number of points in class
• Line 6: mean values per band of the class
• Line 7-12: (semi)-matrix of band-band covariance

      SEE ALSO

      diff --git a/imagery/i.gensigset/i.gensigset.html b/imagery/i.gensigset/i.gensigset.html index 3c5c170dceb..5db3efa5802 100644 --- a/imagery/i.gensigset/i.gensigset.html +++ b/imagery/i.gensigset/i.gensigset.html @@ -206,13 +206,13 @@

      WARNINGS

      REFERENCES

      diff --git a/imagery/i.group/main.c b/imagery/i.group/main.c index d9a51e01073..84c08cc83e7 100644 --- a/imagery/i.group/main.c +++ b/imagery/i.group/main.c @@ -448,6 +448,7 @@ static void print_subgroups(const char *group, const char *mapset, int simple) if (subgs_num <= 0) { fprintf(stdout, _("Group <%s> does not contain any subgroup.\n"), group); + G_free(subgs); return; } max = 0; diff --git a/imagery/i.landsat.acca/i.landsat.acca.html b/imagery/i.landsat.acca/i.landsat.acca.html index 5e594389762..41f69905706 100644 --- a/imagery/i.landsat.acca/i.landsat.acca.html +++ b/imagery/i.landsat.acca/i.landsat.acca.html @@ -22,9 +22,9 @@

      EXAMPLES

      Run the standard ACCA algorithm with filling of small cloud holes (the -f flag): With per-band reflectance raster maps -named 226_62.toar.1, 226_62.toar.2, [...] and LANDSAT-7 -thermal band 226_62.toar.61, outputting to a new raster map -named 226_62.acca: +named 226_62.toar.1, 226_62.toar.2, ... and LANDSAT-7 +thermal band 226_62.toar.61, outputting to a new raster map +named 226_62.acca:
       i.landsat.toar sensor=7 gain=HHHLHLHHL date=2003-04-07 \
      @@ -35,7 +35,7 @@ 

      EXAMPLES

      REFERENCES

      -
        +
        • Irish R.R., Barker J.L., Goward S.N., and Arvidson T., 2006. Characterization of the Landsat-7 ETM+ Automated Cloud-Cover Assessment (ACCA) Algorithm. Photogrammetric Engineering and Remote @@ -45,7 +45,7 @@

          REFERENCES

          S.S. Shen and M.R. Descour (Eds.): Algorithms for Multispectral, Hyperspectral, and Ultraspectral Imagery VI. Proceedings of SPIE, 4049: 348-355.
        • -
      +

      SEE ALSO

      diff --git a/imagery/i.landsat.toar/i.landsat.toar.html b/imagery/i.landsat.toar/i.landsat.toar.html index 4a792c26274..3305c80b7de 100644 --- a/imagery/i.landsat.toar/i.landsat.toar.html +++ b/imagery/i.landsat.toar/i.landsat.toar.html @@ -243,7 +243,7 @@

      DOS1 example

      product_date=2004-02-12 gain=HHHLHLHHL
      -The resulting Landsat channels are named lsat7_2002_toar.1 .. lsat7_2002_toar.8. +The resulting Landsat channels are named lsat7_2002_toar.1 .. lsat7_2002_toar.8.

      REFERENCES

      diff --git a/imagery/i.modis.qc/i.modis.qc.html b/imagery/i.modis.qc/i.modis.qc.html index 64553966a27..8f96364a937 100644 --- a/imagery/i.modis.qc/i.modis.qc.html +++ b/imagery/i.modis.qc/i.modis.qc.html @@ -498,12 +498,12 @@

      NOTES

      MOD11A2 quality control (QC) bands do not have a FillValue (No-data) according to MODIS Land Products site. -However, the metadata of the QC bands (i.e.: gdalinfo QC_band) shows No-data=0. +However, the metadata of the QC bands (i.e.: gdalinfo QC_band) shows No-data=0. This value is then transformed into GRASS NULLs when data is imported through r.in.gdal. Applying i.modis.qc on those QC bands will not give the expected range of values in the different QC bits. Therefore, before using i.modis.qc, the user needs to set the NULL value in QC bands -back to zero (i.e.: r.null map=QC_band null=0) or just edit the metadata with GDAL +back to zero (i.e.: r.null map=QC_band null=0) or just edit the metadata with GDAL utilities before importing into GRASS GIS. This is a known issue for MOD11A2 (8-day LST product), but other MODIS products might be affected as well. @@ -514,10 +514,10 @@

      TODO

      REFERENCES

        -
      • MODIS Products +
      • MODIS Products
      • Vermote E.F., Kotchenova S.Y., Ray J.P. MODIS Surface Reflectance User's Guide. Version 1.2. June 2008. MODIS Land Surface Reflectance Science Computing Facility. - Homepage + Homepage

      SEE ALSO

      diff --git a/imagery/i.ortho.photo/i.ortho.camera/i.ortho.camera.html b/imagery/i.ortho.photo/i.ortho.camera/i.ortho.camera.html index 535ef8e03a7..3159e4812b2 100644 --- a/imagery/i.ortho.photo/i.ortho.camera/i.ortho.camera.html +++ b/imagery/i.ortho.photo/i.ortho.camera/i.ortho.camera.html @@ -24,12 +24,12 @@

      DESCRIPTION

       
      -	CAMERA NAME:               camera name______
      -	CAMERA IDENTIFICATION:     identification___
      -	CALIBRATED FOCAL LENGTH mm.:_________________
      -	POINT OF SYMMETRY (X)   mm.:_________________
      -	POINT OF SYMMETRY (Y)   mm.:_________________
      -	MAXIMUM NUMBER OF FIDUCIALS:_________________
      +    CAMERA NAME:               camera name______
      +    CAMERA IDENTIFICATION:     identification___
      +    CALIBRATED FOCAL LENGTH mm.:_________________
      +    POINT OF SYMMETRY (X)   mm.:_________________
      +    POINT OF SYMMETRY (Y)   mm.:_________________
      +    MAXIMUM NUMBER OF FIDUCIALS:_________________
       
          AFTER COMPLETING ALL ANSWERS, HIT <ESC> TO CONTINUE
                      (OR <Ctrl-C> TO CANCEL)
      @@ -73,20 +73,20 @@ 

      DESCRIPTION

      Please provide the following information

      -	Fid#	FID ID		  X          Y
      -
      -	1__	_____		0.0___	0.0___
      -	2__	_____		0.0___	0.0___
      -	3__	_____		0.0___	0.0___
      -	4__	_____		0.0___	0.0___
      -	5__	_____		0.0___	0.0___
      -	6__	_____		0.0___	0.0___
      -	7__	_____		0.0___	0.0___
      -	8__	_____		0.0___	0.0___
      -	9__	_____		0.0___	0.0___
      -	10_	_____		0.0___	0.0___
      -
      -		     next:  end__
      +    Fid#    FID ID          X          Y
      +
      +    1__    _____        0.0___    0.0___
      +    2__    _____        0.0___    0.0___
      +    3__    _____        0.0___    0.0___
      +    4__    _____        0.0___    0.0___
      +    5__    _____        0.0___    0.0___
      +    6__    _____        0.0___    0.0___
      +    7__    _____        0.0___    0.0___
      +    8__    _____        0.0___    0.0___
      +    9__    _____        0.0___    0.0___
      +    10_    _____        0.0___    0.0___
      +
      +             next:  end__
       
            AFTER COMPLETING ALL ANSWERS, HIT <ESC> TO CONTINUE
                           (OR <Ctrl-C> TO CANCEL)
      diff --git a/imagery/i.ortho.photo/i.ortho.elev/i.ortho.elev.html b/imagery/i.ortho.photo/i.ortho.elev/i.ortho.elev.html
      index ab5b66eaa38..21fedea7e7f 100644
      --- a/imagery/i.ortho.photo/i.ortho.elev/i.ortho.elev.html
      +++ b/imagery/i.ortho.photo/i.ortho.elev/i.ortho.elev.html
      @@ -1,6 +1,14 @@
       

      DESCRIPTION

      -i.ortho.elev is used to select or modify the target elevation model. +i.ortho.elev is used to select or modify the target elevation +model for orthorectification of imagery. +This elevation model is essential for both the computation of photo-to-target +parameters and for the actual orthorectification of imagery group files. +The elevation model selected should cover the entire area of the image +group to be orthorectified. + +Optionally, scaled elevation data can be converted to real elevation values +specifying a mathematical expression.

      SEE ALSO

      diff --git a/imagery/i.ortho.photo/i.ortho.init/i.ortho.init.html b/imagery/i.ortho.photo/i.ortho.init/i.ortho.init.html index 5da40c86344..a9e86ab2a0b 100644 --- a/imagery/i.ortho.photo/i.ortho.init/i.ortho.init.html +++ b/imagery/i.ortho.photo/i.ortho.init/i.ortho.init.html @@ -19,9 +19,8 @@

      DESCRIPTION

parameters. During the imagery program, i.photo.rectify, the initial camera exposure station file is used for computation of the ortho-rectification parameters. If no initial camera exposure station file exists, the default values are computed from the control points file created in g.gui.image2target.

      @@ -29,19 +28,19 @@

      DESCRIPTION

               Please provide the following information
       
      -	INITIAL XC: Meters                __________
      -	INITIAL YC: Meters                __________
      -	INITIAL ZC: Meters                __________
      -	INITIAL omega (pitch) degrees:    __________
      -	INITIAL phi  (roll) degrees:      __________
      -	INITIAL kappa  (yaw) degrees:     __________
      +    INITIAL XC: Meters                __________
      +    INITIAL YC: Meters                __________
      +    INITIAL ZC: Meters                __________
      +    INITIAL omega (pitch) degrees:    __________
      +    INITIAL phi  (roll) degrees:      __________
      +    INITIAL kappa  (yaw) degrees:     __________
       
      -	Standard Deviation XC: Meters     __________
      -	Standard Deviation YC: Meters     __________
      -	Standard Deviation ZC: Meters     __________
      -	Std. Dev. omega (pitch) degrees:  __________
      -	Std. Dev. phi  (roll) degrees:    __________
      -	Std. Dev. kappa  (yaw) degrees:   __________
      +    Standard Deviation XC: Meters     __________
      +    Standard Deviation YC: Meters     __________
      +    Standard Deviation ZC: Meters     __________
      +    Std. Dev. omega (pitch) degrees:  __________
      +    Std. Dev. phi  (roll) degrees:    __________
      +    Std. Dev. kappa  (yaw) degrees:   __________
       
               Use these values at run time? (1=yes, 0=no)
       
      @@ -55,9 +54,9 @@ 

      DESCRIPTION

      exposure.
        -
      • X: East aircraft position; -
      • Y: North aircraft position; -
      • Z: Flight altitude above sea level +
      • X: East aircraft position;
      • +
      • Y: North aircraft position;
      • +
      • Z: Flight altitude above sea level

      @@ -68,12 +67,12 @@

      DESCRIPTION

      • Omega (pitch): Raising or lowering of the aircraft's front (turning - around the wings' axis); + around the wings' axis);
      • Phi (roll): Raising or lowering of the wings (turning around the - aircraft's axis); + aircraft's axis);
      • Kappa (yaw): Rotation needed to align the aerial photo to true north: needs to be denoted as +90 degree for clockwise turn and -90 degree for - a counterclockwise turn. + a counterclockwise turn.

      diff --git a/imagery/i.ortho.photo/i.ortho.photo/i.ortho.photo.html b/imagery/i.ortho.photo/i.ortho.photo/i.ortho.photo.html index c7ce061b01d..4e64c5a2b5e 100644 --- a/imagery/i.ortho.photo/i.ortho.photo/i.ortho.photo.html +++ b/imagery/i.ortho.photo/i.ortho.photo/i.ortho.photo.html @@ -11,7 +11,7 @@

      DESCRIPTION

      • Initialization Options
          -
        1. Create/Modify imagery group to be orthorectified: +
        2. Create/Modify imagery group to be orthorectified: i.group
        3. Select/Modify target project (formerly known as location) and mapset for orthorectification: i.ortho.target
        4. @@ -23,7 +23,7 @@

          DESCRIPTION

        5. Transformation Parameters Computation
            -
          1. Compute image-to-photo transformation: +
          2. Compute image-to-photo transformation: g.gui.photo2image
          3. Initialize parameters of camera: i.ortho.init
          4. @@ -34,7 +34,7 @@

            DESCRIPTION

          5. Ortho-rectification
              -
            1. Ortho-rectify imagery group: +
            2. Ortho-rectify imagery group: i.ortho.rectify
          6. @@ -229,7 +229,7 @@

            EXAMPLE

          7. Y: North aircraft position;
          8. Z: Flight height above surface;
          9. Omega (pitch): Raising or lowering of the aircraft's front - (turning around the wings' axis);
          10. + (turning around the wings' axis);
          11. Phi (roll): Raising or lowering of the wings (turning around the aircraft's axis);
          12. Kappa (yaw): Rotation needed to align the aerial photo to diff --git a/imagery/i.ortho.photo/i.ortho.rectify/i.ortho.rectify.html b/imagery/i.ortho.photo/i.ortho.rectify/i.ortho.rectify.html index 96a649d8b8e..607e53f70bc 100644 --- a/imagery/i.ortho.photo/i.ortho.rectify/i.ortho.rectify.html +++ b/imagery/i.ortho.photo/i.ortho.rectify/i.ortho.rectify.html @@ -1,4 +1,4 @@ -

            DESCRIPTION

            +

            DESCRIPTION

            i.photo.rectify rectifies an image by using the image to photo coordinate transformation matrix created by g.gui.photo2image @@ -44,7 +44,7 @@

            DESCRIPTION

            i.ortho.photo, an interactive terminal is used to determine the options. -

            Interactive mode

            +

            Interactive mode

            You are first asked if all images within the imagery group should be rectified. If this option is not chosen, you are asked to specify for each image within the imagery group whether it should be rectified or not. @@ -89,19 +89,19 @@

            Interactive mode

            The last prompt will ask you about the amount of memory to be used by i.photo.rectify. -

            SEE ALSO

            +

            SEE ALSO

            -i.ortho.photo
            -i.ortho.camera
            -g.gui.photo2image
            -g.gui.image2target
            -i.ortho.init
            +i.ortho.photo, +i.ortho.camera, +g.gui.photo2image, +g.gui.image2target, +i.ortho.init, i.rectify
            -

            AUTHORS

            +

            AUTHORS

            Mike Baba, DBA Systems, Inc.
            Updated rectification and elevation map to FP 1/2002 Markus Neteler
            diff --git a/imagery/i.ortho.photo/i.ortho.target/i.ortho.target.html b/imagery/i.ortho.photo/i.ortho.target/i.ortho.target.html index 312cff5599d..07a4c5c6a2e 100644 --- a/imagery/i.ortho.photo/i.ortho.target/i.ortho.target.html +++ b/imagery/i.ortho.photo/i.ortho.target/i.ortho.target.html @@ -1,22 +1,21 @@ -

            DESCRIPTION

            +

            DESCRIPTION

            -i.ortho.target sets the image group target project (location) and mapset -

            +i.ortho.target sets the image group target project (location) and mapset. -

            SEE ALSO

            +

            SEE ALSO

            -i.ortho.photo
            -i.ortho.elev
            -i.ortho.camera
            -g.gui.photo2image
            -g.gui.image2target
            -i.ortho.init
            +i.ortho.photo, +i.ortho.elev, +i.ortho.camera, +g.gui.photo2image, +g.gui.image2target, +i.ortho.init, i.ortho.rectify
            -

            AUTHOR

            +

            AUTHOR

            Mike Baba, DBA Systems, Inc.
            GRASS development team, 2017 diff --git a/imagery/i.ortho.photo/i.ortho.transform/i.ortho.transform.html b/imagery/i.ortho.photo/i.ortho.transform/i.ortho.transform.html index 4bb60a9c633..9eb8b293b33 100644 --- a/imagery/i.ortho.photo/i.ortho.transform/i.ortho.transform.html +++ b/imagery/i.ortho.photo/i.ortho.transform/i.ortho.transform.html @@ -5,7 +5,7 @@

            DESCRIPTION

            If coordinates are given with the input file option or fed from -stdin, both the input and the output format is "x y z" with one +stdin, both the input and the output format is "x y z" with one coordinate pair per line. Reverse transform is performed with the -r flag. @@ -28,7 +28,7 @@

            SEE ALSO

            TODO

            -Update this document with x,y,z<->E,N,H information +Update this document with x,y,z<->E,N,H information
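A minimal sketch of reading the "x y z" format described above, one coordinate triplet per line from stdin. This is an illustrative reader only, not the module's own parsing code:

```c
#include <stdio.h>

/* Read "x y z" triplets, one per line, e.g. piped in on stdin. */
int main(void)
{
    char line[256];
    double x, y, z;

    while (fgets(line, sizeof(line), stdin)) {
        if (sscanf(line, "%lf %lf %lf", &x, &y, &z) == 3)
            printf("%f %f %f\n", x, y, z); /* transformed values would go here */
    }
    return 0;
}
```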

            AUTHORS

            diff --git a/imagery/i.pca/main.c b/imagery/i.pca/main.c index 427cf5f8730..8cc6c4f58fe 100644 --- a/imagery/i.pca/main.c +++ b/imagery/i.pca/main.c @@ -314,6 +314,7 @@ static int calc_mu_cov(int *fds, double **covar, double *mu, double *stddev, DCELL **rowbuf = (DCELL **)G_malloc(bands * sizeof(DCELL *)); double **sum2 = (double **)G_calloc(bands, sizeof(double *)); double *sumsq, *sd, *sum; + int ret = 1; if (stddev) { sumsq = (double *)G_calloc(bands, sizeof(double)); @@ -358,8 +359,10 @@ static int calc_mu_cov(int *fds, double **covar, double *mu, double *stddev, } G_percent(1, 1, 1); - if (count < 2) - return 0; + if (count < 2) { + ret = 0; + goto free_exit; + } for (i = 0; i < bands; i++) { if (stddev) { @@ -378,22 +381,21 @@ static int calc_mu_cov(int *fds, double **covar, double *mu, double *stddev, if (j != i) covar[j][i] = covar[i][j]; } - - G_free(sum2[i]); - G_free(rowbuf[i]); } for (i = 0; i < bands; i++) mu[i] = sum[i] / count; +free_exit: + for (i = 0; i < bands; i++) { + G_free(sum2[i]); + G_free(rowbuf[i]); + } G_free(rowbuf); - G_free(sum2); - if (sd) - G_free(sd); - if (sumsq) - G_free(sumsq); + G_free(sd); + G_free(sumsq); - return 1; + return ret; } static int write_pca(double **eigmat, double *mu, double *stddev, int *inp_fd, @@ -571,6 +573,8 @@ static int write_pca(double **eigmat, double *mu, double *stddev, int *inp_fd, G_free(min); G_free(max); G_free(old_range); + G_free(pcs); + G_free(out_fd); return 0; } diff --git a/imagery/i.rectify/i.rectify.html b/imagery/i.rectify/i.rectify.html index 7ef374b948e..6bfc49dd4c1 100644 --- a/imagery/i.rectify/i.rectify.html +++ b/imagery/i.rectify/i.rectify.html @@ -11,8 +11,8 @@

            DESCRIPTION

are first, second, and third order polynomial and thin plate spline. Thin plate spline is recommended for ungeoreferenced satellite imagery where ground control points (GCPs) are included. Examples are
-NOAA/AVHRR
-and ENVISAT
+NOAA/AVHRR
+and ENVISAT
imagery which include thousands of GCPs.

            @@ -66,11 +66,11 @@

            Coordinate transformation

            Linear affine transformation (1st order transformation)

            -
            x' = ax + by + c -
            y' = Ax + By + C +
            x' = ax + by + c +
            y' = Ax + By + C
            -The a,b,c,A,B,C are determined by least squares regression +The a, b, c, A, B, C are determined by least squares regression based on the control points entered. This transformation applies scaling, translation and rotation. It is NOT a general purpose rubber-sheeting like TPS, nor is it ortho-photo @@ -144,9 +144,9 @@
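For reference, a minimal C sketch of the least-squares fit described above, using the normal equations and Cramer's rule to obtain a, b, c for x' = ax + by + c (the A, B, C coefficients for y' are obtained the same way with the target northings). This illustrates the standard method only; it is not the i.rectify implementation:

```c
/* 3x3 determinant */
static double det3(double m[3][3])
{
    return m[0][0] * (m[1][1] * m[2][2] - m[1][2] * m[2][1]) -
           m[0][1] * (m[1][0] * m[2][2] - m[1][2] * m[2][0]) +
           m[0][2] * (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
}

/* Least-squares fit of x' = a*x + b*y + c from n control points
 * (x[i], y[i]) -> xp[i], via the normal equations and Cramer's rule.
 * Returns 0 on success, -1 if the points are degenerate. */
static int fit_affine_1d(const double *x, const double *y, const double *xp,
                         int n, double *a, double *b, double *c)
{
    double sxx = 0, sxy = 0, syy = 0, sx = 0, sy = 0;
    double sxX = 0, syX = 0, sX = 0;

    for (int i = 0; i < n; i++) {
        sxx += x[i] * x[i];
        sxy += x[i] * y[i];
        syy += y[i] * y[i];
        sx += x[i];
        sy += y[i];
        sxX += x[i] * xp[i];
        syX += y[i] * xp[i];
        sX += xp[i];
    }

    /* normal equations: M * (a, b, c)^T = (sxX, syX, sX)^T */
    double M[3][3] = {{sxx, sxy, sx}, {sxy, syy, sy}, {sx, sy, (double)n}};
    double d = det3(M);
    if (d == 0.0)
        return -1;

    /* Cramer's rule: replace one column at a time with the right-hand side */
    double Ma[3][3] = {{sxX, sxy, sx}, {syX, syy, sy}, {sX, sy, (double)n}};
    double Mb[3][3] = {{sxx, sxX, sx}, {sxy, syX, sy}, {sx, sX, (double)n}};
    double Mc[3][3] = {{sxx, sxy, sxX}, {sxy, syy, syX}, {sx, sy, sX}};

    *a = det3(Ma) / d;
    *b = det3(Mb) / d;
    *c = det3(Mc) / d;
    return 0;
}
```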

            Resampling method

            In the bilinear, cubic and lanczos methods, if any of the surrounding cells used to interpolate the new cell value are NULL, the resulting cell will be NULL, even if the nearest cell is not NULL. This will cause some thinning along NULL borders, -such as the coasts of land areas in a DEM. The bilinear_f, cubic_f and lanczos_f -interpolation methods can be used if thinning along NULL edges is not desired. -These methods "fall back" to simpler interpolation methods along NULL borders. +such as the coasts of land areas in a DEM. The bilinear_f, cubic_f +and lanczos_f interpolation methods can be used if thinning along NULL edges is +not desired. These methods "fall back" to simpler interpolation methods along NULL borders. That is, from lanczos to cubic to bilinear to nearest.
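A rough sketch of that fallback chain, with hypothetical interpolator callbacks (the module's actual logic differs in detail): each method reports failure when its stencil touches a NULL cell, and the next simpler method is tried.

```c
/* Hypothetical fallback chain for the *_f methods described above:
 * lanczos -> cubic -> bilinear -> nearest.  Each interpolator is assumed
 * to return 0 and set *val only if no cell in its stencil is NULL,
 * and a nonzero value otherwise. */
typedef int (*interp_fn)(double east, double north, double *val);

static int interpolate_with_fallback(const interp_fn *methods, int n_methods,
                                     double east, double north, double *val)
{
    for (int i = 0; i < n_methods; i++) {
        /* try the highest-order method first, then fall back */
        if (methods[i](east, north, val) == 0)
            return 0;
    }
    return -1; /* even nearest neighbour found only NULL cells */
}
```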

            If nearest neighbor assignment is used, the output map has the same raster format as the input map. If any of the other interpolations is used, the @@ -161,9 +161,9 @@

            Resampling method

            NOTES

            If i.rectify starts normally but after some time the following text is seen: -
            +
            ERROR: Error writing segment file -

            +

            the user may try the -c flag or the module needs more free space on the hard drive. @@ -179,7 +179,9 @@

            SEE ALSO

            v.proj, i.group, i.target -
            + +
            + Ground Control Points Manager diff --git a/imagery/i.segment/i.segment.html b/imagery/i.segment/i.segment.html index 43d5e5fde68..74de8f9177a 100644 --- a/imagery/i.segment/i.segment.html +++ b/imagery/i.segment/i.segment.html @@ -1,5 +1,8 @@

            DESCRIPTION

            +i.segment identifies segments (objects) from +imagery data. +

            Image segmentation or object recognition is the process of grouping similar pixels into unique segments, also referred to as objects. Boundary and region based algorithms are described in the literature, @@ -64,10 +67,10 @@

            Calculation Formulas

            In future, the distance calculation will also take into account the shape characteristics of the segments. The normal distances are then multiplied by the input radiometric weight. Next an additional -contribution is added: (1-radioweight) * {smoothness * smoothness -weight + compactness * (1-smoothness weight)}, -where compactness = Perimeter Length / sqrt( Area ) -and smoothness = Perimeter Length / Bounding Box. The +contribution is added: (1-radioweight) * {smoothness * smoothness +weight + compactness * (1-smoothness weight)}, +where compactness = Perimeter Length / sqrt( Area ) +and smoothness = Perimeter Length / Bounding Box. The perimeter length is estimated as the number of pixel sides the segment has. @@ -175,7 +178,7 @@
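A small C sketch of the additional shape contribution exactly as quoted above (variable names are illustrative only, not taken from the i.segment sources):

```c
#include <math.h>

/* Shape contribution added to the radiometric distance:
 *   compactness = perimeter / sqrt(area)
 *   smoothness  = perimeter / bounding_box
 *   extra = (1 - radio_weight) *
 *           (smoothness * smooth_weight + compactness * (1 - smooth_weight))
 */
static double shape_contribution(double perimeter, double area,
                                 double bounding_box, double radio_weight,
                                 double smooth_weight)
{
    double compactness = perimeter / sqrt(area);
    double smoothness = perimeter / bounding_box;

    return (1.0 - radio_weight) *
           (smoothness * smooth_weight + compactness * (1.0 - smooth_weight));
}
```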

            Segmentation of RGB orthophoto

            -The output ortho_segs_l4 with threshold=0.2 still has +The output ortho_segs_l4 with threshold=0.2 still has too many segments, but the output with threshold=0.3 has too few segments. A threshold value of 0.25 seems to be a good choice. There is also some noise in the image, lets next force all segments smaller diff --git a/imagery/i.signatures/i.signatures.html b/imagery/i.signatures/i.signatures.html index 403269ec84c..7f3ba1920e7 100644 --- a/imagery/i.signatures/i.signatures.html +++ b/imagery/i.signatures/i.signatures.html @@ -1,6 +1,6 @@

            DESCRIPTION

            -i.signatures module allows to manage signature files: +i.signatures module allows managing signature files:
            • "sig" – generated by i.gensig for i.maxlik
            • diff --git a/imagery/i.smap/i.smap.html b/imagery/i.smap/i.smap.html index 14a883efc28..4a57e900378 100644 --- a/imagery/i.smap/i.smap.html +++ b/imagery/i.smap/i.smap.html @@ -10,18 +10,17 @@

              DESCRIPTION

              i.smap has two modes of operation. The first mode -is the sequential maximum a posteriori (SMAP) mode -[1,2]. The SMAP +is the sequential maximum a posteriori (SMAP) mode (see +Bouman and Shapiro, 1992; Bouman and Shapiro, 1994). The SMAP segmentation algorithm attempts to improve segmentation accuracy by segmenting the image into regions rather than -segmenting each pixel separately -(see NOTES). +segmenting each pixel separately (see NOTES below).

              The second mode is the more conventional maximum likelihood (ML) classification which classifies each pixel separately, but requires somewhat less computation. This mode is selected with -the -m flag (see below). +the -m flag (see below).

              OPTIONS

              @@ -30,9 +29,7 @@

              Flags:

              -m
              Use maximum likelihood estimation (instead of smap). -Normal operation is to use SMAP estimation (see -NOTES). - +Normal operation is to use SMAP estimation (see NOTES below).

              Parameters:

              @@ -57,7 +54,7 @@

              Parameters:

              statistics) for the classes to be identified in the image. This signature file is produced by the program i.gensigset -(see NOTES). +(see NOTES below).
              blocksize=value @@ -102,7 +99,7 @@

              Parameters:

              -

              NOTES

              +

              NOTES

              The SMAP algorithm exploits the fact that nearby pixels in an image are likely to have the same class. It works by @@ -129,8 +126,9 @@

              NOTES

              The module i.smap does not support MASKed or NULL cells. Therefore it might be necessary to create a copy of the classification results -using e.g. r.mapcalc: -

              +using e.g. r.mapcalc:
              +

              +

               r.mapcalc "MASKed_map = classification_results"
               
              @@ -198,16 +196,16 @@

              REFERENCES

            • C. Bouman and M. Shapiro, "Multispectral Image Segmentation using a Multiscale Image Model", Proc. of IEEE Int'l Conf. on Acoust., Speech and Sig. Proc., -pp. III-565 - III-568, San Francisco, California, March 23-26, 1992. +pp. III-565 - III-568, San Francisco, California, March 23-26, 1992.
            • C. Bouman and M. Shapiro 1994, "A Multiscale Random Field Model for Bayesian Image Segmentation", IEEE Trans. on Image Processing., 3(2), 162-177" -(PDF) +(PDF)
            • McCauley, J.D. and B.A. Engel 1995, "Comparison of Scene Segmentations: SMAP, ECHO and Maximum Likelihood", -IEEE Trans. on Geoscience and Remote Sensing, 33(6): 1313-1316. +IEEE Trans. on Geoscience and Remote Sensing, 33(6): 1313-1316.

            SEE ALSO

            @@ -216,10 +214,10 @@

            SEE ALSO

            r.support for setting semantic labels,
            -i.group for creating groups and subgroups +i.group for creating groups and subgroups,
            r.mapcalc -to copy classification result in order to cut out MASKed subareas +to copy classification result in order to cut out MASKed subareas,
            i.gensigset to generate the signature file required by this program diff --git a/imagery/i.svm.predict/i.svm.predict.html b/imagery/i.svm.predict/i.svm.predict.html index 3ddf4f33879..f0dbcf511cb 100644 --- a/imagery/i.svm.predict/i.svm.predict.html +++ b/imagery/i.svm.predict/i.svm.predict.html @@ -72,11 +72,6 @@

            REFERENCES

      -

      AUTHORS

      +

      AUTHOR

      Maris Nartiss, University of Latvia. - - diff --git a/imagery/i.svm.train/i.svm.train.html b/imagery/i.svm.train/i.svm.train.html index e25397d7ca6..d0ea9b8ba83 100644 --- a/imagery/i.svm.train/i.svm.train.html +++ b/imagery/i.svm.train/i.svm.train.html @@ -101,11 +101,6 @@

      REFERENCES

      -

      AUTHORS

      +

      AUTHOR

      Maris Nartiss, University of Latvia. - - diff --git a/imagery/i.topo.corr/main.c b/imagery/i.topo.corr/main.c index 1f2b7988d26..42422117063 100644 --- a/imagery/i.topo.corr/main.c +++ b/imagery/i.topo.corr/main.c @@ -114,7 +114,10 @@ int main(int argc, char *argv[]) Rast_get_window(&window); azimuth = atof(azim->answer); /* Warning: make buffers and output after set window */ - strcpy(dem.name, base->answer); + if (G_strlcpy(dem.name, base->answer, sizeof(dem.name)) >= + sizeof(dem.name)) { + G_fatal_error(_("DEM name <%s> is too long"), base->answer); + } /* Set window to DEM file */ Rast_get_window(&window); Rast_get_cellhd(dem.name, "", &hd_dem); @@ -122,7 +125,10 @@ int main(int argc, char *argv[]) dem.fd = Rast_open_old(dem.name, ""); dem.type = Rast_get_map_type(dem.fd); /* Open and buffer of the output file */ - strcpy(out.name, output->answer); + if (G_strlcpy(out.name, output->answer, sizeof(out.name)) >= + sizeof(out.name)) { + G_fatal_error(_("Output name <%s> is too long"), output->answer); + } out.fd = Rast_open_new(output->answer, DCELL_TYPE); out.rast = Rast_allocate_buf(out.type); /* Open and buffer of the elevation file */ @@ -169,7 +175,11 @@ int main(int argc, char *argv[]) for (i = 0; input->answers[i] != NULL; i++) { G_message(_("Band %s: "), input->answers[i]); /* Abre fichero de bandas y el de salida */ - strcpy(band.name, input->answers[i]); + if (G_strlcpy(band.name, input->answers[i], sizeof(band.name)) >= + sizeof(band.name)) { + G_fatal_error(_("Band name <%s> is too long"), + input->answers[i]); + } Rast_get_cellhd(band.name, "", &hd_band); Rast_set_window( &hd_band); /* Antes de out_open y allocate para mismo size */ diff --git a/imagery/i.vi/i.vi.html b/imagery/i.vi/i.vi.html index 3597dc5f500..0ed45b41d2d 100644 --- a/imagery/i.vi/i.vi.html +++ b/imagery/i.vi/i.vi.html @@ -517,7 +517,7 @@

      Preparation: DN to reflectance

      product_date=2004-02-12 gain=HHHLHLHHL
-The resulting Landsat channels are names lsat7_2002_toar.1 .. lsat7_2002_toar.8.
+The resulting Landsat channels are named lsat7_2002_toar.1 .. lsat7_2002_toar.8.

      Calculation of NDVI

      @@ -578,15 +578,15 @@

      Calculation of GARI

      NOTES

      -Originally from kepler.gps.caltech.edu (FAQ): +Originally from kepler.gps.caltech.edu (FAQ):

      A FAQ on Vegetation in Remote Sensing
      Written by Terrill W. Ray, Div. of Geological and Planetary Sciences, California Institute of Technology, email: terrill@mars1.gps.caltech.edu

      Snail Mail: Terrill Ray
      - Division of Geological and Planetary Sciences
      - Caltech, Mail Code 170-25
      - Pasadena, CA 91125 + Division of Geological and Planetary Sciences
      + Caltech, Mail Code 170-25
      + Pasadena, CA 91125

      REFERENCES

      diff --git a/imagery/i.zc/i.zc.html b/imagery/i.zc/i.zc.html index b962c9b7b8d..f3dec69e4a1 100644 --- a/imagery/i.zc/i.zc.html +++ b/imagery/i.zc/i.zc.html @@ -13,16 +13,16 @@

      NOTES

      The procedure to find the "edges" in the image is as follows:
        -
      1. The Fourier transform of the image is taken, +
      2. The Fourier transform of the image is taken,
      3. The Fourier transform of the Laplacian of a two-dimensional -Gaussian function is used to filter the transformed image, -
      4. The result is run through an inverse Fourier transform, +Gaussian function is used to filter the transformed image,
      5. +
      6. The result is run through an inverse Fourier transform,
      7. The resulting image is traversed in search of places where the image -changes from positive to negative or from negative to positive, +changes from positive to negative or from negative to positive,
      8. Each cell in the map where the value crosses zero (with a change in value greater than the threshold value) is marked as an edge and an orientation is assigned to it. -The resulting raster map layer is output. +The resulting raster map layer is output.
      The width= parameter determines the x-y extent of the diff --git a/imagery/imageryintro.html b/imagery/imageryintro.html index c0ef2a48616..b9258352b86 100644 --- a/imagery/imageryintro.html +++ b/imagery/imageryintro.html @@ -30,9 +30,9 @@

      Image processing in general

      by the sensor of the satellite platform is encoded in 8 or more bits. This energy is called radiance-at-sensor. To obtain physical values from DNs, satellite image providers use a linear transform -equation (y = a * x + b) to encode the radiance-at-sensor +equation (y = a * x + b) to encode the radiance-at-sensor in 8 to 16 bits. DNs can be turned back into physical values by -applying the reverse formula (x = (y - b) / a). +applying the reverse formula (x = (y - b) / a).
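A minimal C sketch of the forward and reverse linear transform described above, where y is the digital number (DN) and x the radiance-at-sensor; the gain a and bias b are assumed to come from the image metadata:

```c
/* Provider encoding: DN = a * radiance + b */
static double radiance_to_dn(double radiance, double a, double b)
{
    return a * radiance + b;
}

/* Reverse formula: radiance = (DN - b) / a */
static double dn_to_radiance(double dn, double a, double b)
{
    return (dn - b) / a;
}
```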

      The GRASS GIS module i.landsat.toar easily transforms Landsat DN to radiance-at-sensor (top of atmosphere, @@ -48,7 +48,7 @@

      Image processing in general

      interaction with the sun energy reflected back into space by ground/vegetation/soil needs to be corrected. The need of removing atmospheric artifacts stems from the fact that the -atmosphericic conditions are changing over time. Hence, to gain +atmospheric conditions are changing over time. Hence, to gain comparability between Earth surface images taken at different times, atmospheric need to be removed converting at-sensor values which are top of atmosphere to surface reflectance values. @@ -76,10 +76,10 @@

      Image processing in general

      As a general rule in GRASS:
      1. Raster/imagery output maps have their bounds and resolution equal - to those of the current region. + to those of the current region.
      2. Raster/imagery input maps are automatically cropped/padded and rescaled (using nearest-neighbor resampling) to match the current - region. + region.
      @@ -114,7 +114,7 @@

      Semantic label information

      a different group with identical semantic labels.
      -
      +GRASS GIS band references scheme
      New enhanced classification workflow involving semantic labels. @@ -171,7 +171,7 @@

      Calculation of vegetation indices

status with NDVI, the Red and the Near Infrared channels (NIR) are used as input for simple map algebra in the GRASS command r.mapcalc -(ndvi = 1.0 * (nir - red)/(nir + red)). With +(ndvi = 1.0 * (nir - red)/(nir + red)). With r.colors an optimized "ndvi" color table can be assigned afterward. Also other vegetation indices can be generated likewise. @@ -204,16 +204,19 @@
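The same NDVI formula, sketched in C for a single cell pair; the zero-sum guard is a simplification (r.mapcalc would produce NULL in that case):

```c
/* NDVI as used above: ndvi = (nir - red) / (nir + red), per cell. */
static double ndvi(double nir, double red)
{
    double sum = nir + red;

    return (sum == 0.0) ? 0.0 : (nir - red) / sum;
}
```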

      Image classification

      or g.gui.iclass, i.maxlik) using the Maximum Likelihood classification method +
    16. Combined radiometric/geometric (segmentation based) classification: +
    17. Object-oriented classification: +
    18. Kappa statistic can be calculated to validate the results @@ -246,8 +249,8 @@

      Radiometric corrections

      Time series processing

      -GRASS also offers support for time series processing (r.series). Statistics can be derived from a +GRASS also offers support for time series processing +(r.series). Statistics can be derived from a set of coregistered input maps such as multitemporal satellite data. The common univariate statistics and also linear regression can be calculated. @@ -293,4 +296,5 @@

      See also

    19. Introduction into temporal data processing
    20. Database management
    21. Projections and spatial transformations
    22. +
    23. Graphical User Interface
    24. diff --git a/include/Make/DB.make b/include/Make/DB.make index a8ed51536f3..797da921ee5 100644 --- a/include/Make/DB.make +++ b/include/Make/DB.make @@ -10,7 +10,7 @@ include $(MODULE_TOPDIR)/include/Make/Compile.make dbmi: $(DBDRIVERDIR)/$(PGM)$(EXE) db_html -db_html: $(HTMLDIR)/grass-$(PGM).html $(MANDIR)/grass-$(PGM).$(MANSECT) +db_html: $(HTMLDIR)/grass-$(PGM).html $(MANDIR)/grass-$(PGM).$(MANSECT) # $(MDDIR)/source/grass-$(PGM).md $(DBDRIVERDIR)/$(PGM)$(EXE): $(ARCH_OBJS) $(DEPENDENCIES) $(call linker) diff --git a/include/Make/Grass.make b/include/Make/Grass.make index 137a45d1955..17ce25fcb71 100644 --- a/include/Make/Grass.make +++ b/include/Make/Grass.make @@ -59,6 +59,7 @@ DOCSDIR = $(ARCH_DISTDIR)/docs ETC = $(ARCH_DISTDIR)/etc GUIDIR = $(ARCH_DISTDIR)/gui HTMLDIR = $(ARCH_DISTDIR)/docs/html +MDDIR = $(ARCH_DISTDIR)/docs/mkdocs SCRIPTDIR = $(ARCH_DISTDIR)/scripts MSG_DIR = $(ARCH_DISTDIR)/etc/msgs MO_DIR = $(ARCH_DISTDIR)/locale diff --git a/include/Make/GuiScript.make b/include/Make/GuiScript.make index dbbc7609882..7451e2b693d 100644 --- a/include/Make/GuiScript.make +++ b/include/Make/GuiScript.make @@ -10,6 +10,8 @@ include $(MODULE_TOPDIR)/include/Make/HtmlRules.make MODULES := $(patsubst g.gui.%.py,%,$(wildcard g.gui.*.py)) CMDHTML := $(patsubst %,$(HTMLDIR)/g.gui.%.html,$(MODULES)) GUIHTML := $(patsubst %,$(HTMLDIR)/wxGUI.%.html,$(MODULES)) +CMDMAN := $(patsubst %,$(MDDIR)/source/g.gui.%.md,$(MODULES)) +GUIMAN := $(patsubst %,$(MDDIR)/source/wxGUI.%.md,$(MODULES)) ifdef MINGW SCRIPTEXT = .py BATFILES := $(patsubst %,$(BIN)/g.gui.%.bat,$(MODULES)) @@ -19,26 +21,40 @@ BATFILES = endif PYFILES := $(patsubst %,$(SCRIPTDIR)/g.gui.%$(SCRIPTEXT),$(MODULES)) -guiscript: $(IMGDST) $(PYFILES) $(BATFILES) +guiscript: $(IMGDST) $(IMGDST_MD) $(PYFILES) $(BATFILES) # we cannot use cross-compiled g.parser for generating html files ifndef CROSS_COMPILING $(MAKE) $(CMDHTML) -rm -f g.gui.*.tmp.html $(MAKE) $(GUIHTML) +# $(MAKE) $(CMDMAN) +# $(MAKE) $(GUIMAN) endif $(HTMLDIR)/g.gui.%.html: g.gui.%.html g.gui.%.tmp.html | $(HTMLDIR) VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) MODULE_TOPDIR=$(MODULE_TOPDIR) \ $(PYTHON) $(GISBASE)/utils/mkhtml.py g.gui.$* $(GRASS_VERSION_DATE) > $@ +$(MDDIR)/source/g.gui.%.md: g.gui.%.md g.gui.%.tmp.md | $(MDDIR) + VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) MODULE_TOPDIR=$(MODULE_TOPDIR) \ + $(PYTHON) $(GISBASE)/utils/mkmarkdown.py g.gui.$* $(GRASS_VERSION_DATE) > $@ + $(HTMLDIR)/wxGUI.%.html: g.gui.%.html | $(HTMLDIR) -rm -f g.gui.$*.tmp.html VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) MODULE_TOPDIR=$(MODULE_TOPDIR) \ $(PYTHON) $(GISBASE)/utils/mkhtml.py g.gui.$* $(GRASS_VERSION_DATE) > $@ +$(MDDIR)/source/wxGUI.%.md: g.gui.%.md | $(MDDIR) + -rm -f g.gui.$*.tmp.md + VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) MODULE_TOPDIR=$(MODULE_TOPDIR) \ + $(PYTHON) $(GISBASE)/utils/mkmarkdown.py g.gui.$* $(GRASS_VERSION_DATE) > $@ + g.gui.%.tmp.html: $(SCRIPTDIR)/g.gui.% $(call htmldesc,$<,$@) +g.gui.%.tmp.md: $(SCRIPTDIR)/g.gui.% + $(call mddesc,$<,$@) + $(SCRIPTDIR)/g.gui.%$(SCRIPTEXT): g.gui.%.py | $(SCRIPTDIR) $(INSTALL) $< $@ diff --git a/include/Make/Html.make b/include/Make/Html.make index fa736d3405a..335c841e149 100644 --- a/include/Make/Html.make +++ b/include/Make/Html.make @@ -7,19 +7,26 @@ $(HTMLDIR)/%.html: %.html %.tmp.html $(HTMLSRC) $(IMGDST) | $(HTMLDIR) VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) 
MODULE_TOPDIR=$(MODULE_TOPDIR) \ $(PYTHON) $(GISBASE)/utils/mkhtml.py $* > $@ +$(MDDIR)/source/%.md: %.md %.tmp.md $(HTMLSRC) $(IMGDST_MD) | $(MDDIR) + VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) MODULE_TOPDIR=$(MODULE_TOPDIR) \ + $(PYTHON) $(GISBASE)/utils/mkmarkdown.py $* > $@ + $(MANDIR)/%.$(MANSECT): $(HTMLDIR)/%.html $(HTML2MAN) "$<" "$@" %.tmp.html: $(HTMLSRC) if [ "$(HTMLSRC)" != "" ] ; then $(call htmldesc,$<,$@) ; fi +%.tmp.md: $(HTMLSRC) + if [ "$(HTMLSRC)" != "" ] ; then $(call mddesc,$<,$@) ; fi + ifdef CROSS_COMPILING html: else -html: $(HTMLDIR)/$(PGM).html $(MANDIR)/$(PGM).$(MANSECT) +html: $(HTMLDIR)/$(PGM).html $(MANDIR)/$(PGM).$(MANSECT) # $(MDDIR)/source/$(PGM).md endif diff --git a/include/Make/HtmlRules.make b/include/Make/HtmlRules.make index 0c79a05a2ad..cdeaf317b98 100644 --- a/include/Make/HtmlRules.make +++ b/include/Make/HtmlRules.make @@ -3,13 +3,20 @@ htmldesc = $(call run_grass,$(1) --html-description < /dev/null | grep -v '\|\|
      ' > $(2)) +mddesc = $(call run_grass,$(1) --md-description < /dev/null > $(2)) + IMGSRC := $(wildcard *.png) $(wildcard *.jpg) $(wildcard *.gif) IMGDST := $(patsubst %,$(HTMLDIR)/%,$(IMGSRC)) +IMGDST_MD := $(patsubst %,$(MDDIR)/source/%,$(IMGSRC)) ifneq ($(strip $(IMGDST)),) .SECONDARY: $(IMGDST) endif +ifneq ($(strip $(IMGDST_MD)),) +.SECONDARY: $(IMGDST_MD) +endif + $(HTMLDIR)/%.png: %.png | $(HTMLDIR) $(INSTALL_DATA) $< $@ @@ -18,3 +25,12 @@ $(HTMLDIR)/%.jpg: %.jpg | $(HTMLDIR) $(HTMLDIR)/%.gif: %.gif | $(HTMLDIR) $(INSTALL_DATA) $< $@ + +$(MDDIR)/source/%.png: %.png | $(MDDIR) + $(INSTALL_DATA) $< $@ + +$(MDDIR)/source/%.jpg: %.jpg | $(MDDIR) + $(INSTALL_DATA) $< $@ + +$(MDDIR)/source/%.gif: %.gif | $(MDDIR) + $(INSTALL_DATA) $< $@ diff --git a/include/Make/NoHtml.make b/include/Make/NoHtml.make index 915d2912c30..5742ff6268f 100644 --- a/include/Make/NoHtml.make +++ b/include/Make/NoHtml.make @@ -2,5 +2,8 @@ $(HTMLDIR)/$(PGM).html: @echo no HTML documentation available +$(MDDIR)/source/$(PGM).md: + @echo no Markdown documentation available + $(MANDIR)/$(PGM).$(MANSECT): @echo no manual page available diff --git a/include/Make/Rules.make b/include/Make/Rules.make index 3681c667a02..9e765fab7a4 100644 --- a/include/Make/Rules.make +++ b/include/Make/Rules.make @@ -6,7 +6,7 @@ first: pre default ARCH_DIRS = $(ARCH_DISTDIR) $(ARCH_BINDIR) $(ARCH_INCDIR) $(ARCH_LIBDIR) \ $(BIN) $(ETC) \ $(DRIVERDIR) $(DBDRIVERDIR) $(FONTDIR) $(DOCSDIR) $(HTMLDIR) \ - $(MANBASEDIR) $(MANDIR) $(UTILSDIR) + $(MDDIR) $(MDDIR)/source $(MANBASEDIR) $(MANDIR) $(UTILSDIR) pre: | $(ARCH_DIRS) diff --git a/include/VERSION b/include/VERSION index c4ace75dbf6..97aca63225c 100644 --- a/include/VERSION +++ b/include/VERSION @@ -1,4 +1,4 @@ 8 5 0dev -2024 +2025 diff --git a/include/grass/config.h.in b/include/grass/config.h.in index 2451d8cc2b8..8e5eb161a45 100644 --- a/include/grass/config.h.in +++ b/include/grass/config.h.in @@ -152,9 +152,6 @@ /* Define to 1 if PDAL exists. */ #undef HAVE_PDAL -/* Define to 1 if PDAL NoFilenameWriter is present. */ -#undef HAVE_PDAL_NOFILENAMEWRITER - /* Define to 1 if glXCreateGLXPixmap exists. */ #undef HAVE_PIXMAPS diff --git a/include/grass/gis.h b/include/grass/gis.h index fcc53535f47..e6b818135e4 100644 --- a/include/grass/gis.h +++ b/include/grass/gis.h @@ -6,7 +6,7 @@ * PURPOSE: This file contains definitions of variables and data types * for use with most, if not all, Grass programs. This file is * usually included in every Grass program. - * COPYRIGHT: (C) 2000-2024 by the GRASS Development Team + * COPYRIGHT: (C) 2000-2025 by the GRASS Development Team * * This program is free software under the GNU General Public * License (>=v2). Read the file COPYING that comes with GRASS diff --git a/include/grass/iostream/ami_sort_impl.h b/include/grass/iostream/ami_sort_impl.h index bdd466d2b62..ebe22358da8 100644 --- a/include/grass/iostream/ami_sort_impl.h +++ b/include/grass/iostream/ami_sort_impl.h @@ -112,7 +112,7 @@ size_t makeRun_Block(AMI_STREAM *instream, T *data, unsigned int run_size, once, it reads it in blocks, sorts each block and then merges the blocks together. Note: it is not in place! 
it allocates another array of same size as data, writes the sorted run into it and - deteles data, and replaces data with outdata */ + deletes data, and replaces data with outdata */ template void makeRun(AMI_STREAM *instream, T *&data, int run_size, Compare *cmp) { diff --git a/lib/btree2/Makefile b/lib/btree2/Makefile index 84dda646327..627fc259bce 100644 --- a/lib/btree2/Makefile +++ b/lib/btree2/Makefile @@ -7,6 +7,7 @@ MOD_OBJS := $(filter-out try.o,$(AUTO_OBJS)) LIB = BTREE2 include $(MODULE_TOPDIR)/include/Make/Lib.make +include $(MODULE_TOPDIR)/include/Make/Doxygen.make HEADERS := $(ARCH_INCDIR)/kdtree.h @@ -17,3 +18,5 @@ headers: $(HEADERS) $(ARCH_INCDIR)/kdtree.h: kdtree.h $(INSTALL_DATA) $< $@ + +DOXNAME = btree2 diff --git a/lib/btree2/btree2.dox b/lib/btree2/btree2lib.dox similarity index 98% rename from lib/btree2/btree2.dox rename to lib/btree2/btree2lib.dox index 5f6b7515e0d..6e4d00b0eda 100644 --- a/lib/btree2/btree2.dox +++ b/lib/btree2/btree2lib.dox @@ -1,4 +1,4 @@ -/*! \page btree2 btree2 library +/*! \page btree2 GRASS Btree2 and k-d tree libraries \tableofcontents diff --git a/lib/cairodriver/cairodriver.html b/lib/cairodriver/cairodriver.html index 22f8e1264c8..2e294af5edd 100644 --- a/lib/cairodriver/cairodriver.html +++ b/lib/cairodriver/cairodriver.html @@ -7,7 +7,7 @@

      DESCRIPTION

      The Cairo driver generates PNG, BMP, PPM, PS, PDF or SVG images by GRASS display commands, using the -Cairo graphics library. +Cairo graphics library. The image format is selected from the extension of the output file. The Cairo driver is used for GRASS display commands by default if @@ -44,11 +44,11 @@

      Environment variables

    25. GRASS_RENDER_ANTIALIAS
      can be default, none, gray, or subpixel, corresponding to - cairo_antialias_t + cairo_antialias_t
    26. GRASS_RENDER_FILE=filename
      the name and format of the resulting image file, default is - map.png.
      + map.png.
      The image format is determined from the file extension.
      Supported bitmap formats:
        @@ -67,12 +67,12 @@

        Environment variables

        the Cairo library that GRASS was built with.)
      • GRASS_RENDER_FILE_READ
        - if TRUE, the Cairo driver will initialize the image from + if TRUE, the Cairo driver will initialize the image from the contents of GRASS_RENDER_FILE.
        (Note: This is only supported for bitmap formats)
      • GRASS_RENDER_FILE_MAPPED
        - if TRUE, the Cairo driver will map GRASS_RENDER_FILE as its framebuffer, + if TRUE, the Cairo driver will map GRASS_RENDER_FILE as its framebuffer, rather than using memory.
        (Note: This only works with BMP files.)
      • @@ -152,13 +152,13 @@

        NOTES

        The driver is still in development. Enable it by specifying --with-cairo when configuring GRASS. This requires a reasonably recent version of the Cairo libraries -and a working pkg-config. +and a working pkg-config.

        Antialiasing is enabled by default for bitmap formats. There is currently no way of disabling this.

        The resolution of the output images is defined by current region -extents. Use g.region -p to get the number of rows and cols +extents. Use g.region -p to get the number of rows and cols and use the environment variables to set the image size. If you would like a larger image, multiply both rows and cols by the same whole number to preserve the aspect ratio. diff --git a/lib/cluster/c_exec.c b/lib/cluster/c_exec.c index abc53d32fe5..043c67475c3 100644 --- a/lib/cluster/c_exec.c +++ b/lib/cluster/c_exec.c @@ -1,7 +1,7 @@ /*! \file cluster/c_exec.c - \brief Cluster library - Exectute clusterring + \brief Cluster library - Execute clusterring (C) 2001-2009 by the GRASS Development Team diff --git a/lib/db/README b/lib/db/README deleted file mode 100644 index 9dc8faf9ab6..00000000000 --- a/lib/db/README +++ /dev/null @@ -1,51 +0,0 @@ -/**************************************************************************** - * - * MODULE: DBMI library - * AUTHOR(S): Joel Jones (CERL/UIUC) - * Radim Blazek , - * Brad Douglas , - * Glynn Clements , - * Roberto Flor, Hamish Bowman , - * Markus Neteler , - * Huidae Cho , - * Paul Kelly , - * Martin Landa , - * Moritz Lennert , - * Daniel Calvelo Aros , - * Bernhard Reiter , - * Alex Shevlakov - * PURPOSE: database management functions for modules and drivers - * COPYRIGHT: (C) 2003-2006 by the GRASS Development Team - * - * This program is free software under the GNU General Public - * License (>=v2). Read the file COPYING that comes with GRASS - * for details. - * - *****************************************************************************/ - -DBMI Library - -Original author: Joel Jones (jjones * zorro.cecer.army.mil | jjones * uiuc.edu ) - Ref: https://lists.osgeo.org/pipermail/grass-dev/1995-February/002015.html - -Directory contents: - -dbmi: DataBase Management Interface (db_*() functions) - dbmi_base: contains functions for modules, drivers (../../db/drivers/) - dbmi_client: contains functions for modules - dbmi_driver: contains functions for drivers (../../db/drivers/) - -sqlp: SQL parser library -stubs: stubs for unimplemented DB functions - -The DBMI drivers are stored in -../../db/drivers/ - -The DBMI user modules are stored in -../../db/base/ - -NOTE: - Please read db/drivers/README - -To generate dbmi_driver/dbstubs.h automatically, run './mk_dbstubs_h.sh' in -dbmi_driver/ directory. diff --git a/lib/db/README.md b/lib/db/README.md new file mode 100644 index 00000000000..227d628fa48 --- /dev/null +++ b/lib/db/README.md @@ -0,0 +1,59 @@ +## DBMI library + +### Purpose + +Database management functions for modules and drivers. + +### Authors + +Original author: + +- Joel Jones (CERL/UIUC) (jjones zorro.cecer.army.mil) + +Ref: + +Further authors: + +- Radim Blazek (radim.blazek gmail.com) +- Brad Douglas (rez touchofmadness.com) +- Glynn Clements (glynn gclements.plus.com) +- Roberto Flor, Hamish Bowman (hamish_b yahoo.com) +- Markus Neteler (neteler itc.it) +- Huidae Cho (grass4u gmail.com) +- Paul Kelly (paul-grass stjohnspoint.co.uk) +- Martin Landa (landa.martin gmail.com) +- Moritz Lennert (mlennert club.worldonline.be) +- Daniel Calvelo Aros (dca.gis gmail.com) +- Bernhard Reiter (bernhard intevation.de) +- Alex Shevlakov (sixote yahoo.com) + +### Copyright + +(C) 2003-2024 by the GRASS Development Team + +### License + +This program is free software under the GNU General Public +License (>=v2). Read the file COPYING that comes with GRASS +for details. 
+ +### Directory contents + +- `dbmi/`: DataBase Management Interface (`db_*()` functions) + - `dbmi_base/`: contains functions for modules, drivers (`../../db/drivers/`) + - `dbmi_client/`: contains functions for modules + - `dbmi_driver/`: contains functions for drivers (`../../db/drivers/`) +- `sqlp/`: SQL parser library +- `stubs/`: stubs for unimplemented DB functions + +The DBMI drivers are stored in +`../../db/drivers/` + +The DBMI user modules are stored in +`../../db/base/` + +NOTE: +Please read db/drivers/README.md + +To generate `dbmi_driver/dbstubs.h` automatically, run `./mk_dbstubs_h.sh` in +`dbmi_driver/` directory (GRASS GIS 6). diff --git a/lib/db/dbmi_client/c_update.c b/lib/db/dbmi_client/c_update.c index 9a8a0d6e4cd..399f19d8155 100644 --- a/lib/db/dbmi_client/c_update.c +++ b/lib/db/dbmi_client/c_update.c @@ -1,7 +1,7 @@ /*! * \file db/dbmi_client/c_update.c * - * \brief DBMI Library (client) - update statemets + * \brief DBMI Library (client) - update statements * * (C) 1999-2008 by the GRASS Development Team * diff --git a/lib/db/dbmi_driver/d_update.c b/lib/db/dbmi_driver/d_update.c index 5d601679082..ed7a501a7c4 100644 --- a/lib/db/dbmi_driver/d_update.c +++ b/lib/db/dbmi_driver/d_update.c @@ -1,7 +1,7 @@ /*! * \file db/dbmi_driver/d_update.c * - * \brief DBMI Library (driver) - update statemets + * \brief DBMI Library (driver) - update statements * * (C) 1999-2008 by the GRASS Development Team * diff --git a/lib/db/sqlp/README b/lib/db/sqlp/README deleted file mode 100644 index 9a23b0925e8..00000000000 --- a/lib/db/sqlp/README +++ /dev/null @@ -1,18 +0,0 @@ -sqlp is SQL parser library - -sqp is intended as library for simple dbmi drivers (like dbf, txt). -yac.y and lex.l was originally stolen from unixODBC 3/2001 and modified. - -An input may be subset of SQL statements. Currently supported: -SELECT FROM WHERE -INSERT INTO -UPDATE WHERE -DELETE FROM WHERE -CREATE TABLE -DROP TABLE -[...] - -New types have to be added in yac.y, lex.l, print.c and -../../../include/sqlp.h . -In ./test/ is a test program to the the SQL parser (see -README there). diff --git a/lib/db/sqlp/README.md b/lib/db/sqlp/README.md new file mode 100644 index 00000000000..852126fb157 --- /dev/null +++ b/lib/db/sqlp/README.md @@ -0,0 +1,24 @@ +## SQL parser library + +sqlp is the SQL parser library. + +sqp is intended as library for simple dbmi drivers (like dbf, txt). +`yac.y` and `lex.l` was originally stolen from unixODBC 3/2001 and modified. + +An input may be subset of SQL statements. Currently supported: + +```sql +SELECT FROM WHERE +INSERT INTO +UPDATE WHERE +DELETE FROM WHERE +CREATE TABLE +DROP TABLE +[...] +``` + +New types have to be added in `yac.y`, `lex.l`, `print.c` and +`../../../include/sqlp.h`. + +In `./test/` is a test program to the the SQL parser (see +README.md there). diff --git a/lib/db/sqlp/sql.html b/lib/db/sqlp/sql.html index eaed6a74b70..1fd070b7d3c 100644 --- a/lib/db/sqlp/sql.html +++ b/lib/db/sqlp/sql.html @@ -29,19 +29,19 @@

        Database drivers

    27. Fig.: Land use/land cover clustering of LANDSAT scene (simplified)
      - + - + - + @@ -69,8 +69,8 @@

      Database table name restrictions

    28. Attempts to use a reserved SQL word (depends on database backend) as column or table name will cause a "SQL syntax error".
    29. -
    30. An error message such as "dbmi: Protocol - error" either indicates an invalid column name or an +
    31. An error message such as "dbmi: Protocol + error" either indicates an invalid column name or an unsupported column type (then the GRASS SQL parser needs to be extended).
    32. DBF column names are limited to 10 characters (DBF API definition).
    33. @@ -151,7 +151,7 @@

      Example of null handling

      Update of attributes

      -Examples of complex expressions in updates (using v.db.* +Examples of complex expressions in updates (using v.db.* modules):
      @@ -164,7 +164,7 @@ 

      Update of attributes

      -Examples of more complex expressions in updates (using db.* +Examples of more complex expressions in updates (using db.* modules):

      @@ -237,4 +237,4 @@ 

      SEE ALSO

      AUTHOR

      -Radmin Blazek +Radim Blazek diff --git a/lib/db/sqlp/test/README b/lib/db/sqlp/test/README deleted file mode 100644 index d703f03bd0c..00000000000 --- a/lib/db/sqlp/test/README +++ /dev/null @@ -1,6 +0,0 @@ -Test of sql parser library. - -sqlptest reads sql statements (one per row) from standard -input and writes results of parser to standard output. - -Some test statemets are in ./test diff --git a/lib/db/sqlp/test/README.md b/lib/db/sqlp/test/README.md new file mode 100644 index 00000000000..a6aae0fe947 --- /dev/null +++ b/lib/db/sqlp/test/README.md @@ -0,0 +1,6 @@ +## Test of SQL parser library + +`sqlptest` reads SQL statements (one per row) from standard +input and writes results of parser to standard output. + +Some test statements are in `./test`. diff --git a/lib/display/tran_colr.c b/lib/display/tran_colr.c index 63975f18799..2c416abaecc 100644 --- a/lib/display/tran_colr.c +++ b/lib/display/tran_colr.c @@ -4,10 +4,12 @@ #include -#include #include -#include +#include +#include #include +#include + #include "driver.h" static struct color_rgb *colors; @@ -38,7 +40,9 @@ static int translate_or_add_color(const char *str) char lowerstr[MAX_COLOR_LEN]; /* Make the color string lowercase for display colors */ - strcpy(lowerstr, str); + if (G_strlcpy(lowerstr, str, sizeof(lowerstr)) >= sizeof(lowerstr)) { + G_fatal_error(_("String <%s> is too long"), str); + } G_chop(lowerstr); G_tolcase(lowerstr); diff --git a/lib/fonts/fonts/cyrilc.hmp b/lib/fonts/fonts/cyrilc.hmp index 12b63015753..c1d5c6f5086 100644 --- a/lib/fonts/fonts/cyrilc.hmp +++ b/lib/fonts/fonts/cyrilc.hmp @@ -19,4 +19,4 @@ 2801 2802 2823 2805 2806 2821 2804 2822 2809 2810 2811 2812 2813 2814 2815 2816 2832 2817 2818 2819 2820 2807 2803 2829 -2828 2808 2825 2830 2826 2824 2827 2832 +2828 2808 2825 2830 2826 2824 2827 2832 diff --git a/lib/fonts/fonts/fonts.table b/lib/fonts/fonts/fonts.table index d4d8486b5fa..f897f10384d 100644 --- a/lib/fonts/fonts/fonts.table +++ b/lib/fonts/fonts/fonts.table @@ -1,16 +1,16 @@ # # Descriptive Names for GRASS Stroke Fonts # -# This file maps a descriptive name to each font file, for use in the +# This file maps a descriptive name to each font file, for use in the # description field in the fontcap table (generated by g.mkfontcap). # # Format: Each line contains the filename followed by the vertical bar # character | and then the descriptive name. # -# Note that this file is used purely for the purpose of specifying +# Note that this file is used purely for the purpose of specifying # descriptive names for the Stroke fonts in this directory. # $GISBASE/etc/fontcap contains the list of fonts available to the GRASS -# display drivers, and any changes must be reflected there before they are +# display drivers, and any changes must be reflected there before they are # available for use - g.mkfontcap can do this automatically. # cyrilc.hmp|Cyrillic diff --git a/lib/gis/area_poly1.c b/lib/gis/area_poly1.c index 9d06690f970..a165876a229 100644 --- a/lib/gis/area_poly1.c +++ b/lib/gis/area_poly1.c @@ -186,7 +186,7 @@ double G_ellipsoid_polygon_area(const double *lon, const double *lat, int n) area = -area; /* kludge - if polygon circles the south pole the area will be - * computed as if it cirlced the north pole. The correction is + * computed as if it circled the north pole. The correction is * the difference between total surface area of the earth and * the "north pole" area. 
*/ diff --git a/lib/gis/debug.c b/lib/gis/debug.c index 034ab8dc690..b0d42260528 100644 --- a/lib/gis/debug.c +++ b/lib/gis/debug.c @@ -72,8 +72,6 @@ int G_debug(int level, const char *msg, ...) G_init_debug(); if (grass_debug_level >= level) { - va_start(ap, msg); - filen = getenv("GRASS_DEBUG_FILE"); if (filen != NULL) { fd = fopen(filen, "a"); @@ -87,14 +85,14 @@ int G_debug(int level, const char *msg, ...) } fprintf(fd, "D%d/%d: ", level, grass_debug_level); + va_start(ap, msg); vfprintf(fd, msg, ap); + va_end(ap); fprintf(fd, "\n"); fflush(fd); if (filen != NULL) fclose(fd); - - va_end(ap); } return 1; diff --git a/lib/gis/env.c b/lib/gis/env.c index e35da07b999..fdb52b5d592 100644 --- a/lib/gis/env.c +++ b/lib/gis/env.c @@ -3,7 +3,7 @@ \brief GIS library - environment routines - (C) 2001-2024 by the GRASS Development Team + (C) 2001-2025 by the GRASS Development Team This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details. diff --git a/lib/gis/gislib_cmdline_parsing.dox b/lib/gis/gislib_cmdline_parsing.dox index 2c21811ea03..60be9c3a392 100644 --- a/lib/gis/gislib_cmdline_parsing.dox +++ b/lib/gis/gislib_cmdline_parsing.dox @@ -4,7 +4,7 @@ diff --git a/lib/gis/lz4.c b/lib/gis/lz4.c index 1575be2c481..b2540a90438 100644 --- a/lib/gis/lz4.c +++ b/lib/gis/lz4.c @@ -1,6 +1,6 @@ /* LZ4 - Fast LZ compression algorithm - Copyright (C) 2011-2017, Yann Collet. + Copyright (C) 2011-2023, Yann Collet. BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) @@ -8,9 +8,9 @@ modification, are permitted provided that the following conditions are met: - * Redistributions of source code must retain the above copyright + * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above + * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. @@ -28,17 +28,18 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. You can contact the author at : - - LZ4 homepage : http://www.lz4.org - - LZ4 source repository : https://github.com/lz4/lz4 - */ + - LZ4 homepage : http://www.lz4.org + - LZ4 source repository : https://github.com/lz4/lz4 +*/ /*-************************************ * Tuning parameters **************************************/ /* * LZ4_HEAPMODE : - * Select how default compression functions will allocate memory for their hash - * table, in memory stack (0:default, fastest), or in memory heap (1:requires + * Select how stateless compression functions like `LZ4_compress_default()` + * allocate memory for their hash table, + * in memory stack (0:default, fastest), or in memory heap (1:requires * malloc()). */ #ifndef LZ4_HEAPMODE @@ -46,10 +47,16 @@ #endif /* - * ACCELERATION_DEFAULT : + * LZ4_ACCELERATION_DEFAULT : * Select "acceleration" for LZ4_compress_fast() when parameter value <= 0 */ -#define ACCELERATION_DEFAULT 1 +#define LZ4_ACCELERATION_DEFAULT 1 +/* + * LZ4_ACCELERATION_MAX : + * Any "acceleration" value higher than this threshold + * get treated as LZ4_ACCELERATION_MAX instead (fix #876) + */ +#define LZ4_ACCELERATION_MAX 65537 /*-************************************ * CPU Feature Detection @@ -58,20 +65,16 @@ * By default, access to unaligned memory is controlled by `memcpy()`, which is * safe and portable. 
Unfortunately, on some target/compiler combinations, the * generated assembly is sub-optimal. The below switch allow to select different - * access method for improved performance. - * Method 0 (default) : use `memcpy()`. Safe and portable. - * Method 1 : `__packed` statement. It depends on compiler extension - * (ie, not portable). - * This method is safe if your compiler supports it, and *generally* - * as fast or faster than `memcpy`. - * Method 2 : direct access. This method is portable but violate C standard. - * It can generate buggy code on targets which assembly generation - * depends on alignment. But in some circumstances, it's the only - * known way to get the most performance (ie GCC + ARMv6) - * See + * access method for improved performance. Method 0 (default) : use `memcpy()`. + * Safe and portable. Method 1 : `__packed` statement. It depends on compiler + * extension (ie, not portable). This method is safe if your compiler supports + * it, and *generally* as fast or faster than `memcpy`. Method 2 : direct + * access. This method is portable but violate C standard. It can generate buggy + * code on targets which assembly generation depends on alignment. But in some + * circumstances, it's the only known way to get the most performance (ie GCC + + * ARMv6) See * https://fastcompression.blogspot.fr/2015/08/accessing-unaligned-memory.html - * for details. - * Prefer these methods in priority order (0 > 1 > 2) + * for details. Prefer these methods in priority order (0 > 1 > 2) */ #ifndef LZ4_FORCE_MEMORY_ACCESS /* can be defined externally */ #if defined(__GNUC__) && \ @@ -79,7 +82,8 @@ defined(__ARM_ARCH_6K__) || defined(__ARM_ARCH_6Z__) || \ defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_6T2__)) #define LZ4_FORCE_MEMORY_ACCESS 2 -#elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__) +#elif (defined(__INTEL_COMPILER) && !defined(_WIN32)) || defined(__GNUC__) || \ + defined(_MSC_VER) #define LZ4_FORCE_MEMORY_ACCESS 1 #endif #endif @@ -92,36 +96,58 @@ #if defined(_MSC_VER) && \ defined(_WIN32_WCE) /* Visual Studio for WinCE doesn't support Hardware \ bit count */ +#undef LZ4_FORCE_SW_BITCOUNT /* avoid double def */ #define LZ4_FORCE_SW_BITCOUNT #endif /*-************************************ * Dependency **************************************/ -#define LZ4_STATIC_LINKING_ONLY +/* + * LZ4_SRC_INCLUDED: + * Amalgamation flag, whether lz4.c is included + */ +#ifndef LZ4_SRC_INCLUDED +#define LZ4_SRC_INCLUDED 1 +#endif + +#ifndef LZ4_DISABLE_DEPRECATE_WARNINGS #define LZ4_DISABLE_DEPRECATE_WARNINGS /* due to \ LZ4_decompress_safe_withPrefix64k */ +#endif + +#ifndef LZ4_STATIC_LINKING_ONLY +#define LZ4_STATIC_LINKING_ONLY +#endif #include "lz4.h" /* see also "memory routines" below */ /*-************************************ * Compiler Options **************************************/ -#ifdef _MSC_VER /* Visual Studio */ -#include +#if defined(_MSC_VER) && (_MSC_VER >= 1400) /* Visual Studio 2005+ */ +#include /* only present in VS2005+ */ #pragma warning( \ disable : 4127) /* disable: C4127: conditional expression is constant */ -#pragma warning(disable : 4293) /* disable: C4293: too large shift (32-bits) \ - */ +#pragma warning( \ + disable : 6237) /* disable: C6237: conditional expression is always 0 */ +#pragma warning( \ + disable : 6239) /* disable: C6239: ( && ) \ + always evaluates to the result of */ +#pragma warning( \ + disable : 6240) /* disable: C6240: ( && ) \ + always evaluates to the result of */ +#pragma warning(disable : 6326) /* disable: C6326: 
Potential comparison of a \ + constant with another constant */ #endif /* _MSC_VER */ #ifndef LZ4_FORCE_INLINE -#ifdef _MSC_VER /* Visual Studio */ +#if defined(_MSC_VER) && !defined(__clang__) /* MSVC */ #define LZ4_FORCE_INLINE static __forceinline #else #if defined(__cplusplus) || \ defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L /* C99 */ -#ifdef __GNUC__ +#if defined(__GNUC__) || defined(__clang__) #define LZ4_FORCE_INLINE static inline __attribute__((always_inline)) #else #define LZ4_FORCE_INLINE static inline @@ -132,8 +158,8 @@ #endif /* _MSC_VER */ #endif /* LZ4_FORCE_INLINE */ -/* LZ4_FORCE_O2_GCC_PPC64LE and LZ4_FORCE_O2_INLINE_GCC_PPC64LE - * Gcc on ppc64le generates an unrolled SIMDized loop for LZ4_wildCopy, +/* LZ4_FORCE_O2 and LZ4_FORCE_INLINE + * gcc on ppc64le generates an unrolled SIMDized loop for LZ4_wildCopy8, * together with a simple 8-byte copy loop as a fall-back path. * However, this optimization hurts the decompression speed by >30%, * because the execution does not go to the optimized loop @@ -141,18 +167,19 @@ * before going to the fall-back path become useless overhead. * This optimization happens only with the -O3 flag, and -O2 generates * a simple 8-byte copy loop. - * With gcc on ppc64le, all of the LZ4_decompress_* and LZ4_wildCopy + * With gcc on ppc64le, all of the LZ4_decompress_* and LZ4_wildCopy8 * functions are annotated with __attribute__((optimize("O2"))), - * and also LZ4_wildCopy is forcibly inlined, so that the O2 attribute - * of LZ4_wildCopy does not affect the compression speed. + * and also LZ4_wildCopy8 is forcibly inlined, so that the O2 attribute + * of LZ4_wildCopy8 does not affect the compression speed. */ -#if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) -#define LZ4_FORCE_O2_GCC_PPC64LE __attribute__((optimize("O2"))) -#define LZ4_FORCE_O2_INLINE_GCC_PPC64LE \ - __attribute__((optimize("O2"))) LZ4_FORCE_INLINE +#if defined(__PPC64__) && defined(__LITTLE_ENDIAN__) && defined(__GNUC__) && \ + !defined(__clang__) +#define LZ4_FORCE_O2 __attribute__((optimize("O2"))) +#undef LZ4_FORCE_INLINE +#define LZ4_FORCE_INLINE \ + static __inline __attribute__((optimize("O2"), always_inline)) #else -#define LZ4_FORCE_O2_GCC_PPC64LE -#define LZ4_FORCE_O2_INLINE_GCC_PPC64LE static +#define LZ4_FORCE_O2 #endif #if (defined(__GNUC__) && (__GNUC__ >= 3)) || \ @@ -170,19 +197,135 @@ #define unlikely(expr) expect((expr) != 0, 0) #endif +/* Should the alignment test prove unreliable, for some reason, + * it can be disabled by setting LZ4_ALIGN_TEST to 0 */ +#ifndef LZ4_ALIGN_TEST /* can be externally provided */ +#define LZ4_ALIGN_TEST 1 +#endif + /*-************************************ * Memory routines **************************************/ + +/*! LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION : + * Disable relatively high-level LZ4/HC functions that use dynamic memory + * allocation functions (malloc(), calloc(), free()). + * + * Note that this is a compile-time switch. And since it disables + * public/stable LZ4 v1 API functions, we don't recommend using this + * symbol to generate a library for distribution. + * + * The following public functions are removed when this symbol is defined. 
+ * - lz4 : LZ4_createStream, LZ4_freeStream, + * LZ4_createStreamDecode, LZ4_freeStreamDecode, LZ4_create + * (deprecated) + * - lz4hc : LZ4_createStreamHC, LZ4_freeStreamHC, + * LZ4_createHC (deprecated), LZ4_freeHC (deprecated) + * - lz4frame, lz4file : All LZ4F_* functions + */ +#if defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) +#define ALLOC(s) lz4_error_memory_allocation_is_disabled +#define ALLOC_AND_ZERO(s) lz4_error_memory_allocation_is_disabled +#define FREEMEM(p) lz4_error_memory_allocation_is_disabled +#elif defined(LZ4_USER_MEMORY_FUNCTIONS) +/* memory management functions can be customized by user project. + * Below functions must exist somewhere in the Project + * and be available at link time */ +void *LZ4_malloc(size_t s); +void *LZ4_calloc(size_t n, size_t s); +void LZ4_free(void *p); +#define ALLOC(s) LZ4_malloc(s) +#define ALLOC_AND_ZERO(s) LZ4_calloc(1, s) +#define FREEMEM(p) LZ4_free(p) +#else #include /* malloc, calloc, free */ #define ALLOC(s) malloc(s) #define ALLOC_AND_ZERO(s) calloc(1, s) #define FREEMEM(p) free(p) +#endif + +#if !LZ4_FREESTANDING #include /* memset, memcpy */ -#define MEM_INIT(p, v, s) memset((p), (v), (s)) +#endif +#if !defined(LZ4_memset) +#define LZ4_memset(p, v, s) memset((p), (v), (s)) +#endif +#define MEM_INIT(p, v, s) LZ4_memset((p), (v), (s)) + +/*-************************************ + * Common Constants + **************************************/ +#define MINMATCH 4 + +#define WILDCOPYLENGTH 8 +#define LASTLITERALS \ + 5 /* see ../doc/lz4_Block_format.md#parsing-restrictions \ + */ +#define MFLIMIT 12 /* see ../doc/lz4_Block_format.md#parsing-restrictions */ +#define MATCH_SAFEGUARD_DISTANCE \ + ((2 * WILDCOPYLENGTH) - \ + MINMATCH) /* ensure it's possible to write 2 x wildcopyLength without \ + overflowing output buffer */ +#define FASTLOOP_SAFE_DISTANCE 64 +static const int LZ4_minLength = (MFLIMIT + 1); + +#define KB *(1 << 10) +#define MB *(1 << 20) +#define GB *(1U << 30) + +#define LZ4_DISTANCE_ABSOLUTE_MAX 65535 +#if (LZ4_DISTANCE_MAX > \ + LZ4_DISTANCE_ABSOLUTE_MAX) /* max supported by LZ4 format */ +#error "LZ4_DISTANCE_MAX is too big : must be <= 65535" +#endif + +#define ML_BITS 4 +#define ML_MASK ((1U << ML_BITS) - 1) +#define RUN_BITS (8 - ML_BITS) +#define RUN_MASK ((1U << RUN_BITS) - 1) + +/*-************************************ + * Error detection + **************************************/ +#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 1) +#include +#else +#ifndef assert +#define assert(condition) ((void)0) +#endif +#endif + +#define LZ4_STATIC_ASSERT(c) \ + { \ + enum { LZ4_static_assert = 1 / (int)(!!(c)) }; \ + } /* use after variable declarations */ + +#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 2) +#include +static int g_debuglog_enable = 1; +#define DEBUGLOG(l, ...) \ + { \ + if ((g_debuglog_enable) && (l <= LZ4_DEBUG)) { \ + fprintf(stderr, __FILE__ " %i: ", __LINE__); \ + fprintf(stderr, __VA_ARGS__); \ + fprintf(stderr, " \n"); \ + } \ + } +#else +#define DEBUGLOG(l, ...) 
\ + { \ } /* disabled */ +#endif + +static int LZ4_isAligned(const void *ptr, size_t alignment) +{ + return ((size_t)ptr & (alignment - 1)) == 0; +} /*-************************************ - * Basic Types + * Types **************************************/ +#include <limits.h> #if defined(__cplusplus) || \ (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) #include <stdint.h> @@ -193,6 +336,9 @@ typedef int32_t S32; typedef uint64_t U64; typedef uintptr_t uptrval; #else +#if UINT_MAX != 4294967295UL +#error "LZ4 code (when not C++ or C99) assumes that sizeof(int) == 4" +#endif typedef unsigned char BYTE; typedef unsigned short U16; typedef unsigned int U32; @@ -207,9 +353,41 @@ typedef U64 reg_t; /* 64-bits in x32 mode */ typedef size_t reg_t; /* 32-bits in x32 mode */ #endif +typedef enum { + notLimited = 0, + limitedOutput = 1, + fillOutput = 2 +} limitedOutput_directive; + /*-************************************ * Reading and writing into memory **************************************/ + +/** + * LZ4 relies on memcpy with a constant size being inlined. In freestanding + * environments, the compiler can't assume the implementation of memcpy() is + * standard compliant, so it can't apply its specialized memcpy() inlining + * logic. When possible, use __builtin_memcpy() to tell the compiler to analyze + * memcpy() as if it were standard compliant, so it can inline it in + * freestanding environments. This is needed when decompressing the Linux + * Kernel, for example. + */ +#if !defined(LZ4_memcpy) +#if defined(__GNUC__) && (__GNUC__ >= 4) +#define LZ4_memcpy(dst, src, size) __builtin_memcpy(dst, src, size) +#else +#define LZ4_memcpy(dst, src, size) memcpy(dst, src, size) +#endif +#endif + +#if !defined(LZ4_memmove) +#if defined(__GNUC__) && (__GNUC__ >= 4) +#define LZ4_memmove __builtin_memmove +#else +#define LZ4_memmove memmove +#endif +#endif + static unsigned LZ4_isLittleEndian(void) { const union { @@ -219,6 +397,13 @@ static unsigned LZ4_isLittleEndian(void) return one.c[0]; } +#if defined(__GNUC__) || defined(__INTEL_COMPILER) +#define LZ4_PACK(__Declaration__) __Declaration__ __attribute__((__packed__)) +#elif defined(_MSC_VER) +#define LZ4_PACK(__Declaration__) \ + __pragma(pack(push, 1)) __Declaration__ __pragma(pack(pop)) +#endif + #if defined(LZ4_FORCE_MEMORY_ACCESS) && (LZ4_FORCE_MEMORY_ACCESS == 2) /* lie to the compiler about data alignment; use with caution */ @@ -226,12 +411,10 @@ static U16 LZ4_read16(const void *memPtr) { return *(const U16 *)memPtr; } - static U32 LZ4_read32(const void *memPtr) { return *(const U32 *)memPtr; } - static reg_t LZ4_read_ARCH(const void *memPtr) { return *(const reg_t *)memPtr; @@ -241,7 +424,6 @@ static void LZ4_write16(void *memPtr, U16 value) { *(U16 *)memPtr = value; } - static void LZ4_write32(void *memPtr, U32 value) { *(U32 *)memPtr = value; @@ -252,71 +434,63 @@ static void LZ4_write32(void *memPtr, U32 value) /* __pack instructions are safer, but compiler specific, hence potentially * problematic for some compilers */ /* currently only defined for gcc and icc */ -typedef union { - U16 u16; - U32 u32; - reg_t uArch; -} __attribute__((packed)) unalign; +LZ4_PACK(typedef struct { U16 u16; }) LZ4_unalign16; +LZ4_PACK(typedef struct { U32 u32; }) LZ4_unalign32; +LZ4_PACK(typedef struct { reg_t uArch; }) LZ4_unalignST; static U16 LZ4_read16(const void *ptr) { - return ((const unalign *)ptr)->u16; + return ((const LZ4_unalign16 *)ptr)->u16; } - static U32 LZ4_read32(const void *ptr) { - return ((const unalign *)ptr)->u32; + return ((const 
LZ4_unalign32 *)ptr)->u32; } - static reg_t LZ4_read_ARCH(const void *ptr) { - return ((const unalign *)ptr)->uArch; + return ((const LZ4_unalignST *)ptr)->uArch; } static void LZ4_write16(void *memPtr, U16 value) { - ((unalign *)memPtr)->u16 = value; + ((LZ4_unalign16 *)memPtr)->u16 = value; } - static void LZ4_write32(void *memPtr, U32 value) { - ((unalign *)memPtr)->u32 = value; + ((LZ4_unalign32 *)memPtr)->u32 = value; } -#else /* safe and portable access through memcpy() */ +#else /* safe and portable access using memcpy() */ static U16 LZ4_read16(const void *memPtr) { U16 val; - - memcpy(&val, memPtr, sizeof(val)); + LZ4_memcpy(&val, memPtr, sizeof(val)); return val; } static U32 LZ4_read32(const void *memPtr) { U32 val; - - memcpy(&val, memPtr, sizeof(val)); + LZ4_memcpy(&val, memPtr, sizeof(val)); return val; } static reg_t LZ4_read_ARCH(const void *memPtr) { reg_t val; - - memcpy(&val, memPtr, sizeof(val)); + LZ4_memcpy(&val, memPtr, sizeof(val)); return val; } static void LZ4_write16(void *memPtr, U16 value) { - memcpy(memPtr, &value, sizeof(value)); + LZ4_memcpy(memPtr, &value, sizeof(value)); } static void LZ4_write32(void *memPtr, U32 value) { - memcpy(memPtr, &value, sizeof(value)); + LZ4_memcpy(memPtr, &value, sizeof(value)); } #endif /* LZ4_FORCE_MEMORY_ACCESS */ @@ -328,10 +502,22 @@ static U16 LZ4_readLE16(const void *memPtr) } else { const BYTE *p = (const BYTE *)memPtr; + return (U16)((U16)p[0] | (p[1] << 8)); + } +} - return (U16)((U16)p[0] + (p[1] << 8)); +#ifdef LZ4_STATIC_LINKING_ONLY_ENDIANNESS_INDEPENDENT_OUTPUT +static U32 LZ4_readLE32(const void *memPtr) +{ + if (LZ4_isLittleEndian()) { + return LZ4_read32(memPtr); + } + else { + const BYTE *p = (const BYTE *)memPtr; + return (U32)p[0] | (p[1] << 8) | (p[2] << 16) | (p[3] << 24); } } +#endif static void LZ4_writeLE16(void *memPtr, U16 value) { @@ -340,7 +526,6 @@ static void LZ4_writeLE16(void *memPtr, U16 value) } else { BYTE *p = (BYTE *)memPtr; - p[0] = (BYTE)value; p[1] = (BYTE)(value >> 8); } @@ -348,74 +533,129 @@ static void LZ4_writeLE16(void *memPtr, U16 value) /* customized variant of memcpy, which can overwrite up to 8 bytes beyond dstEnd */ -LZ4_FORCE_O2_INLINE_GCC_PPC64LE -void LZ4_wildCopy(void *dstPtr, const void *srcPtr, void *dstEnd) +LZ4_FORCE_INLINE +void LZ4_wildCopy8(void *dstPtr, const void *srcPtr, void *dstEnd) { BYTE *d = (BYTE *)dstPtr; const BYTE *s = (const BYTE *)srcPtr; BYTE *const e = (BYTE *)dstEnd; do { - memcpy(d, s, 8); + LZ4_memcpy(d, s, 8); d += 8; s += 8; } while (d < e); } -/*-************************************ - * Common Constants - **************************************/ -#define MINMATCH 4 +static const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4}; +static const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3}; -#define WILDCOPYLENGTH 8 -#define LASTLITERALS 5 -#define MFLIMIT (WILDCOPYLENGTH + MINMATCH) -static const int LZ4_minLength = (MFLIMIT + 1); +#ifndef LZ4_FAST_DEC_LOOP +#if defined __i386__ || defined _M_IX86 || defined __x86_64__ || defined _M_X64 +#define LZ4_FAST_DEC_LOOP 1 +#elif defined(__aarch64__) && defined(__APPLE__) +#define LZ4_FAST_DEC_LOOP 1 +#elif defined(__aarch64__) && !defined(__clang__) +/* On non-Apple aarch64, we disable this optimization for clang because + * on certain mobile chipsets, performance is reduced with clang. 
For + * more information refer to https://github.com/lz4/lz4/pull/707 */ +#define LZ4_FAST_DEC_LOOP 1 +#else +#define LZ4_FAST_DEC_LOOP 0 +#endif +#endif -#define KB *(1 << 10) -#define MB *(1 << 20) -#define GB *(1U << 30) +#if LZ4_FAST_DEC_LOOP + +LZ4_FORCE_INLINE void LZ4_memcpy_using_offset_base(BYTE *dstPtr, + const BYTE *srcPtr, + BYTE *dstEnd, + const size_t offset) +{ + assert(srcPtr + offset == dstPtr); + if (offset < 8) { + LZ4_write32(dstPtr, 0); /* silence an msan warning when offset==0 */ + dstPtr[0] = srcPtr[0]; + dstPtr[1] = srcPtr[1]; + dstPtr[2] = srcPtr[2]; + dstPtr[3] = srcPtr[3]; + srcPtr += inc32table[offset]; + LZ4_memcpy(dstPtr + 4, srcPtr, 4); + srcPtr -= dec64table[offset]; + dstPtr += 8; + } + else { + LZ4_memcpy(dstPtr, srcPtr, 8); + dstPtr += 8; + srcPtr += 8; + } -#define MAXD_LOG 16 -#define MAX_DISTANCE ((1 << MAXD_LOG) - 1) + LZ4_wildCopy8(dstPtr, srcPtr, dstEnd); +} -#define ML_BITS 4 -#define ML_MASK ((1U << ML_BITS) - 1) -#define RUN_BITS (8 - ML_BITS) -#define RUN_MASK ((1U << RUN_BITS) - 1) +/* customized variant of memcpy, which can overwrite up to 32 bytes beyond + * dstEnd this version copies two times 16 bytes (instead of one time 32 bytes) + * because it must be compatible with offsets >= 16. */ +LZ4_FORCE_INLINE void LZ4_wildCopy32(void *dstPtr, const void *srcPtr, + void *dstEnd) +{ + BYTE *d = (BYTE *)dstPtr; + const BYTE *s = (const BYTE *)srcPtr; + BYTE *const e = (BYTE *)dstEnd; -/*-************************************ - * Error detection - **************************************/ -#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 1) -#include -#else -#ifndef assert -#define assert(condition) ((void)0) -#endif -#endif + do { + LZ4_memcpy(d, s, 16); + LZ4_memcpy(d + 16, s + 16, 16); + d += 32; + s += 32; + } while (d < e); +} -#define LZ4_STATIC_ASSERT(c) \ - { \ - enum { LZ4_static_assert = 1 / (int)(!!(c)) }; \ - } /* use after variable declarations */ +/* LZ4_memcpy_using_offset() presumes : + * - dstEnd >= dstPtr + MINMATCH + * - there is at least 12 bytes available to write after dstEnd */ +LZ4_FORCE_INLINE void LZ4_memcpy_using_offset(BYTE *dstPtr, const BYTE *srcPtr, + BYTE *dstEnd, const size_t offset) +{ + BYTE v[8]; -#if defined(LZ4_DEBUG) && (LZ4_DEBUG >= 2) -#include -static int g_debuglog_enable = 1; + assert(dstEnd >= dstPtr + MINMATCH); -#define DEBUGLOG(l, ...) \ - { \ - if ((g_debuglog_enable) && (l <= LZ4_DEBUG)) { \ - fprintf(stderr, __FILE__ ": "); \ - fprintf(stderr, __VA_ARGS__); \ - fprintf(stderr, " \n"); \ - } \ + switch (offset) { + case 1: + MEM_INIT(v, *srcPtr, 8); + break; + case 2: + LZ4_memcpy(v, srcPtr, 2); + LZ4_memcpy(&v[2], srcPtr, 2); +#if defined(_MSC_VER) && (_MSC_VER <= 1937) /* MSVC 2022 ver 17.7 or earlier \ + */ +#pragma warning(push) +#pragma warning( \ + disable : 6385) /* warning C6385: Reading invalid data from 'v'. */ +#endif + LZ4_memcpy(&v[4], v, 4); +#if defined(_MSC_VER) && (_MSC_VER <= 1937) /* MSVC 2022 ver 17.7 or earlier \ + */ +#pragma warning(pop) +#endif + break; + case 4: + LZ4_memcpy(v, srcPtr, 4); + LZ4_memcpy(&v[4], srcPtr, 4); + break; + default: + LZ4_memcpy_using_offset_base(dstPtr, srcPtr, dstEnd, offset); + return; } -#else -#define DEBUGLOG(l, ...) 
\ - { \ - } /* disabled */ + + LZ4_memcpy(dstPtr, v, 8); + dstPtr += 8; + while (dstPtr < dstEnd) { + LZ4_memcpy(dstPtr, v, 8); + dstPtr += 8; + } +} #endif /*-************************************ @@ -423,64 +663,89 @@ static int g_debuglog_enable = 1; **************************************/ static unsigned LZ4_NbCommonBytes(reg_t val) { + assert(val != 0); if (LZ4_isLittleEndian()) { if (sizeof(val) == 8) { -#if defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT) +#if defined(_MSC_VER) && (_MSC_VER >= 1800) && \ + (defined(_M_AMD64) && !defined(_M_ARM64EC)) && \ + !defined(LZ4_FORCE_SW_BITCOUNT) +/*-************************************************************************************************* + * ARM64EC is a Microsoft-designed ARM64 ABI compatible with AMD64 applications + *on ARM64 Windows 11. The ARM64EC ABI does not support AVX/AVX2/AVX512 + *instructions, nor their relevant intrinsics including _tzcnt_u64. Therefore, + *we need to neuter the _tzcnt_u64 code path for ARM64EC. + ****************************************************************************************************/ +#if defined(__clang__) && (__clang_major__ < 10) + /* Avoid undefined clang-cl intrinsics issue. + * See https://github.com/lz4/lz4/pull/1017 for details. */ + return (unsigned)__builtin_ia32_tzcnt_u64(val) >> 3; +#else + /* x64 CPUS without BMI support interpret `TZCNT` as `REP BSF` */ + return (unsigned)_tzcnt_u64(val) >> 3; +#endif +#elif defined(_MSC_VER) && defined(_WIN64) && !defined(LZ4_FORCE_SW_BITCOUNT) unsigned long r = 0; - _BitScanForward64(&r, (U64)val); - return (int)(r >> 3); -#elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 3))) && \ + return (unsigned)r >> 3; +#elif (defined(__clang__) || \ + (defined(__GNUC__) && \ + ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \ !defined(LZ4_FORCE_SW_BITCOUNT) - return (__builtin_ctzll((U64)val) >> 3); + return (unsigned)__builtin_ctzll((U64)val) >> 3; #else - static const int DeBruijnBytePos[64] = { - 0, 0, 0, 0, 0, 1, 1, 2, 0, 3, 1, 3, 1, 4, 2, 7, - 0, 2, 3, 6, 1, 5, 3, 5, 1, 3, 4, 4, 2, 5, 6, 7, - 7, 0, 1, 2, 3, 3, 4, 6, 2, 6, 5, 5, 3, 4, 5, 6, - 7, 1, 2, 4, 6, 4, 4, 5, 7, 2, 6, 5, 7, 6, 7, 7}; - return DeBruijnBytePos[((U64)((val & -(long long)val) * - 0x0218A392CDABBD3FULL)) >> - 58]; + const U64 m = 0x0101010101010101ULL; + val ^= val - 1; + return (unsigned)(((U64)((val & (m - 1)) * m)) >> 56); #endif } - else { /* 32 bits */ -#if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT) + else /* 32 bits */ { +#if defined(_MSC_VER) && (_MSC_VER >= 1400) && !defined(LZ4_FORCE_SW_BITCOUNT) unsigned long r; - _BitScanForward(&r, (U32)val); - return (int)(r >> 3); -#elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 3))) && \ - !defined(LZ4_FORCE_SW_BITCOUNT) - return (__builtin_ctz((U32)val) >> 3); + return (unsigned)r >> 3; +#elif (defined(__clang__) || \ + (defined(__GNUC__) && \ + ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \ + !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT) + return (unsigned)__builtin_ctz((U32)val) >> 3; #else - static const int DeBruijnBytePos[32] = { - 0, 0, 3, 0, 3, 1, 3, 0, 3, 2, 2, 1, 3, 2, 0, 1, - 3, 3, 1, 2, 2, 2, 2, 0, 3, 1, 2, 0, 1, 0, 1, 1}; - return DeBruijnBytePos[((U32)((val & -(S32)val) * 0x077CB531U)) >> - 27]; + const U32 m = 0x01010101; + return (unsigned)((((val - 1) ^ val) & (m - 1)) * m) >> 24; #endif } } - else { /* Big Endian CPU */ - if (sizeof(val) == 8) { /* 64-bits */ -#if defined(_MSC_VER) && defined(_WIN64) 
&& !defined(LZ4_FORCE_SW_BITCOUNT) - unsigned long r = 0; - - _BitScanReverse64(&r, val); - return (unsigned)(r >> 3); -#elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 3))) && \ - !defined(LZ4_FORCE_SW_BITCOUNT) - return (__builtin_clzll((U64)val) >> 3); + else /* Big Endian CPU */ { + if (sizeof(val) == 8) { +#if (defined(__clang__) || \ + (defined(__GNUC__) && \ + ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \ + !defined(__TINYC__) && !defined(LZ4_FORCE_SW_BITCOUNT) + return (unsigned)__builtin_clzll((U64)val) >> 3; +#else +#if 1 + /* this method is probably faster, + * but adds a 128 bytes lookup table */ + static const unsigned char ctz7_tab[128] = { + 7, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 4, 0, 1, + 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 5, 0, 1, 0, 2, 0, + 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 4, 0, 1, 0, 2, 0, 1, 0, 3, + 0, 1, 0, 2, 0, 1, 0, 6, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, + 2, 0, 1, 0, 4, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, + 0, 5, 0, 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, 4, 0, + 1, 0, 2, 0, 1, 0, 3, 0, 1, 0, 2, 0, 1, 0, + }; + U64 const mask = 0x0101010101010101ULL; + U64 const t = (((val >> 8) - mask) | val) & mask; + return ctz7_tab[(t * 0x0080402010080402ULL) >> 57]; #else + /* this method doesn't consume memory space like the previous one, + * but it contains several branches, + * that may end up slowing execution */ static const U32 by32 = - sizeof(val) * - 4; /* 32 on 64 bits (goal), 16 on 32 bits. - Just to avoid some static analyzer complaining about shift - by 32 on 32-bits target. Note that this code path is never - triggered in 32-bits mode. */ + sizeof(val) * 4; /* 32 on 64 bits (goal), 16 on 32 bits. +Just to avoid some static analyzer complaining about shift by 32 on 32-bits +target. Note that this code path is never triggered in 32-bits mode. 
*/ unsigned r; - if (!(val >> by32)) { r = 4; } @@ -497,30 +762,21 @@ static unsigned LZ4_NbCommonBytes(reg_t val) } r += (!val); return r; +#endif #endif } - else { /* 32 bits */ -#if defined(_MSC_VER) && !defined(LZ4_FORCE_SW_BITCOUNT) - unsigned long r = 0; - - _BitScanReverse(&r, (unsigned long)val); - return (unsigned)(r >> 3); -#elif (defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 3))) && \ + else /* 32 bits */ { +#if (defined(__clang__) || \ + (defined(__GNUC__) && \ + ((__GNUC__ > 3) || ((__GNUC__ == 3) && (__GNUC_MINOR__ >= 4))))) && \ !defined(LZ4_FORCE_SW_BITCOUNT) - return (__builtin_clz((U32)val) >> 3); + return (unsigned)__builtin_clz((U32)val) >> 3; #else - unsigned r; - - if (!(val >> 16)) { - r = 2; - val >>= 8; - } - else { - r = 0; - val >>= 24; - } - r += (!val); - return r; + val >>= 8; + val = ((((val + 0x00FFFF00) | 0x00FFFFFF) + val) | + (val + 0x00FF0000)) >> + 24; + return (unsigned)val ^ 3; #endif } } @@ -534,7 +790,6 @@ unsigned LZ4_count(const BYTE *pIn, const BYTE *pMatch, const BYTE *pInLimit) if (likely(pIn < pInLimit - (STEPSIZE - 1))) { reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn); - if (!diff) { pIn += STEPSIZE; pMatch += STEPSIZE; @@ -546,7 +801,6 @@ unsigned LZ4_count(const BYTE *pIn, const BYTE *pMatch, const BYTE *pInLimit) while (likely(pIn < pInLimit - (STEPSIZE - 1))) { reg_t const diff = LZ4_read_ARCH(pMatch) ^ LZ4_read_ARCH(pIn); - if (!diff) { pIn += STEPSIZE; pMatch += STEPSIZE; @@ -571,7 +825,6 @@ unsigned LZ4_count(const BYTE *pIn, const BYTE *pMatch, const BYTE *pInLimit) } #ifndef LZ4_COMMONDEFS_ONLY - /*-************************************ * Local Constants **************************************/ @@ -582,11 +835,6 @@ static const U32 LZ4_skipTrigger = 6; /* Increase this value ==> compression run /*-************************************ * Local Structures and types **************************************/ -typedef enum { - notLimited = 0, - limitedOutput = 1, - fillOutput = 2 -} limitedOutput_directive; typedef enum { clearedTable = 0, byPtr, byU32, byU16 } tableType_t; /** @@ -602,10 +850,10 @@ typedef enum { clearedTable = 0, byPtr, byU32, byU16 } tableType_t; * - usingExtDict : Like withPrefix64k, but the preceding content is somewhere * else in memory, starting at ctx->dictionary with length * ctx->dictSize. - * - usingDictCtx : Like usingExtDict, but everything concerning the preceding - * content is in a separate context, pointed to by - * ctx->dictCtx. ctx->dictionary, ctx->dictSize, and table - * entries in the current context that refer to positions + * - usingDictCtx : Everything concerning the preceding content is + * in a separate context, pointed to by ctx->dictCtx. + * ctx->dictionary, ctx->dictSize, and table entries + * in the current context that refer to positions * preceding the beginning of the current compression are * ignored. 
Instead, ctx->dictCtx->dictionary and ctx->dictCtx * ->dictSize describe the location and size of the preceding @@ -620,9 +868,6 @@ typedef enum { } dict_directive; typedef enum { noDictIssue = 0, dictSmall } dictIssue_directive; -typedef enum { endOnOutputSize = 0, endOnInputSize = 1 } endCondition_directive; -typedef enum { full = 0, partial = 1 } earlyEnd_directive; - /*-************************************ * Local Utils **************************************/ @@ -630,26 +875,43 @@ int LZ4_versionNumber(void) { return LZ4_VERSION_NUMBER; } - const char *LZ4_versionString(void) { return LZ4_VERSION_STRING; } - int LZ4_compressBound(int isize) { return LZ4_COMPRESSBOUND(isize); } - int LZ4_sizeofState(void) { - return LZ4_STREAMSIZE; + return sizeof(LZ4_stream_t); +} + +/*-**************************************** + * Internal Definitions, used only in Tests + *******************************************/ +#if defined(__cplusplus) +extern "C" { +#endif + +int LZ4_compress_forceExtDict(LZ4_stream_t *LZ4_dict, const char *source, + char *dest, int srcSize); + +int LZ4_decompress_safe_forceExtDict(const char *source, char *dest, + int compressedSize, int maxOutputSize, + const void *dictStart, size_t dictSize); +int LZ4_decompress_safe_partial_forceExtDict( + const char *source, char *dest, int compressedSize, int targetOutputSize, + int dstCapacity, const void *dictStart, size_t dictSize); +#if defined(__cplusplus) } +#endif /*-****************************** * Compression functions ********************************/ -static U32 LZ4_hash4(U32 sequence, tableType_t const tableType) +LZ4_FORCE_INLINE U32 LZ4_hash4(U32 sequence, tableType_t const tableType) { if (tableType == byU16) return ((sequence * 2654435761U) >> @@ -658,16 +920,17 @@ static U32 LZ4_hash4(U32 sequence, tableType_t const tableType) return ((sequence * 2654435761U) >> ((MINMATCH * 8) - LZ4_HASHLOG)); } -static U32 LZ4_hash5(U64 sequence, tableType_t const tableType) +LZ4_FORCE_INLINE U32 LZ4_hash5(U64 sequence, tableType_t const tableType) { - static const U64 prime5bytes = 889523592379ULL; - static const U64 prime8bytes = 11400714785074694791ULL; const U32 hashLog = (tableType == byU16) ? LZ4_HASHLOG + 1 : LZ4_HASHLOG; - - if (LZ4_isLittleEndian()) + if (LZ4_isLittleEndian()) { + const U64 prime5bytes = 889523592379ULL; return (U32)(((sequence << 24) * prime5bytes) >> (64 - hashLog)); - else + } + else { + const U64 prime8bytes = 11400714785074694791ULL; return (U32)(((sequence >> 24) * prime8bytes) >> (64 - hashLog)); + } } LZ4_FORCE_INLINE U32 LZ4_hashPosition(const void *const p, @@ -675,72 +938,81 @@ LZ4_FORCE_INLINE U32 LZ4_hashPosition(const void *const p, { if ((sizeof(reg_t) == 8) && (tableType != byU16)) return LZ4_hash5(LZ4_read_ARCH(p), tableType); + +#ifdef LZ4_STATIC_LINKING_ONLY_ENDIANNESS_INDEPENDENT_OUTPUT + return LZ4_hash4(LZ4_readLE32(p), tableType); +#else return LZ4_hash4(LZ4_read32(p), tableType); +#endif } -static void LZ4_putIndexOnHash(U32 idx, U32 h, void *tableBase, - tableType_t const tableType) +LZ4_FORCE_INLINE void LZ4_clearHash(U32 h, void *tableBase, + tableType_t const tableType) { switch (tableType) { - default: /* fallthrough */ - case clearedTable: /* fallthrough */ - case byPtr: { /* illegal! */ + default: /* fallthrough */ + case clearedTable: { /* illegal! 
*/ assert(0); return; } + case byPtr: { + const BYTE **hashTable = (const BYTE **)tableBase; + hashTable[h] = NULL; + return; + } case byU32: { U32 *hashTable = (U32 *)tableBase; - - hashTable[h] = idx; + hashTable[h] = 0; return; } case byU16: { U16 *hashTable = (U16 *)tableBase; - - assert(idx < 65536); - hashTable[h] = (U16)idx; + hashTable[h] = 0; return; } } } -static void LZ4_putPositionOnHash(const BYTE *p, U32 h, void *tableBase, - tableType_t const tableType, - const BYTE *srcBase) +LZ4_FORCE_INLINE void LZ4_putIndexOnHash(U32 idx, U32 h, void *tableBase, + tableType_t const tableType) { switch (tableType) { - case clearedTable: { /* illegal! */ + default: /* fallthrough */ + case clearedTable: /* fallthrough */ + case byPtr: { /* illegal! */ assert(0); return; } - case byPtr: { - const BYTE **hashTable = (const BYTE **)tableBase; - - hashTable[h] = p; - return; - } case byU32: { U32 *hashTable = (U32 *)tableBase; - - hashTable[h] = (U32)(p - srcBase); + hashTable[h] = idx; return; } case byU16: { U16 *hashTable = (U16 *)tableBase; - - hashTable[h] = (U16)(p - srcBase); + assert(idx < 65536); + hashTable[h] = (U16)idx; return; } } } +/* LZ4_putPosition*() : only used in byPtr mode */ +LZ4_FORCE_INLINE void LZ4_putPositionOnHash(const BYTE *p, U32 h, + void *tableBase, + tableType_t const tableType) +{ + const BYTE **const hashTable = (const BYTE **)tableBase; + assert(tableType == byPtr); + (void)tableType; + hashTable[h] = p; +} + LZ4_FORCE_INLINE void LZ4_putPosition(const BYTE *p, void *tableBase, - tableType_t tableType, - const BYTE *srcBase) + tableType_t tableType) { U32 const h = LZ4_hashPosition(p, tableType); - - LZ4_putPositionOnHash(p, h, tableBase, tableType, srcBase); + LZ4_putPositionOnHash(p, h, tableBase, tableType); } /* LZ4_getIndexOnHash() : @@ -749,19 +1021,17 @@ LZ4_FORCE_INLINE void LZ4_putPosition(const BYTE *p, void *tableBase, * Assumption 1 : only valid if tableType == byU32 or byU16. 
* Assumption 2 : h is presumed valid (within limits of hash table) */ -static U32 LZ4_getIndexOnHash(U32 h, const void *tableBase, - tableType_t tableType) +LZ4_FORCE_INLINE U32 LZ4_getIndexOnHash(U32 h, const void *tableBase, + tableType_t tableType) { LZ4_STATIC_ASSERT(LZ4_MEMORY_USAGE > 2); if (tableType == byU32) { const U32 *const hashTable = (const U32 *)tableBase; - assert(h < (1U << (LZ4_MEMORY_USAGE - 2))); return hashTable[h]; } if (tableType == byU16) { const U16 *const hashTable = (const U16 *)tableBase; - assert(h < (1U << (LZ4_MEMORY_USAGE - 1))); return hashTable[h]; } @@ -770,34 +1040,21 @@ static U32 LZ4_getIndexOnHash(U32 h, const void *tableBase, } static const BYTE *LZ4_getPositionOnHash(U32 h, const void *tableBase, - tableType_t tableType, - const BYTE *srcBase) + tableType_t tableType) { - if (tableType == byPtr) { + assert(tableType == byPtr); + (void)tableType; + { const BYTE *const *hashTable = (const BYTE *const *)tableBase; - return hashTable[h]; } - if (tableType == byU32) { - const U32 *const hashTable = (const U32 *)tableBase; - - return hashTable[h] + srcBase; - } - { - const U16 *const hashTable = (const U16 *)tableBase; - - return hashTable[h] + srcBase; - } /* default, to ensure a return */ } -LZ4_FORCE_INLINE const BYTE *LZ4_getPosition(const BYTE *p, - const void *tableBase, - tableType_t tableType, - const BYTE *srcBase) +LZ4_FORCE_INLINE const BYTE * +LZ4_getPosition(const BYTE *p, const void *tableBase, tableType_t tableType) { U32 const h = LZ4_hashPosition(p, tableType); - - return LZ4_getPositionOnHash(h, tableBase, tableType, srcBase); + return LZ4_getPositionOnHash(h, tableBase, tableType); } LZ4_FORCE_INLINE void LZ4_prepareTable(LZ4_stream_t_internal *const cctx, @@ -808,25 +1065,27 @@ LZ4_FORCE_INLINE void LZ4_prepareTable(LZ4_stream_t_internal *const cctx, * therefore safe to use no matter what mode we're in. Otherwise, we figure * out if it's safe to leave as is or whether it needs to be reset. */ - if (cctx->tableType != clearedTable) { - if (cctx->tableType != tableType || - (tableType == byU16 && - cctx->currentOffset + inputSize >= 0xFFFFU) || - (tableType == byU32 && cctx->currentOffset > 1 GB) || + if ((tableType_t)cctx->tableType != clearedTable) { + assert(inputSize >= 0); + if ((tableType_t)cctx->tableType != tableType || + ((tableType == byU16) && + cctx->currentOffset + (unsigned)inputSize >= 0xFFFFU) || + ((tableType == byU32) && cctx->currentOffset > 1 GB) || tableType == byPtr || inputSize >= 4 KB) { DEBUGLOG(4, "LZ4_prepareTable: Resetting table in %p", cctx); MEM_INIT(cctx->hashTable, 0, LZ4_HASHTABLESIZE); cctx->currentOffset = 0; - cctx->tableType = clearedTable; + cctx->tableType = (U32)clearedTable; } else { DEBUGLOG(4, "LZ4_prepareTable: Re-use hash table (no reset)"); } } - /* Adding a gap, so all previous entries are > MAX_DISTANCE back, is faster - * than compressing without a gap. However, compressing with - * currentOffset == 0 is faster still, so we preserve that case. + /* Adding a gap, so all previous entries are > LZ4_DISTANCE_MAX back, + * is faster than compressing without a gap. + * However, compressing with currentOffset == 0 is faster still, + * so we preserve that case. 
*/ if (cctx->currentOffset != 0 && tableType == byU32) { DEBUGLOG(5, "LZ4_prepareTable: adding 64KB to currentOffset"); @@ -839,16 +1098,21 @@ LZ4_FORCE_INLINE void LZ4_prepareTable(LZ4_stream_t_internal *const cctx, cctx->dictSize = 0; } -/** LZ4_compress_generic() : - inlined, to ensure branches are decided at compilation time */ -LZ4_FORCE_INLINE int LZ4_compress_generic( +/** LZ4_compress_generic_validated() : + * inlined, to ensure branches are decided at compilation time. + * The following conditions are presumed already validated: + * - source != NULL + * - inputSize > 0 + */ +LZ4_FORCE_INLINE int LZ4_compress_generic_validated( LZ4_stream_t_internal *const cctx, const char *const source, char *const dest, const int inputSize, - int *inputConsumed, /* only written when outputLimited == fillOutput */ - const int maxOutputSize, const limitedOutput_directive outputLimited, + int *inputConsumed, /* only written when outputDirective == fillOutput */ + const int maxOutputSize, const limitedOutput_directive outputDirective, const tableType_t tableType, const dict_directive dictDirective, - const dictIssue_directive dictIssue, const U32 acceleration) + const dictIssue_directive dictIssue, const int acceleration) { + int result; const BYTE *ip = (const BYTE *)source; U32 const startIndex = cctx->currentOffset; @@ -864,13 +1128,13 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( const U32 dictDelta = (dictDirective == usingDictCtx) ? startIndex - dictCtx->currentOffset : 0; /* make indexes in dictCtx comparable with - index in current context */ + indexes in current context */ int const maybe_extMem = (dictDirective == usingExtDict) || (dictDirective == usingDictCtx); U32 const prefixIdxLimit = startIndex - dictSize; /* used when dictDirective == dictSmall */ - const BYTE *const dictEnd = dictionary + dictSize; + const BYTE *const dictEnd = dictionary ? dictionary + dictSize : dictionary; const BYTE *anchor = (const BYTE *)source; const BYTE *const iend = ip + inputSize; const BYTE *const mflimitPlusOne = iend - MFLIMIT + 1; @@ -878,7 +1142,8 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( /* the dictCtx currentOffset is indexed on the start of the dictionary, * while a dictionary in the current context precedes the currentOffset */ - const BYTE *dictBase = dictDirective == usingDictCtx + const BYTE *dictBase = (dictionary == NULL) ? NULL + : (dictDirective == usingDictCtx) ? 
dictionary + dictSize - dictCtx->currentOffset : dictionary + dictSize - startIndex; @@ -888,18 +1153,20 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( U32 offset = 0; U32 forwardH; - DEBUGLOG(5, "LZ4_compress_generic: srcSize=%i, tableType=%u", inputSize, - tableType); - /* Init conditions */ - if (outputLimited == fillOutput && maxOutputSize < 1) - return 0; /* Impossible to store anything */ - if ((U32)inputSize > (U32)LZ4_MAX_INPUT_SIZE) - return 0; /* Unsupported inputSize, too large (or negative) */ - if ((tableType == byU16) && (inputSize >= LZ4_64Klimit)) - return 0; /* Size too large (not within 64K limit) */ + DEBUGLOG(5, "LZ4_compress_generic_validated: srcSize=%i, tableType=%u", + inputSize, tableType); + assert(ip != NULL); + if (tableType == byU16) + assert(inputSize < + LZ4_64Klimit); /* Size too large (not within 64K limit) */ if (tableType == byPtr) assert(dictDirective == noDict); /* only supported use case with byPtr */ + /* If init conditions are not met, we don't have to mark stream + * as having dirty context, since no action was taken yet */ + if (outputDirective == fillOutput && maxOutputSize < 1) { + return 0; + } /* Impossible to store anything */ assert(acceleration >= 1); lowLimit = @@ -916,31 +1183,38 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( cctx->dictSize += (U32)inputSize; } cctx->currentOffset += (U32)inputSize; - cctx->tableType = tableType; + cctx->tableType = (U32)tableType; if (inputSize < LZ4_minLength) goto _last_literals; /* Input too small, no compression (all literals) */ /* First Byte */ - LZ4_putPosition(ip, cctx->hashTable, tableType, base); - ip++; - forwardH = LZ4_hashPosition(ip, tableType); - - /* Main Loop */ + { + U32 const h = LZ4_hashPosition(ip, tableType); + if (tableType == byPtr) { + LZ4_putPositionOnHash(ip, h, cctx->hashTable, byPtr); + } + else { + LZ4_putIndexOnHash(startIndex, h, cctx->hashTable, tableType); + } + } + ip++; + forwardH = LZ4_hashPosition(ip, tableType); + + /* Main Loop */ for (;;) { const BYTE *match; BYTE *token; + const BYTE *filledIp; /* Find a match */ if (tableType == byPtr) { const BYTE *forwardIp = ip; - unsigned step = 1; - unsigned searchMatchNb = acceleration << LZ4_skipTrigger; - + int step = 1; + int searchMatchNb = acceleration << LZ4_skipTrigger; do { U32 const h = forwardH; - ip = forwardIp; forwardIp += step; step = (searchMatchNb++ >> LZ4_skipTrigger); @@ -949,20 +1223,18 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( goto _last_literals; assert(ip < mflimitPlusOne); - match = - LZ4_getPositionOnHash(h, cctx->hashTable, tableType, base); + match = LZ4_getPositionOnHash(h, cctx->hashTable, tableType); forwardH = LZ4_hashPosition(forwardIp, tableType); - LZ4_putPositionOnHash(ip, h, cctx->hashTable, tableType, base); + LZ4_putPositionOnHash(ip, h, cctx->hashTable, tableType); - } while ((match + MAX_DISTANCE < ip) || + } while ((match + LZ4_DISTANCE_MAX < ip) || (LZ4_read32(match) != LZ4_read32(ip))); } else { /* byU32, byU16 */ const BYTE *forwardIp = ip; - unsigned step = 1; - unsigned searchMatchNb = acceleration << LZ4_skipTrigger; - + int step = 1; + int searchMatchNb = acceleration << LZ4_skipTrigger; do { U32 const h = forwardH; U32 const current = (U32)(forwardIp - base); @@ -1002,6 +1274,7 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( "startIndex=%5u", matchIndex, startIndex); assert(startIndex - matchIndex >= MINMATCH); + assert(dictBase); match = dictBase + matchIndex; lowLimit = dictionary; } @@ -1016,16 +1289,20 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( forwardH = 
LZ4_hashPosition(forwardIp, tableType); LZ4_putIndexOnHash(current, h, cctx->hashTable, tableType); - if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) - continue; /* match outside of valid area */ + DEBUGLOG(7, "candidate at pos=%u (offset=%u \n", matchIndex, + current - matchIndex); + if ((dictIssue == dictSmall) && (matchIndex < prefixIdxLimit)) { + continue; + } /* match outside of valid area */ assert(matchIndex < current); - if ((tableType != byU16) && - (matchIndex + MAX_DISTANCE < current)) - continue; /* too far */ - if (tableType == byU16) - assert((current - matchIndex) <= - MAX_DISTANCE); /* too_far presumed impossible with - byU16 */ + if (((tableType != byU16) || + (LZ4_DISTANCE_MAX < LZ4_DISTANCE_ABSOLUTE_MAX)) && + (matchIndex + LZ4_DISTANCE_MAX < current)) { + continue; + } /* too far */ + assert( + (current - matchIndex) <= + LZ4_DISTANCE_MAX); /* match now expected within distance */ if (LZ4_read32(match) == LZ4_read32(ip)) { if (maybe_extMem) @@ -1037,36 +1314,41 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( } /* Catch up */ - while (((ip > anchor) & (match > lowLimit)) && - (unlikely(ip[-1] == match[-1]))) { - ip--; - match--; + filledIp = ip; + assert(ip > anchor); /* this is always true as ip has been advanced + before entering the main loop */ + if ((match > lowLimit) && unlikely(ip[-1] == match[-1])) { + do { + ip--; + match--; + } while (((ip > anchor) & (match > lowLimit)) && + (unlikely(ip[-1] == match[-1]))); } /* Encode Literals */ { unsigned const litLength = (unsigned)(ip - anchor); - token = op++; - if ((outputLimited == + if ((outputDirective == limitedOutput) && /* Check output buffer overflow */ (unlikely(op + litLength + (2 + 1 + LASTLITERALS) + (litLength / 255) > - olimit))) - return 0; - if ((outputLimited == fillOutput) && - (unlikely( - op + (litLength + 240) / 255 /* litlen */ + - litLength /* literals */ + 2 /* offset */ + - 1 /* token */ + MFLIMIT - MINMATCH - /* min last literals so last match is <= end - MFLIMIT */ - > olimit))) { + olimit))) { + return 0; /* cannot compress within `dst` budget. 
Stored indexes + in hash table are nonetheless fine */ + } + if ((outputDirective == fillOutput) && + (unlikely(op + (litLength + 240) / 255 /* litlen */ + + litLength /* literals */ + 2 /* offset */ + + 1 /* token */ + MFLIMIT - + MINMATCH /* min last literals so last match is <= + end - MFLIMIT */ + > olimit))) { op--; goto _last_literals; } if (litLength >= RUN_MASK) { - int len = (int)litLength - RUN_MASK; - + unsigned len = litLength - RUN_MASK; *token = (RUN_MASK << ML_BITS); for (; len >= 255; len -= 255) *op++ = 255; @@ -1076,7 +1358,7 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( *token = (BYTE)(litLength << ML_BITS); /* Copy Literals */ - LZ4_wildCopy(op, anchor, op + litLength); + LZ4_wildCopy8(op, anchor, op + litLength); op += litLength; DEBUGLOG(6, "seq.start:%i, literals=%u, match.start:%i", (int)(anchor - (const BYTE *)source), litLength, @@ -1095,9 +1377,10 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( * higher 4-bits for literal length supposed already written */ - if ((outputLimited == fillOutput) && - (op + 2 /* offset */ + 1 /* token */ + MFLIMIT - MINMATCH - /* min last literals so last match is <= end - MFLIMIT */ + if ((outputDirective == fillOutput) && + (op + 2 /* offset */ + 1 /* token */ + MFLIMIT - + MINMATCH /* min last literals so last match is <= end - MFLIMIT + */ > olimit)) { /* the match was too close to the end, rewind and go to last * literals */ @@ -1109,14 +1392,14 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( if (maybe_extMem) { /* static test */ DEBUGLOG(6, " with offset=%u (ext if > %i)", offset, (int)(ip - (const BYTE *)source)); - assert(offset <= MAX_DISTANCE && offset > 0); + assert(offset <= LZ4_DISTANCE_MAX && offset > 0); LZ4_writeLE16(op, (U16)offset); op += 2; } else { DEBUGLOG(6, " with offset=%u (same segment)", (U32)(ip - match)); - assert(ip - match <= MAX_DISTANCE); + assert(ip - match <= LZ4_DISTANCE_MAX); LZ4_writeLE16(op, (U16)(ip - match)); op += 2; } @@ -1129,12 +1412,11 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( dictDirective == usingDictCtx) && (lowLimit == dictionary) /* match within extDict */) { const BYTE *limit = ip + (dictEnd - match); - assert(dictEnd > match); if (limit > matchlimit) limit = matchlimit; matchCode = LZ4_count(ip + MINMATCH, match + MINMATCH, limit); - ip += MINMATCH + matchCode; + ip += (size_t)matchCode + MINMATCH; if (ip == limit) { unsigned const more = LZ4_count(limit, (const BYTE *)source, matchlimit); @@ -1148,24 +1430,43 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( else { matchCode = LZ4_count(ip + MINMATCH, match + MINMATCH, matchlimit); - ip += MINMATCH + matchCode; + ip += (size_t)matchCode + MINMATCH; DEBUGLOG(6, " with matchLength=%u", matchCode + MINMATCH); } - if ((outputLimited) && /* Check output buffer overflow */ - (unlikely(op + (1 + LASTLITERALS) + (matchCode >> 8) > + if ((outputDirective) && /* Check output buffer overflow */ + (unlikely(op + (1 + LASTLITERALS) + (matchCode + 240) / 255 > olimit))) { - if (outputLimited == limitedOutput) - return 0; - if (outputLimited == fillOutput) { + if (outputDirective == fillOutput) { /* Match description too long : reduce it */ U32 newMatchCode = 15 /* in token */ - 1 /* to avoid needing a zero byte */ + - ((U32)(olimit - op) - 2 - 1 - LASTLITERALS) * 255; + ((U32)(olimit - op) - 1 - LASTLITERALS) * 255; ip -= matchCode - newMatchCode; + assert(newMatchCode < matchCode); matchCode = newMatchCode; + if (unlikely(ip <= filledIp)) { + /* We have already filled up to filledIp so if ip ends + * up less than filledIp we have positions in the 
hash + * table beyond the current position. This is a problem + * if we reuse the hash table. So we have to remove + * these positions from the hash table. + */ + const BYTE *ptr; + DEBUGLOG(5, "Clearing %u positions", + (U32)(filledIp - ip)); + for (ptr = ip; ptr <= filledIp; ++ptr) { + U32 const h = LZ4_hashPosition(ptr, tableType); + LZ4_clearHash(h, cctx->hashTable, tableType); + } + } + } + else { + assert(outputDirective == limitedOutput); + return 0; /* cannot compress within `dst` budget. Stored + indexes in hash table are nonetheless fine */ } } if (matchCode >= ML_MASK) { @@ -1183,6 +1484,9 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( else *token += (BYTE)(matchCode); } + /* Ensure we have enough space for the last literals. */ + assert( + !(outputDirective == fillOutput && op + 1 + LASTLITERALS > olimit)); anchor = ip; @@ -1191,14 +1495,23 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( break; /* Fill table */ - LZ4_putPosition(ip - 2, cctx->hashTable, tableType, base); + { + U32 const h = LZ4_hashPosition(ip - 2, tableType); + if (tableType == byPtr) { + LZ4_putPositionOnHash(ip - 2, h, cctx->hashTable, byPtr); + } + else { + U32 const idx = (U32)((ip - 2) - base); + LZ4_putIndexOnHash(idx, h, cctx->hashTable, tableType); + } + } /* Test next position */ if (tableType == byPtr) { - match = LZ4_getPosition(ip, cctx->hashTable, tableType, base); - LZ4_putPosition(ip, cctx->hashTable, tableType, base); - if ((match + MAX_DISTANCE >= ip) && + match = LZ4_getPosition(ip, cctx->hashTable, tableType); + LZ4_putPosition(ip, cctx->hashTable, tableType); + if ((match + LZ4_DISTANCE_MAX >= ip) && (LZ4_read32(match) == LZ4_read32(ip))) { token = op++; *token = 0; @@ -1214,6 +1527,7 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( if (dictDirective == usingDictCtx) { if (matchIndex < startIndex) { /* there was no match, try the dictionary */ + assert(tableType == byU32); matchIndex = LZ4_getIndexOnHash(h, dictCtx->hashTable, byU32); match = dictBase + matchIndex; @@ -1229,6 +1543,7 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( } else if (dictDirective == usingExtDict) { if (matchIndex < startIndex) { + assert(dictBase); match = dictBase + matchIndex; lowLimit = dictionary; /* required for match length counter */ @@ -1246,9 +1561,10 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( assert(matchIndex < current); if (((dictIssue == dictSmall) ? (matchIndex >= prefixIdxLimit) : 1) && - ((tableType == byU16) + (((tableType == byU16) && + (LZ4_DISTANCE_MAX == LZ4_DISTANCE_ABSOLUTE_MAX)) ? 1 - : (matchIndex + MAX_DISTANCE >= current)) && + : (matchIndex + LZ4_DISTANCE_MAX >= current)) && (LZ4_read32(match) == LZ4_read32(ip))) { token = op++; *token = 0; @@ -1269,20 +1585,24 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( /* Encode Last Literals */ { size_t lastRun = (size_t)(iend - anchor); - - if ((outputLimited) && /* Check output buffer overflow */ + if ((outputDirective) && /* Check output buffer overflow */ (op + lastRun + 1 + ((lastRun + 255 - RUN_MASK) / 255) > olimit)) { - if (outputLimited == fillOutput) { + if (outputDirective == fillOutput) { /* adapt lastRun to fill 'dst' */ - lastRun = (olimit - op) - 1; - lastRun -= (lastRun + 240) / 255; + assert(olimit >= op); + lastRun = (size_t)(olimit - op) - 1 /*token*/; + lastRun -= (lastRun + 256 - RUN_MASK) / + 256; /*additional length tokens*/ + } + else { + assert(outputDirective == limitedOutput); + return 0; /* cannot compress within `dst` budget. 
Stored indexes + in hash table are nonetheless fine */ } - if (outputLimited == limitedOutput) - return 0; } + DEBUGLOG(6, "Final literal run : %i literals", (int)lastRun); if (lastRun >= RUN_MASK) { size_t accumulator = lastRun - RUN_MASK; - *op++ = RUN_MASK << ML_BITS; for (; accumulator >= 255; accumulator -= 255) *op++ = 255; @@ -1291,28 +1611,72 @@ LZ4_FORCE_INLINE int LZ4_compress_generic( else { *op++ = (BYTE)(lastRun << ML_BITS); } - memcpy(op, anchor, lastRun); + LZ4_memcpy(op, anchor, lastRun); ip = anchor + lastRun; op += lastRun; } - if (outputLimited == fillOutput) { + if (outputDirective == fillOutput) { *inputConsumed = (int)(((const char *)ip) - source); } + result = (int)(((char *)op) - dest); + assert(result > 0); DEBUGLOG(5, "LZ4_compress_generic: compressed %i bytes into %i bytes", - inputSize, (int)(((char *)op) - dest)); - return (int)(((char *)op) - dest); + inputSize, result); + return result; +} + +/** LZ4_compress_generic() : + * inlined, to ensure branches are decided at compilation time; + * takes care of src == (NULL, 0) + * and forward the rest to LZ4_compress_generic_validated */ +LZ4_FORCE_INLINE int LZ4_compress_generic( + LZ4_stream_t_internal *const cctx, const char *const src, char *const dst, + const int srcSize, + int *inputConsumed, /* only written when outputDirective == fillOutput */ + const int dstCapacity, const limitedOutput_directive outputDirective, + const tableType_t tableType, const dict_directive dictDirective, + const dictIssue_directive dictIssue, const int acceleration) +{ + DEBUGLOG(5, "LZ4_compress_generic: srcSize=%i, dstCapacity=%i", srcSize, + dstCapacity); + + if ((U32)srcSize > (U32)LZ4_MAX_INPUT_SIZE) { + return 0; + } /* Unsupported srcSize, too large (or negative) */ + if (srcSize == 0) { /* src == NULL supported if srcSize == 0 */ + if (outputDirective != notLimited && dstCapacity <= 0) + return 0; /* no output, can't write anything */ + DEBUGLOG(5, "Generating an empty block"); + assert(outputDirective == notLimited || dstCapacity >= 1); + assert(dst != NULL); + dst[0] = 0; + if (outputDirective == fillOutput) { + assert(inputConsumed != NULL); + *inputConsumed = 0; + } + return 1; + } + assert(src != NULL); + + return LZ4_compress_generic_validated( + cctx, src, dst, srcSize, + inputConsumed, /* only written into if outputDirective == fillOutput */ + dstCapacity, outputDirective, tableType, dictDirective, dictIssue, + acceleration); } int LZ4_compress_fast_extState(void *state, const char *source, char *dest, int inputSize, int maxOutputSize, int acceleration) { - LZ4_stream_t_internal *ctx = &((LZ4_stream_t *)state)->internal_donotuse; - + LZ4_stream_t_internal *const ctx = + &LZ4_initStream(state, sizeof(LZ4_stream_t))->internal_donotuse; + assert(ctx != NULL); if (acceleration < 1) - acceleration = ACCELERATION_DEFAULT; - LZ4_resetStream((LZ4_stream_t *)state); + acceleration = LZ4_ACCELERATION_DEFAULT; + if (acceleration > LZ4_ACCELERATION_MAX) + acceleration = LZ4_ACCELERATION_MAX; if (maxOutputSize >= LZ4_compressBound(inputSize)) { if (inputSize < LZ4_64Klimit) { return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, @@ -1321,7 +1685,7 @@ int LZ4_compress_fast_extState(void *state, const char *source, char *dest, } else { const tableType_t tableType = - ((sizeof(void *) == 4) && ((uptrval)source > MAX_DISTANCE)) + ((sizeof(void *) == 4) && ((uptrval)source > LZ4_DISTANCE_MAX)) ? 
byPtr : byU32; return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, 0, @@ -1331,14 +1695,13 @@ int LZ4_compress_fast_extState(void *state, const char *source, char *dest, } else { if (inputSize < LZ4_64Klimit) { - ; return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, byU16, noDict, noDictIssue, acceleration); } else { const tableType_t tableType = - ((sizeof(void *) == 4) && ((uptrval)source > MAX_DISTANCE)) + ((sizeof(void *) == 4) && ((uptrval)source > LZ4_DISTANCE_MAX)) ? byPtr : byU32; return LZ4_compress_generic(ctx, source, dest, inputSize, NULL, @@ -1361,15 +1724,17 @@ int LZ4_compress_fast_extState_fastReset(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int acceleration) { - LZ4_stream_t_internal *ctx = &((LZ4_stream_t *)state)->internal_donotuse; - + LZ4_stream_t_internal *const ctx = + &((LZ4_stream_t *)state)->internal_donotuse; if (acceleration < 1) - acceleration = ACCELERATION_DEFAULT; + acceleration = LZ4_ACCELERATION_DEFAULT; + if (acceleration > LZ4_ACCELERATION_MAX) + acceleration = LZ4_ACCELERATION_MAX; + assert(ctx != NULL); if (dstCapacity >= LZ4_compressBound(srcSize)) { if (srcSize < LZ4_64Klimit) { const tableType_t tableType = byU16; - LZ4_prepareTable(ctx, srcSize, tableType); if (ctx->currentOffset) { return LZ4_compress_generic(ctx, src, dst, srcSize, NULL, 0, @@ -1384,7 +1749,7 @@ int LZ4_compress_fast_extState_fastReset(void *state, const char *src, } else { const tableType_t tableType = - ((sizeof(void *) == 4) && ((uptrval)src > MAX_DISTANCE)) + ((sizeof(void *) == 4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? byPtr : byU32; LZ4_prepareTable(ctx, srcSize, tableType); @@ -1396,7 +1761,6 @@ int LZ4_compress_fast_extState_fastReset(void *state, const char *src, else { if (srcSize < LZ4_64Klimit) { const tableType_t tableType = byU16; - LZ4_prepareTable(ctx, srcSize, tableType); if (ctx->currentOffset) { return LZ4_compress_generic( @@ -1411,7 +1775,7 @@ int LZ4_compress_fast_extState_fastReset(void *state, const char *src, } else { const tableType_t tableType = - ((sizeof(void *) == 4) && ((uptrval)src > MAX_DISTANCE)) + ((sizeof(void *) == 4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? 
byPtr : byU32; LZ4_prepareTable(ctx, srcSize, tableType); @@ -1422,23 +1786,21 @@ int LZ4_compress_fast_extState_fastReset(void *state, const char *src, } } -int LZ4_compress_fast(const char *source, char *dest, int inputSize, - int maxOutputSize, int acceleration) +int LZ4_compress_fast(const char *src, char *dest, int srcSize, int dstCapacity, + int acceleration) { int result; - #if (LZ4_HEAPMODE) - LZ4_stream_t *ctxPtr = - ALLOC(sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ - + LZ4_stream_t *const ctxPtr = (LZ4_stream_t *)ALLOC( + sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ if (ctxPtr == NULL) return 0; #else LZ4_stream_t ctx; LZ4_stream_t *const ctxPtr = &ctx; #endif - result = LZ4_compress_fast_extState(ctxPtr, source, dest, inputSize, - maxOutputSize, acceleration); + result = LZ4_compress_fast_extState(ctxPtr, src, dest, srcSize, dstCapacity, + acceleration); #if (LZ4_HEAPMODE) FREEMEM(ctxPtr); @@ -1446,83 +1808,78 @@ int LZ4_compress_fast(const char *source, char *dest, int inputSize, return result; } -int LZ4_compress_default(const char *source, char *dest, int inputSize, - int maxOutputSize) -{ - return LZ4_compress_fast(source, dest, inputSize, maxOutputSize, 1); -} - -/* hidden debug function */ -/* strangely enough, gcc generates faster code when this function is - * uncommented, even if unused */ -int LZ4_compress_fast_force(const char *source, char *dest, int inputSize, - int maxOutputSize, int acceleration) +int LZ4_compress_default(const char *src, char *dst, int srcSize, + int dstCapacity) { - LZ4_stream_t ctx; - - LZ4_resetStream(&ctx); - - if (inputSize < LZ4_64Klimit) - return LZ4_compress_generic(&ctx.internal_donotuse, source, dest, - inputSize, NULL, maxOutputSize, - limitedOutput, byU16, noDict, noDictIssue, - acceleration); - else - return LZ4_compress_generic( - &ctx.internal_donotuse, source, dest, inputSize, NULL, - maxOutputSize, limitedOutput, sizeof(void *) == 8 ? byU32 : byPtr, - noDict, noDictIssue, acceleration); + return LZ4_compress_fast(src, dst, srcSize, dstCapacity, 1); } /* Note!: This function leaves the stream in an unclean/broken state! * It is not safe to subsequently use the same state with a _fastReset() or * _continue() call without resetting it. */ -static int LZ4_compress_destSize_extState(LZ4_stream_t *state, const char *src, - char *dst, int *srcSizePtr, - int targetDstSize) +static int LZ4_compress_destSize_extState_internal(LZ4_stream_t *state, + const char *src, char *dst, + int *srcSizePtr, + int targetDstSize, + int acceleration) { - LZ4_resetStream(state); + void *const s = LZ4_initStream(state, sizeof(*state)); + assert(s != NULL); + (void)s; if (targetDstSize >= LZ4_compressBound( *srcSizePtr)) { /* compression success is guaranteed */ return LZ4_compress_fast_extState(state, src, dst, *srcSizePtr, - targetDstSize, 1); + targetDstSize, acceleration); } else { if (*srcSizePtr < LZ4_64Klimit) { - return LZ4_compress_generic( - &state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, - targetDstSize, fillOutput, byU16, noDict, noDictIssue, 1); + return LZ4_compress_generic(&state->internal_donotuse, src, dst, + *srcSizePtr, srcSizePtr, targetDstSize, + fillOutput, byU16, noDict, noDictIssue, + acceleration); } else { - tableType_t const tableType = - ((sizeof(void *) == 4) && ((uptrval)src > MAX_DISTANCE)) + tableType_t const addrMode = + ((sizeof(void *) == 4) && ((uptrval)src > LZ4_DISTANCE_MAX)) ? 
byPtr : byU32; - return LZ4_compress_generic( - &state->internal_donotuse, src, dst, *srcSizePtr, srcSizePtr, - targetDstSize, fillOutput, tableType, noDict, noDictIssue, 1); + return LZ4_compress_generic(&state->internal_donotuse, src, dst, + *srcSizePtr, srcSizePtr, targetDstSize, + fillOutput, addrMode, noDict, + noDictIssue, acceleration); } } } +int LZ4_compress_destSize_extState(void *state, const char *src, char *dst, + int *srcSizePtr, int targetDstSize, + int acceleration) +{ + int const r = LZ4_compress_destSize_extState_internal( + (LZ4_stream_t *)state, src, dst, srcSizePtr, targetDstSize, + acceleration); + /* clean the state on exit */ + LZ4_initStream(state, sizeof(LZ4_stream_t)); + return r; +} + int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, int targetDstSize) { #if (LZ4_HEAPMODE) - LZ4_stream_t *ctx = (LZ4_stream_t *)ALLOC( + LZ4_stream_t *const ctx = (LZ4_stream_t *)ALLOC( sizeof(LZ4_stream_t)); /* malloc-calloc always properly aligned */ - if (ctx == NULL) return 0; #else LZ4_stream_t ctxBody; - LZ4_stream_t *ctx = &ctxBody; + LZ4_stream_t *const ctx = &ctxBody; #endif - int result = LZ4_compress_destSize_extState(ctx, src, dst, srcSizePtr, - targetDstSize); + int result = LZ4_compress_destSize_extState_internal( + ctx, src, dst, srcSizePtr, targetDstSize, 1); #if (LZ4_HEAPMODE) FREEMEM(ctx); @@ -1534,25 +1891,53 @@ int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, * Streaming functions ********************************/ +#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) LZ4_stream_t *LZ4_createStream(void) { - LZ4_stream_t *lz4s = (LZ4_stream_t *)ALLOC(sizeof(LZ4_stream_t)); - - LZ4_STATIC_ASSERT( - LZ4_STREAMSIZE >= - sizeof(LZ4_stream_t_internal)); /* A compilation error here means - LZ4_STREAMSIZE is not large enough */ + LZ4_stream_t *const lz4s = (LZ4_stream_t *)ALLOC(sizeof(LZ4_stream_t)); + LZ4_STATIC_ASSERT(sizeof(LZ4_stream_t) >= sizeof(LZ4_stream_t_internal)); DEBUGLOG(4, "LZ4_createStream %p", lz4s); if (lz4s == NULL) return NULL; - LZ4_resetStream(lz4s); + LZ4_initStream(lz4s, sizeof(*lz4s)); return lz4s; } +#endif +static size_t LZ4_stream_t_alignment(void) +{ +#if LZ4_ALIGN_TEST + typedef struct { + char c; + LZ4_stream_t t; + } t_a; + return sizeof(t_a) - sizeof(LZ4_stream_t); +#else + return 1; /* effectively disabled */ +#endif +} + +LZ4_stream_t *LZ4_initStream(void *buffer, size_t size) +{ + DEBUGLOG(5, "LZ4_initStream"); + if (buffer == NULL) { + return NULL; + } + if (size < sizeof(LZ4_stream_t)) { + return NULL; + } + if (!LZ4_isAligned(buffer, LZ4_stream_t_alignment())) + return NULL; + MEM_INIT(buffer, 0, sizeof(LZ4_stream_t_internal)); + return (LZ4_stream_t *)buffer; +} + +/* resetStream is now deprecated, + * prefer initStream() which is more general */ void LZ4_resetStream(LZ4_stream_t *LZ4_stream) { DEBUGLOG(5, "LZ4_resetStream (ctx:%p)", LZ4_stream); - MEM_INIT(LZ4_stream, 0, sizeof(LZ4_stream_t)); + MEM_INIT(LZ4_stream, 0, sizeof(LZ4_stream_t_internal)); } void LZ4_resetStream_fast(LZ4_stream_t *ctx) @@ -1560,6 +1945,7 @@ void LZ4_resetStream_fast(LZ4_stream_t *ctx) LZ4_prepareTable(&(ctx->internal_donotuse), 0, byU32); } +#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) int LZ4_freeStream(LZ4_stream_t *LZ4_stream) { if (!LZ4_stream) @@ -1568,15 +1954,18 @@ int LZ4_freeStream(LZ4_stream_t *LZ4_stream) FREEMEM(LZ4_stream); return (0); } +#endif +typedef enum { _ld_fast, _ld_slow } LoadDict_mode_e; #define HASH_UNIT sizeof(reg_t) -int LZ4_loadDict(LZ4_stream_t 
*LZ4_dict, const char *dictionary, int dictSize) +int LZ4_loadDict_internal(LZ4_stream_t *LZ4_dict, const char *dictionary, + int dictSize, LoadDict_mode_e _ld) { - LZ4_stream_t_internal *dict = &LZ4_dict->internal_donotuse; + LZ4_stream_t_internal *const dict = &LZ4_dict->internal_donotuse; const tableType_t tableType = byU32; const BYTE *p = (const BYTE *)dictionary; const BYTE *const dictEnd = p + dictSize; - const BYTE *base; + U32 idx32; DEBUGLOG(4, "LZ4_loadDict (%i bytes from %p into %p)", dictSize, dictionary, LZ4_dict); @@ -1593,56 +1982,97 @@ int LZ4_loadDict(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize) * there are only valid offsets in the window, which allows an optimization * in LZ4_compress_fast_continue() where it uses noDictIssue even when the * dictionary isn't a full 64k. */ - - if ((dictEnd - p) > 64 KB) - p = dictEnd - 64 KB; - base = dictEnd - 64 KB - dict->currentOffset; - dict->dictionary = p; - dict->dictSize = (U32)(dictEnd - p); dict->currentOffset += 64 KB; - dict->tableType = tableType; if (dictSize < (int)HASH_UNIT) { return 0; } + if ((dictEnd - p) > 64 KB) + p = dictEnd - 64 KB; + dict->dictionary = p; + dict->dictSize = (U32)(dictEnd - p); + dict->tableType = (U32)tableType; + idx32 = dict->currentOffset - dict->dictSize; + while (p <= dictEnd - HASH_UNIT) { - LZ4_putPosition(p, dict->hashTable, tableType, base); + U32 const h = LZ4_hashPosition(p, tableType); + /* Note: overwriting => favors positions end of dictionary */ + LZ4_putIndexOnHash(idx32, h, dict->hashTable, tableType); p += 3; + idx32 += 3; + } + + if (_ld == _ld_slow) { + /* Fill hash table with additional references, to improve compression + * capability */ + p = dict->dictionary; + idx32 = dict->currentOffset - dict->dictSize; + while (p <= dictEnd - HASH_UNIT) { + U32 const h = LZ4_hashPosition(p, tableType); + U32 const limit = dict->currentOffset - 64 KB; + if (LZ4_getIndexOnHash(h, dict->hashTable, tableType) <= limit) { + /* Note: not overwriting => favors positions beginning of + * dictionary */ + LZ4_putIndexOnHash(idx32, h, dict->hashTable, tableType); + } + p++; + idx32++; + } } - return dict->dictSize; + return (int)dict->dictSize; +} + +int LZ4_loadDict(LZ4_stream_t *LZ4_dict, const char *dictionary, int dictSize) +{ + return LZ4_loadDict_internal(LZ4_dict, dictionary, dictSize, _ld_fast); } -void LZ4_attach_dictionary(LZ4_stream_t *working_stream, - const LZ4_stream_t *dictionary_stream) +int LZ4_loadDictSlow(LZ4_stream_t *LZ4_dict, const char *dictionary, + int dictSize) { - if (dictionary_stream != NULL) { + return LZ4_loadDict_internal(LZ4_dict, dictionary, dictSize, _ld_slow); +} + +void LZ4_attach_dictionary(LZ4_stream_t *workingStream, + const LZ4_stream_t *dictionaryStream) +{ + const LZ4_stream_t_internal *dictCtx = + (dictionaryStream == NULL) ? NULL + : &(dictionaryStream->internal_donotuse); + + DEBUGLOG(4, "LZ4_attach_dictionary (%p, %p, size %u)", workingStream, + dictionaryStream, dictCtx != NULL ? dictCtx->dictSize : 0); + + if (dictCtx != NULL) { /* If the current offset is zero, we will never look in the * external dictionary context, since there is no value a table * entry can take that indicate a miss. In that case, we need * to bump the offset to something non-zero. 
*/ - if (working_stream->internal_donotuse.currentOffset == 0) { - working_stream->internal_donotuse.currentOffset = 64 KB; + if (workingStream->internal_donotuse.currentOffset == 0) { + workingStream->internal_donotuse.currentOffset = 64 KB; + } + + /* Don't actually attach an empty dictionary. + */ + if (dictCtx->dictSize == 0) { + dictCtx = NULL; } - working_stream->internal_donotuse.dictCtx = - &(dictionary_stream->internal_donotuse); - } - else { - working_stream->internal_donotuse.dictCtx = NULL; } + workingStream->internal_donotuse.dictCtx = dictCtx; } static void LZ4_renormDictT(LZ4_stream_t_internal *LZ4_dict, int nextSize) { - if (LZ4_dict->currentOffset + nextSize > + assert(nextSize >= 0); + if (LZ4_dict->currentOffset + (unsigned)nextSize > 0x80000000) { /* potential ptrdiff_t overflow (32-bits mode) */ /* rescale hash table */ U32 const delta = LZ4_dict->currentOffset - 64 KB; const BYTE *dictEnd = LZ4_dict->dictionary + LZ4_dict->dictSize; int i; - DEBUGLOG(4, "LZ4_renormDictT"); for (i = 0; i < LZ4_HASH_SIZE_U32; i++) { if (LZ4_dict->hashTable[i] < delta) @@ -1662,45 +2092,55 @@ int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, int acceleration) { const tableType_t tableType = byU32; - LZ4_stream_t_internal *streamPtr = &LZ4_stream->internal_donotuse; - const BYTE *dictEnd = streamPtr->dictionary + streamPtr->dictSize; + LZ4_stream_t_internal *const streamPtr = &LZ4_stream->internal_donotuse; + const char *dictEnd = + streamPtr->dictSize + ? (const char *)streamPtr->dictionary + streamPtr->dictSize + : NULL; - DEBUGLOG(5, "LZ4_compress_fast_continue (inputSize=%i)", inputSize); + DEBUGLOG(5, "LZ4_compress_fast_continue (inputSize=%i, dictSize=%u)", + inputSize, streamPtr->dictSize); - if (streamPtr->initCheck) - return 0; /* Uninitialized structure detected */ - LZ4_renormDictT(streamPtr, inputSize); /* avoid index overflow */ + LZ4_renormDictT(streamPtr, inputSize); /* fix index overflow */ if (acceleration < 1) - acceleration = ACCELERATION_DEFAULT; + acceleration = LZ4_ACCELERATION_DEFAULT; + if (acceleration > LZ4_ACCELERATION_MAX) + acceleration = LZ4_ACCELERATION_MAX; /* invalidate tiny dictionaries */ - if ((streamPtr->dictSize - 1 < 4) /* intentional underflow */ - && (dictEnd != (const BYTE *)source)) { + if ((streamPtr->dictSize < 4) /* tiny dictionary : not enough for a hash */ + && (dictEnd != source) /* prefix mode */ + && (inputSize > 0) /* tolerance : don't lose history, in case next + invocation would use prefix mode */ + && (streamPtr->dictCtx == NULL) /* usingDictCtx */ + ) { DEBUGLOG( 5, "LZ4_compress_fast_continue: dictSize(%u) at addr:%p is too small", streamPtr->dictSize, streamPtr->dictionary); + /* remove dictionary existence from history, to employ faster prefix + * mode */ streamPtr->dictSize = 0; streamPtr->dictionary = (const BYTE *)source; - dictEnd = (const BYTE *)source; + dictEnd = source; } /* Check overlapping input/dictionary space */ { - const BYTE *sourceEnd = (const BYTE *)source + inputSize; - - if ((sourceEnd > streamPtr->dictionary) && (sourceEnd < dictEnd)) { + const char *const sourceEnd = source + inputSize; + if ((sourceEnd > (const char *)streamPtr->dictionary) && + (sourceEnd < dictEnd)) { streamPtr->dictSize = (U32)(dictEnd - sourceEnd); if (streamPtr->dictSize > 64 KB) streamPtr->dictSize = 64 KB; if (streamPtr->dictSize < 4) streamPtr->dictSize = 0; - streamPtr->dictionary = dictEnd - streamPtr->dictSize; + streamPtr->dictionary = (const BYTE *)dictEnd - streamPtr->dictSize; } } /* prefix mode : 
source data follows dictionary */ - if (dictEnd == (const BYTE *)source) { + if (dictEnd == source) { if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) return LZ4_compress_generic(streamPtr, source, dest, inputSize, @@ -1717,7 +2157,6 @@ int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, /* external dictionary mode */ { int result; - if (streamPtr->dictCtx) { /* We depend here on the fact that dictCtx'es (produced by * LZ4_loadDict) guarantee that their tables contain no references @@ -1730,7 +2169,7 @@ int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, * cost to copy the dictionary's tables into the active context, * so that the compression loop is only looking into one table. */ - memcpy(streamPtr, streamPtr->dictCtx, sizeof(LZ4_stream_t)); + LZ4_memcpy(streamPtr, streamPtr->dictCtx, sizeof(*streamPtr)); result = LZ4_compress_generic( streamPtr, source, dest, inputSize, NULL, maxOutputSize, limitedOutput, tableType, usingExtDict, noDictIssue, @@ -1743,7 +2182,7 @@ int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, acceleration); } } - else { + else { /* small data <= 4 KB */ if ((streamPtr->dictSize < 64 KB) && (streamPtr->dictSize < streamPtr->currentOffset)) { result = LZ4_compress_generic( @@ -1768,7 +2207,7 @@ int LZ4_compress_fast_continue(LZ4_stream_t *LZ4_stream, const char *source, int LZ4_compress_forceExtDict(LZ4_stream_t *LZ4_dict, const char *source, char *dest, int srcSize) { - LZ4_stream_t_internal *streamPtr = &LZ4_dict->internal_donotuse; + LZ4_stream_t_internal *const streamPtr = &LZ4_dict->internal_donotuse; int result; LZ4_renormDictT(streamPtr, srcSize); @@ -1793,22 +2232,33 @@ int LZ4_compress_forceExtDict(LZ4_stream_t *LZ4_dict, const char *source, /*! LZ4_saveDict() : * If previously compressed data block is not guaranteed to remain available at - * its memory location, save it into a safer place (char* safeBuffer). Note : - * you don't need to call LZ4_loadDict() afterwards, dictionary is immediately - * usable, you can therefore call LZ4_compress_fast_continue(). Return : saved - * dictionary size in bytes (necessarily <= dictSize), or 0 if error. + * its memory location, save it into a safer place (char* safeBuffer). Note : no + * need to call LZ4_loadDict() afterwards, dictionary is immediately usable, one + * can therefore call LZ4_compress_fast_continue() right after. + * @return : saved dictionary size in bytes (necessarily <= dictSize), or 0 if + * error. 
*/ int LZ4_saveDict(LZ4_stream_t *LZ4_dict, char *safeBuffer, int dictSize) { LZ4_stream_t_internal *const dict = &LZ4_dict->internal_donotuse; - const BYTE *const previousDictEnd = dict->dictionary + dict->dictSize; - if ((U32)dictSize > 64 KB) - dictSize = 64 KB; /* useless to define a dictionary > 64 KB */ - if ((U32)dictSize > dict->dictSize) - dictSize = dict->dictSize; + DEBUGLOG(5, "LZ4_saveDict : dictSize=%i, safeBuffer=%p", dictSize, + safeBuffer); - memmove(safeBuffer, previousDictEnd - dictSize, dictSize); + if ((U32)dictSize > 64 KB) { + dictSize = 64 KB; + } /* useless to define a dictionary > 64 KB */ + if ((U32)dictSize > dict->dictSize) { + dictSize = (int)dict->dictSize; + } + + if (safeBuffer == NULL) + assert(dictSize == 0); + if (dictSize > 0) { + const BYTE *const previousDictEnd = dict->dictionary + dict->dictSize; + assert(dict->dictionary); + LZ4_memmove(safeBuffer, previousDictEnd - dictSize, (size_t)dictSize); + } dict->dictionary = (const BYTE *)safeBuffer; dict->dictSize = (U32)dictSize; @@ -1816,316 +2266,797 @@ int LZ4_saveDict(LZ4_stream_t *LZ4_dict, char *safeBuffer, int dictSize) return dictSize; } -/*-***************************** +/*-******************************* * Decompression functions - *******************************/ + ********************************/ + +typedef enum { decode_full_block = 0, partial_decode = 1 } earlyEnd_directive; + +#undef MIN +#define MIN(a, b) ((a) < (b) ? (a) : (b)) + +/* variant for decompress_unsafe() + * does not know end of input + * presumes input is well formed + * note : will consume at least one byte */ +static size_t read_long_length_no_check(const BYTE **pp) +{ + size_t b, l = 0; + do { + b = **pp; + (*pp)++; + l += b; + } while (b == 255); + DEBUGLOG(6, "read_long_length_no_check: +length=%zu using %zu input bytes", + l, l / 255 + 1) + return l; +} + +/* core decoder variant for LZ4_decompress_fast*() + * for legacy support only : these entry points are deprecated. + * - Presumes input is correctly formed (no defense vs malformed inputs) + * - Does not know input size (presume input buffer is "large enough") + * - Decompress a full block (only) + * @return : nb of bytes read from input. + * Note : this variant is not optimized for speed, just for maintenance. 
+ * the goal is to remove support of decompress_fast*() variants by v2.0 + **/ +LZ4_FORCE_INLINE int LZ4_decompress_unsafe_generic( + const BYTE *const istart, BYTE *const ostart, int decompressedSize, + + size_t prefixSize, + const BYTE *const dictStart, /* only if dict==usingExtDict */ + const size_t dictSize /* note: =0 if dictStart==NULL */ +) +{ + const BYTE *ip = istart; + BYTE *op = (BYTE *)ostart; + BYTE *const oend = ostart + decompressedSize; + const BYTE *const prefixStart = ostart - prefixSize; + + DEBUGLOG(5, "LZ4_decompress_unsafe_generic"); + if (dictStart == NULL) + assert(dictSize == 0); + + while (1) { + /* start new sequence */ + unsigned token = *ip++; + + /* literals */ + { + size_t ll = token >> ML_BITS; + if (ll == 15) { + /* long literal length */ + ll += read_long_length_no_check(&ip); + } + if ((size_t)(oend - op) < ll) + return -1; /* output buffer overflow */ + LZ4_memmove(op, ip, ll); /* support in-place decompression */ + op += ll; + ip += ll; + if ((size_t)(oend - op) < MFLIMIT) { + if (op == oend) + break; /* end of block */ + DEBUGLOG( + 5, + "invalid: literals end at distance %zi from end of block", + oend - op); + /* incorrect end of block : + * last match must start at least MFLIMIT==12 bytes before end + * of output block */ + return -1; + } + } + + /* match */ + { + size_t ml = token & 15; + size_t const offset = LZ4_readLE16(ip); + ip += 2; + + if (ml == 15) { + /* long literal length */ + ml += read_long_length_no_check(&ip); + } + ml += MINMATCH; + + if ((size_t)(oend - op) < ml) + return -1; /* output buffer overflow */ + + { + const BYTE *match = op - offset; + + /* out of range */ + if (offset > (size_t)(op - prefixStart) + dictSize) { + DEBUGLOG(6, "offset out of range"); + return -1; + } + + /* check special case : extDict */ + if (offset > (size_t)(op - prefixStart)) { + /* extDict scenario */ + const BYTE *const dictEnd = dictStart + dictSize; + const BYTE *extMatch = + dictEnd - (offset - (size_t)(op - prefixStart)); + size_t const extml = (size_t)(dictEnd - extMatch); + if (extml > ml) { + /* match entirely within extDict */ + LZ4_memmove(op, extMatch, ml); + op += ml; + ml = 0; + } + else { + /* match split between extDict & prefix */ + LZ4_memmove(op, extMatch, extml); + op += extml; + ml -= extml; + } + match = prefixStart; + } + + /* match copy - slow variant, supporting overlap copy */ + { + size_t u; + for (u = 0; u < ml; u++) { + op[u] = match[u]; + } + } + } + op += ml; + if ((size_t)(oend - op) < LASTLITERALS) { + DEBUGLOG( + 5, "invalid: match ends at distance %zi from end of block", + oend - op); + /* incorrect end of block : + * last match must stop at least LASTLITERALS==5 bytes before + * end of output block */ + return -1; + } + } /* match */ + } /* main loop */ + return (int)(ip - istart); +} + +/* Read the variable-length literal or match length. + * + * @ip : input pointer + * @ilimit : position after which if length is not decoded, the input is + *necessarily corrupted. + * @initial_check - check ip >= ipmax before start of loop. Returns + *initial_error if so. + * @error (output) - error code. Must be set to 0 before call. 
+ **/ +typedef size_t Rvl_t; +static const Rvl_t rvl_error = (Rvl_t)(-1); +LZ4_FORCE_INLINE Rvl_t read_variable_length(const BYTE **ip, const BYTE *ilimit, + int initial_check) +{ + Rvl_t s, length = 0; + assert(ip != NULL); + assert(*ip != NULL); + assert(ilimit != NULL); + if (initial_check && unlikely((*ip) >= ilimit)) { /* read limit reached */ + return rvl_error; + } + s = **ip; + (*ip)++; + length += s; + if (unlikely((*ip) > ilimit)) { /* read limit reached */ + return rvl_error; + } + /* accumulator overflow detection (32-bit mode only) */ + if ((sizeof(length) < 8) && unlikely(length > ((Rvl_t)(-1) / 2))) { + return rvl_error; + } + if (likely(s != 255)) + return length; + do { + s = **ip; + (*ip)++; + length += s; + if (unlikely((*ip) > ilimit)) { /* read limit reached */ + return rvl_error; + } + /* accumulator overflow detection (32-bit mode only) */ + if ((sizeof(length) < 8) && unlikely(length > ((Rvl_t)(-1) / 2))) { + return rvl_error; + } + } while (s == 255); + + return length; +} + /*! LZ4_decompress_generic() : * This generic decompression function covers all use cases. * It shall be instantiated several times, using different sets of directives. * Note that it is important for performance that this function really get * inlined, in order to remove useless branches during compilation optimization. */ -LZ4_FORCE_O2_GCC_PPC64LE LZ4_FORCE_INLINE int LZ4_decompress_generic( +LZ4_FORCE_INLINE int LZ4_decompress_generic( const char *const src, char *const dst, int srcSize, - int outputSize, /* If endOnInput==endOnInputSize, this value is - `dstCapacity` */ - int endOnInput, /* endOnOutputSize, endOnInputSize */ - int partialDecoding, /* full, partial */ - int targetOutputSize, /* only used if partialDecoding==partial */ - int dict, /* noDict, withPrefix64k, usingExtDict */ + int outputSize, /* If endOnInput==endOnInputSize, this value is + `dstCapacity` */ + + earlyEnd_directive partialDecoding, /* full, partial */ + dict_directive dict, /* noDict, withPrefix64k, usingExtDict */ const BYTE *const lowPrefix, /* always <= dst, == dst when no prefix */ const BYTE *const dictStart, /* only if dict==usingExtDict */ const size_t dictSize /* note : = 0 if noDict */ ) { - const BYTE *ip = (const BYTE *)src; - const BYTE *const iend = ip + srcSize; - - BYTE *op = (BYTE *)dst; - BYTE *const oend = op + outputSize; - BYTE *cpy; - BYTE *oexit = op + targetOutputSize; - - const BYTE *const dictEnd = (const BYTE *)dictStart + dictSize; - const unsigned inc32table[8] = {0, 1, 2, 1, 0, 4, 4, 4}; - const int dec64table[8] = {0, 0, 0, -1, -4, 1, 2, 3}; - - const int safeDecode = (endOnInput == endOnInputSize); - const int checkOffset = ((safeDecode) && (dictSize < (int)(64 KB))); - - /* Set up the "end" pointers for the shortcut. */ - const BYTE *const shortiend = - iend - (endOnInput ? 14 : 8) /*maxLL */ - 2 /*offset */; - const BYTE *const shortoend = - oend - (endOnInput ? 14 : 8) /*maxLL */ - 18 /*maxML */; - - DEBUGLOG(5, "LZ4_decompress_generic (srcSize:%i)", srcSize); - - /* Special cases */ - if ((partialDecoding) && (oexit > oend - MFLIMIT)) - oexit = - oend - - MFLIMIT; /* targetOutputSize too high => just decode everything */ - if ((endOnInput) && (unlikely(outputSize == 0))) - return ((srcSize == 1) && (*ip == 0)) ? 0 - : -1; /* Empty output buffer */ - if ((!endOnInput) && (unlikely(outputSize == 0))) - return (*ip == 0 ? 
1 : -1); - if ((endOnInput) && unlikely(srcSize == 0)) + if ((src == NULL) || (outputSize < 0)) { return -1; + } + + { + const BYTE *ip = (const BYTE *)src; + const BYTE *const iend = ip + srcSize; + + BYTE *op = (BYTE *)dst; + BYTE *const oend = op + outputSize; + BYTE *cpy; + + const BYTE *const dictEnd = + (dictStart == NULL) ? NULL : dictStart + dictSize; + + const int checkOffset = (dictSize < (int)(64 KB)); + + /* Set up the "end" pointers for the shortcut. */ + const BYTE *const shortiend = iend - 14 /*maxLL*/ - 2 /*offset*/; + const BYTE *const shortoend = oend - 14 /*maxLL*/ - 18 /*maxML*/; - /* Main Loop : decode sequences */ - while (1) { const BYTE *match; size_t offset; + unsigned token; + size_t length; - unsigned const token = *ip++; - size_t length = token >> ML_BITS; /* literal length */ + DEBUGLOG(5, "LZ4_decompress_generic (srcSize:%i, dstSize:%i)", srcSize, + outputSize); - assert(!endOnInput || ip <= iend); /* ip < iend before the increment */ + /* Special cases */ + assert(lowPrefix <= op); + if (unlikely(outputSize == 0)) { + /* Empty output buffer */ + if (partialDecoding) + return 0; + return ((srcSize == 1) && (*ip == 0)) ? 0 : -1; + } + if (unlikely(srcSize == 0)) { + return -1; + } - /* A two-stage shortcut for the most common case: - * 1) If the literal length is 0..14, and there is enough space, - * enter the shortcut and copy 16 bytes on behalf of the literals - * (in the fast mode, only 8 bytes can be safely copied this way). - * 2) Further if the match length is 4..18, copy 18 bytes in a similar - * manner; but we ensure that there's enough space in the output for - * those 18 bytes earlier, upon entering the shortcut (in other words, - * there is a combined check for both stages). - */ - if ((endOnInput ? length != RUN_MASK : length <= 8) - /* strictly "less than" on input, to re-enter the loop with at least - one byte */ - && likely((endOnInput ? ip < shortiend : 1) & (op <= shortoend))) { - /* Copy the literals */ - memcpy(op, ip, endOnInput ? 16 : 8); - op += length; - ip += length; - - /* The second stage: prepare for match copying, decode full info. - * If it doesn't work out, the info won't be wasted. */ - length = token & ML_MASK; /* match length */ + /* LZ4_FAST_DEC_LOOP: + * designed for modern OoO performance cpus, + * where copying reliably 32-bytes is preferable to an unpredictable + * branch. note : fast loop may show a regression for some client arm + * chips. 
*/ +#if LZ4_FAST_DEC_LOOP + if ((oend - op) < FASTLOOP_SAFE_DISTANCE) { + DEBUGLOG(6, "move to safe decode loop"); + goto safe_decode; + } + + /* Fast loop : decode sequences as long as output < + * oend-FASTLOOP_SAFE_DISTANCE */ + DEBUGLOG(6, "using fast decode loop"); + while (1) { + /* Main fastloop assertion: We can always wildcopy + * FASTLOOP_SAFE_DISTANCE */ + assert(oend - op >= FASTLOOP_SAFE_DISTANCE); + assert(ip < iend); + token = *ip++; + length = token >> ML_BITS; /* literal length */ + DEBUGLOG(7, "blockPos%6u: litLength token = %u", + (unsigned)(op - (BYTE *)dst), (unsigned)length); + + /* decode literal length */ + if (length == RUN_MASK) { + size_t const addl = + read_variable_length(&ip, iend - RUN_MASK, 1); + if (addl == rvl_error) { + DEBUGLOG(6, "error reading long literal length"); + goto _output_error; + } + length += addl; + if (unlikely((uptrval)(op) + length < (uptrval)(op))) { + goto _output_error; + } /* overflow detection */ + if (unlikely((uptrval)(ip) + length < (uptrval)(ip))) { + goto _output_error; + } /* overflow detection */ + + /* copy literals */ + LZ4_STATIC_ASSERT(MFLIMIT >= WILDCOPYLENGTH); + if ((op + length > oend - 32) || (ip + length > iend - 32)) { + goto safe_literal_copy; + } + LZ4_wildCopy32(op, ip, op + length); + ip += length; + op += length; + } + else if (ip <= iend - (16 + 1 /*max lit + offset + nextToken*/)) { + /* We don't need to check oend, since we check it once for each + * loop below */ + DEBUGLOG(7, "copy %u bytes in a 16-bytes stripe", + (unsigned)length); + /* Literals can only be <= 14, but hope compilers optimize + * better when copy by a register size */ + LZ4_memcpy(op, ip, 16); + ip += length; + op += length; + } + else { + goto safe_literal_copy; + } + + /* get offset */ offset = LZ4_readLE16(ip); ip += 2; + DEBUGLOG(6, "blockPos%6u: offset = %u", + (unsigned)(op - (BYTE *)dst), (unsigned)offset); match = op - offset; + assert(match <= op); /* overflow check */ + + /* get matchlength */ + length = token & ML_MASK; + DEBUGLOG(7, " match length token = %u (len==%u)", (unsigned)length, + (unsigned)length + MINMATCH); + + if (length == ML_MASK) { + size_t const addl = + read_variable_length(&ip, iend - LASTLITERALS + 1, 0); + if (addl == rvl_error) { + DEBUGLOG(5, "error reading long match length"); + goto _output_error; + } + length += addl; + length += MINMATCH; + DEBUGLOG(7, " long match length == %u", (unsigned)length); + if (unlikely((uptrval)(op) + length < (uptrval)op)) { + goto _output_error; + } /* overflow detection */ + if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) { + goto safe_match_copy; + } + } + else { + length += MINMATCH; + if (op + length >= oend - FASTLOOP_SAFE_DISTANCE) { + DEBUGLOG(7, "moving to safe_match_copy (ml==%u)", + (unsigned)length); + goto safe_match_copy; + } - /* Do not deal with overlapping matches. */ - if ((length != ML_MASK) && (offset >= 8) && - (dict == withPrefix64k || match >= lowPrefix)) { - /* Copy the match. */ - memcpy(op + 0, match + 0, 8); - memcpy(op + 8, match + 8, 8); - memcpy(op + 16, match + 16, 2); - op += length + MINMATCH; - /* Both stages worked, load the next token. 
*/ - continue; + /* Fastpath check: skip LZ4_wildCopy32 when true */ + if ((dict == withPrefix64k) || (match >= lowPrefix)) { + if (offset >= 8) { + assert(match >= lowPrefix); + assert(match <= op); + assert(op + 18 <= oend); + + LZ4_memcpy(op, match, 8); + LZ4_memcpy(op + 8, match + 8, 8); + LZ4_memcpy(op + 16, match + 16, 2); + op += length; + continue; + } + } } - /* The second stage didn't work out, but the info is ready. - * Propel it right to the point of match copying. */ - goto _copy_match; - } + if (checkOffset && (unlikely(match + dictSize < lowPrefix))) { + DEBUGLOG(5, "Error : pos=%zi, offset=%zi => outside buffers", + op - lowPrefix, op - match); + goto _output_error; + } + /* match starting within external dictionary */ + if ((dict == usingExtDict) && (match < lowPrefix)) { + assert(dictEnd != NULL); + if (unlikely(op + length > oend - LASTLITERALS)) { + if (partialDecoding) { + DEBUGLOG(7, "partialDecoding: dictionary match, close " + "to dstEnd"); + length = MIN(length, (size_t)(oend - op)); + } + else { + DEBUGLOG(6, "end-of-block condition violated") + goto _output_error; + } + } - /* decode literal length */ - if (length == RUN_MASK) { - unsigned s; + if (length <= (size_t)(lowPrefix - match)) { + /* match fits entirely within external dictionary : just + * copy */ + LZ4_memmove(op, dictEnd - (lowPrefix - match), length); + op += length; + } + else { + /* match stretches into both external dictionary and current + * block */ + size_t const copySize = (size_t)(lowPrefix - match); + size_t const restSize = length - copySize; + LZ4_memcpy(op, dictEnd - copySize, copySize); + op += copySize; + if (restSize > + (size_t)(op - lowPrefix)) { /* overlap copy */ + BYTE *const endOfMatch = op + restSize; + const BYTE *copyFrom = lowPrefix; + while (op < endOfMatch) { + *op++ = *copyFrom++; + } + } + else { + LZ4_memcpy(op, lowPrefix, restSize); + op += restSize; + } + } + continue; + } - if (unlikely(endOnInput ? ip >= iend - RUN_MASK : 0)) - goto _output_error; /* overflow detection */ - do { - s = *ip++; - length += s; - } while (likely(endOnInput ? ip < iend - RUN_MASK : 1) & - (s == 255)); - if ((safeDecode) && - unlikely((uptrval)(op) + length < (uptrval)(op))) - goto _output_error; /* overflow detection */ - if ((safeDecode) && - unlikely((uptrval)(ip) + length < (uptrval)(ip))) - goto _output_error; /* overflow detection */ - } + /* copy match within block */ + cpy = op + length; - /* copy literals */ - cpy = op + length; - if (((endOnInput) && - ((cpy > (partialDecoding ? 
oexit : oend - MFLIMIT)) || - (ip + length > iend - (2 + 1 + LASTLITERALS)))) || - ((!endOnInput) && (cpy > oend - WILDCOPYLENGTH))) { - if (partialDecoding) { - if (cpy > oend) - goto _output_error; /* Error : write attempt beyond end of - output buffer */ - if ((endOnInput) && (ip + length > iend)) - goto _output_error; /* Error : read attempt beyond end of - input buffer */ + assert((op <= oend) && (oend - op >= 32)); + if (unlikely(offset < 16)) { + LZ4_memcpy_using_offset(op, match, cpy, offset); } else { - if ((!endOnInput) && (cpy != oend)) - goto _output_error; /* Error : block decoding must stop - exactly there */ - if ((endOnInput) && ((ip + length != iend) || (cpy > oend))) - goto _output_error; /* Error : input must be consumed */ + LZ4_wildCopy32(op, match, cpy); } - memcpy(op, ip, length); - ip += length; - op += length; - break; /* Necessarily EOF, due to parsing restrictions */ + + op = cpy; /* wildcopy correction */ } - LZ4_wildCopy(op, ip, cpy); - ip += length; - op = cpy; + safe_decode: +#endif - /* get offset */ - offset = LZ4_readLE16(ip); - ip += 2; - match = op - offset; + /* Main Loop : decode remaining sequences where output < + * FASTLOOP_SAFE_DISTANCE */ + DEBUGLOG(6, "using safe decode loop"); + while (1) { + assert(ip < iend); + token = *ip++; + length = token >> ML_BITS; /* literal length */ + DEBUGLOG(7, "blockPos%6u: litLength token = %u", + (unsigned)(op - (BYTE *)dst), (unsigned)length); + + /* A two-stage shortcut for the most common case: + * 1) If the literal length is 0..14, and there is enough space, + * enter the shortcut and copy 16 bytes on behalf of the literals + * (in the fast mode, only 8 bytes can be safely copied this way). + * 2) Further if the match length is 4..18, copy 18 bytes in a + * similar manner; but we ensure that there's enough space in the + * output for those 18 bytes earlier, upon entering the shortcut (in + * other words, there is a combined check for both stages). + */ + if ((length != RUN_MASK) + /* strictly "less than" on input, to re-enter the loop with at + least one byte */ + && likely((ip < shortiend) & (op <= shortoend))) { + /* Copy the literals */ + LZ4_memcpy(op, ip, 16); + op += length; + ip += length; + + /* The second stage: prepare for match copying, decode full + * info. If it doesn't work out, the info won't be wasted. */ + length = token & ML_MASK; /* match length */ + DEBUGLOG(7, "blockPos%6u: matchLength token = %u (len=%u)", + (unsigned)(op - (BYTE *)dst), (unsigned)length, + (unsigned)length + 4); + offset = LZ4_readLE16(ip); + ip += 2; + match = op - offset; + assert(match <= op); /* check overflow */ + + /* Do not deal with overlapping matches. */ + if ((length != ML_MASK) && (offset >= 8) && + (dict == withPrefix64k || match >= lowPrefix)) { + /* Copy the match. */ + LZ4_memcpy(op + 0, match + 0, 8); + LZ4_memcpy(op + 8, match + 8, 8); + LZ4_memcpy(op + 16, match + 16, 2); + op += length + MINMATCH; + /* Both stages worked, load the next token. */ + continue; + } - /* get matchlength */ - length = token & ML_MASK; + /* The second stage didn't work out, but the info is ready. + * Propel it right to the point of match copying. 
*/ + goto _copy_match; + } - _copy_match: - if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) - goto _output_error; /* Error : offset outside buffers */ - LZ4_write32( - op, - (U32) - offset); /* costs ~1%; silence an msan warning when offset==0 */ + /* decode literal length */ + if (length == RUN_MASK) { + size_t const addl = + read_variable_length(&ip, iend - RUN_MASK, 1); + if (addl == rvl_error) { + goto _output_error; + } + length += addl; + if (unlikely((uptrval)(op) + length < (uptrval)(op))) { + goto _output_error; + } /* overflow detection */ + if (unlikely((uptrval)(ip) + length < (uptrval)(ip))) { + goto _output_error; + } /* overflow detection */ + } - if (length == ML_MASK) { - unsigned s; +#if LZ4_FAST_DEC_LOOP + safe_literal_copy: +#endif + /* copy literals */ + cpy = op + length; + + LZ4_STATIC_ASSERT(MFLIMIT >= WILDCOPYLENGTH); + if ((cpy > oend - MFLIMIT) || + (ip + length > iend - (2 + 1 + LASTLITERALS))) { + /* We've either hit the input parsing restriction or the output + * parsing restriction. In the normal scenario, decoding a full + * block, it must be the last sequence, otherwise it's an error + * (invalid input or dimensions). In partialDecoding scenario, + * it's necessary to ensure there is no buffer overflow. + */ + if (partialDecoding) { + /* Since we are partial decoding we may be in this block + * because of the output parsing restriction, which is not + * valid since the output buffer is allowed to be + * undersized. + */ + DEBUGLOG(7, "partialDecoding: copying literals, close to " + "input or output end") + DEBUGLOG(7, "partialDecoding: literal length = %u", + (unsigned)length); + DEBUGLOG( + 7, "partialDecoding: remaining space in dstBuffer : %i", + (int)(oend - op)); + DEBUGLOG( + 7, "partialDecoding: remaining space in srcBuffer : %i", + (int)(iend - ip)); + /* Finishing in the middle of a literals segment, + * due to lack of input. + */ + if (ip + length > iend) { + length = (size_t)(iend - ip); + cpy = op + length; + } + /* Finishing in the middle of a literals segment, + * due to lack of output space. + */ + if (cpy > oend) { + cpy = oend; + assert(op <= oend); + length = (size_t)(oend - op); + } + } + else { + /* We must be on the last sequence (or invalid) because of + * the parsing limitations so check that we exactly consume + * the input and don't overrun the output buffer. + */ + if ((ip + length != iend) || (cpy > oend)) { + DEBUGLOG(5, "should have been last run of literals") + DEBUGLOG(5, "ip(%p) + length(%i) = %p != iend (%p)", ip, + (int)length, ip + length, iend); + DEBUGLOG(5, "or cpy(%p) > (oend-MFLIMIT)(%p)", cpy, + oend - MFLIMIT); + DEBUGLOG(5, + "after writing %u bytes / %i bytes available", + (unsigned)(op - (BYTE *)dst), outputSize); + goto _output_error; + } + } + LZ4_memmove(op, ip, + length); /* supports overlapping memory regions, for + in-place decompression scenarios */ + ip += length; + op += length; + /* Necessarily EOF when !partialDecoding. + * When partialDecoding, it is EOF if we've either + * filled the output buffer or + * can't proceed with reading an offset for following match. 
+ */ + if (!partialDecoding || (cpy == oend) || (ip >= (iend - 2))) { + break; + } + } + else { + LZ4_wildCopy8(op, ip, + cpy); /* can overwrite up to 8 bytes beyond cpy */ + ip += length; + op = cpy; + } - do { - s = *ip++; - if ((endOnInput) && (ip > iend - LASTLITERALS)) + /* get offset */ + offset = LZ4_readLE16(ip); + ip += 2; + match = op - offset; + + /* get matchlength */ + length = token & ML_MASK; + DEBUGLOG(7, "blockPos%6u: matchLength token = %u", + (unsigned)(op - (BYTE *)dst), (unsigned)length); + + _copy_match: + if (length == ML_MASK) { + size_t const addl = + read_variable_length(&ip, iend - LASTLITERALS + 1, 0); + if (addl == rvl_error) { goto _output_error; - length += s; - } while (s == 255); - if ((safeDecode) && unlikely((uptrval)(op) + length < (uptrval)op)) - goto _output_error; /* overflow detection */ - } - length += MINMATCH; + } + length += addl; + if (unlikely((uptrval)(op) + length < (uptrval)op)) + goto _output_error; /* overflow detection */ + } + length += MINMATCH; - /* check external dictionary */ - if ((dict == usingExtDict) && (match < lowPrefix)) { - if (unlikely(op + length > oend - LASTLITERALS)) - goto _output_error; /* doesn't respect parsing restriction */ +#if LZ4_FAST_DEC_LOOP + safe_match_copy: +#endif + if ((checkOffset) && (unlikely(match + dictSize < lowPrefix))) + goto _output_error; /* Error : offset outside buffers */ + /* match starting within external dictionary */ + if ((dict == usingExtDict) && (match < lowPrefix)) { + assert(dictEnd != NULL); + if (unlikely(op + length > oend - LASTLITERALS)) { + if (partialDecoding) + length = MIN(length, (size_t)(oend - op)); + else + goto _output_error; /* doesn't respect parsing + restriction */ + } - if (length <= (size_t)(lowPrefix - match)) { - /* match can be copied as a single segment from external - * dictionary */ - memmove(op, dictEnd - (lowPrefix - match), length); - op += length; + if (length <= (size_t)(lowPrefix - match)) { + /* match fits entirely within external dictionary : just + * copy */ + LZ4_memmove(op, dictEnd - (lowPrefix - match), length); + op += length; + } + else { + /* match stretches into both external dictionary and current + * block */ + size_t const copySize = (size_t)(lowPrefix - match); + size_t const restSize = length - copySize; + LZ4_memcpy(op, dictEnd - copySize, copySize); + op += copySize; + if (restSize > + (size_t)(op - lowPrefix)) { /* overlap copy */ + BYTE *const endOfMatch = op + restSize; + const BYTE *copyFrom = lowPrefix; + while (op < endOfMatch) + *op++ = *copyFrom++; + } + else { + LZ4_memcpy(op, lowPrefix, restSize); + op += restSize; + } + } + continue; } - else { - /* match encompass external dictionary and current block */ - size_t const copySize = (size_t)(lowPrefix - match); - size_t const restSize = length - copySize; - - memcpy(op, dictEnd - copySize, copySize); - op += copySize; - if (restSize > (size_t)(op - lowPrefix)) { /* overlap copy */ - BYTE *const endOfMatch = op + restSize; - const BYTE *copyFrom = lowPrefix; - - while (op < endOfMatch) - *op++ = *copyFrom++; + assert(match >= lowPrefix); + + /* copy match within block */ + cpy = op + length; + + /* partialDecoding : may end anywhere within the block */ + assert(op <= oend); + if (partialDecoding && (cpy > oend - MATCH_SAFEGUARD_DISTANCE)) { + size_t const mlen = MIN(length, (size_t)(oend - op)); + const BYTE *const matchEnd = match + mlen; + BYTE *const copyEnd = op + mlen; + if (matchEnd > op) { /* overlap copy */ + while (op < copyEnd) { + *op++ = *match++; + } } else { - 
memcpy(op, lowPrefix, restSize); - op += restSize; + LZ4_memcpy(op, match, mlen); } + op = copyEnd; + if (op == oend) { + break; + } + continue; } - continue; - } - /* copy match within block */ - cpy = op + length; - if (unlikely(offset < 8)) { - op[0] = match[0]; - op[1] = match[1]; - op[2] = match[2]; - op[3] = match[3]; - match += inc32table[offset]; - memcpy(op + 4, match, 4); - match -= dec64table[offset]; - } - else { - memcpy(op, match, 8); - match += 8; - } - op += 8; - - if (unlikely(cpy > oend - 12)) { - BYTE *const oCopyLimit = oend - (WILDCOPYLENGTH - 1); - - if (cpy > oend - LASTLITERALS) - goto _output_error; /* Error : last LASTLITERALS bytes must be - literals (uncompressed) */ - if (op < oCopyLimit) { - LZ4_wildCopy(op, match, oCopyLimit); - match += oCopyLimit - op; - op = oCopyLimit; + if (unlikely(offset < 8)) { + LZ4_write32(op, 0); /* silence msan warning when offset==0 */ + op[0] = match[0]; + op[1] = match[1]; + op[2] = match[2]; + op[3] = match[3]; + match += inc32table[offset]; + LZ4_memcpy(op + 4, match, 4); + match -= dec64table[offset]; } - while (op < cpy) - *op++ = *match++; - } - else { - memcpy(op, match, 8); - if (length > 16) - LZ4_wildCopy(op + 8, match + 8, cpy); + else { + LZ4_memcpy(op, match, 8); + match += 8; + } + op += 8; + + if (unlikely(cpy > oend - MATCH_SAFEGUARD_DISTANCE)) { + BYTE *const oCopyLimit = oend - (WILDCOPYLENGTH - 1); + if (cpy > oend - LASTLITERALS) { + goto _output_error; + } /* Error : last LASTLITERALS bytes must be literals + (uncompressed) */ + if (op < oCopyLimit) { + LZ4_wildCopy8(op, match, oCopyLimit); + match += oCopyLimit - op; + op = oCopyLimit; + } + while (op < cpy) { + *op++ = *match++; + } + } + else { + LZ4_memcpy(op, match, 8); + if (length > 16) { + LZ4_wildCopy8(op + 8, match + 8, cpy); + } + } + op = cpy; /* wildcopy correction */ } - op = cpy; /* correction */ - } - /* end of decoding */ - if (endOnInput) + /* end of decoding */ + DEBUGLOG(5, "decoded %i bytes", (int)(((char *)op) - dst)); return (int)(((char *)op) - dst); /* Nb of output bytes decoded */ - else - return (int)(((const char *)ip) - src); /* Nb of input bytes read */ - /* Overflow error detected */ -_output_error: - return (int)(-(((const char *)ip) - src)) - 1; + /* Overflow error detected */ + _output_error: + return (int)(-(((const char *)ip) - src)) - 1; + } } /*===== Instantiate the API decoding functions. 
=====*/ -LZ4_FORCE_O2_GCC_PPC64LE +LZ4_FORCE_O2 int LZ4_decompress_safe(const char *source, char *dest, int compressedSize, int maxDecompressedSize) { return LZ4_decompress_generic(source, dest, compressedSize, - maxDecompressedSize, endOnInputSize, full, 0, + maxDecompressedSize, decode_full_block, noDict, (BYTE *)dest, NULL, 0); } -LZ4_FORCE_O2_GCC_PPC64LE -int LZ4_decompress_safe_partial(const char *source, char *dest, - int compressedSize, int targetOutputSize, - int maxDecompressedSize) +LZ4_FORCE_O2 +int LZ4_decompress_safe_partial(const char *src, char *dst, int compressedSize, + int targetOutputSize, int dstCapacity) { - return LZ4_decompress_generic( - source, dest, compressedSize, maxDecompressedSize, endOnInputSize, - partial, targetOutputSize, noDict, (BYTE *)dest, NULL, 0); + dstCapacity = MIN(targetOutputSize, dstCapacity); + return LZ4_decompress_generic(src, dst, compressedSize, dstCapacity, + partial_decode, noDict, (BYTE *)dst, NULL, 0); } -LZ4_FORCE_O2_GCC_PPC64LE +LZ4_FORCE_O2 int LZ4_decompress_fast(const char *source, char *dest, int originalSize) { - return LZ4_decompress_generic(source, dest, 0, originalSize, - endOnOutputSize, full, 0, withPrefix64k, - (BYTE *)dest - 64 KB, NULL, 0); + DEBUGLOG(5, "LZ4_decompress_fast"); + return LZ4_decompress_unsafe_generic((const BYTE *)source, (BYTE *)dest, + originalSize, 0, NULL, 0); } /*===== Instantiate a few more decoding cases, used more than once. =====*/ -LZ4_FORCE_O2_GCC_PPC64LE /* Exported, an obsolete API function. */ +LZ4_FORCE_O2 /* Exported, an obsolete API function. */ int LZ4_decompress_safe_withPrefix64k(const char *source, char *dest, int compressedSize, int maxOutputSize) { return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, - endOnInputSize, full, 0, withPrefix64k, + decode_full_block, withPrefix64k, + (BYTE *)dest - 64 KB, NULL, 0); +} + +LZ4_FORCE_O2 +static int LZ4_decompress_safe_partial_withPrefix64k(const char *source, + char *dest, + int compressedSize, + int targetOutputSize, + int dstCapacity) +{ + dstCapacity = MIN(targetOutputSize, dstCapacity); + return LZ4_decompress_generic(source, dest, compressedSize, dstCapacity, + partial_decode, withPrefix64k, (BYTE *)dest - 64 KB, NULL, 0); } @@ -2133,43 +3064,62 @@ LZ4_FORCE_O2_GCC_PPC64LE /* Exported, an obsolete API function. */ int LZ4_decompress_fast_withPrefix64k(const char *source, char *dest, int originalSize) { - /* LZ4_decompress_fast doesn't validate match offsets, - * and thus serves well with any prefixed dictionary. 
*/ - return LZ4_decompress_fast(source, dest, originalSize); + return LZ4_decompress_unsafe_generic((const BYTE *)source, (BYTE *)dest, + originalSize, 64 KB, NULL, 0); } -LZ4_FORCE_O2_GCC_PPC64LE +LZ4_FORCE_O2 static int LZ4_decompress_safe_withSmallPrefix(const char *source, char *dest, int compressedSize, int maxOutputSize, size_t prefixSize) { return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, - endOnInputSize, full, 0, noDict, + decode_full_block, noDict, (BYTE *)dest - prefixSize, NULL, 0); } -LZ4_FORCE_O2_GCC_PPC64LE /* Exported under another name, for tests/fullbench.c - */ -#define LZ4_decompress_safe_extDict LZ4_decompress_safe_forceExtDict - int - LZ4_decompress_safe_extDict(const char *source, char *dest, - int compressedSize, int maxOutputSize, - const void *dictStart, size_t dictSize) +LZ4_FORCE_O2 +static int LZ4_decompress_safe_partial_withSmallPrefix( + const char *source, char *dest, int compressedSize, int targetOutputSize, + int dstCapacity, size_t prefixSize) +{ + dstCapacity = MIN(targetOutputSize, dstCapacity); + return LZ4_decompress_generic(source, dest, compressedSize, dstCapacity, + partial_decode, noDict, + (BYTE *)dest - prefixSize, NULL, 0); +} + +LZ4_FORCE_O2 +int LZ4_decompress_safe_forceExtDict(const char *source, char *dest, + int compressedSize, int maxOutputSize, + const void *dictStart, size_t dictSize) { - return LZ4_decompress_generic( - source, dest, compressedSize, maxOutputSize, endOnInputSize, full, 0, - usingExtDict, (BYTE *)dest, (const BYTE *)dictStart, dictSize); + DEBUGLOG(5, "LZ4_decompress_safe_forceExtDict"); + return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, + decode_full_block, usingExtDict, (BYTE *)dest, + (const BYTE *)dictStart, dictSize); +} + +LZ4_FORCE_O2 +int LZ4_decompress_safe_partial_forceExtDict( + const char *source, char *dest, int compressedSize, int targetOutputSize, + int dstCapacity, const void *dictStart, size_t dictSize) +{ + dstCapacity = MIN(targetOutputSize, dstCapacity); + return LZ4_decompress_generic(source, dest, compressedSize, dstCapacity, + partial_decode, usingExtDict, (BYTE *)dest, + (const BYTE *)dictStart, dictSize); } -LZ4_FORCE_O2_GCC_PPC64LE +LZ4_FORCE_O2 static int LZ4_decompress_fast_extDict(const char *source, char *dest, int originalSize, const void *dictStart, size_t dictSize) { - return LZ4_decompress_generic( - source, dest, 0, originalSize, endOnOutputSize, full, 0, usingExtDict, - (BYTE *)dest, (const BYTE *)dictStart, dictSize); + return LZ4_decompress_unsafe_generic((const BYTE *)source, (BYTE *)dest, + originalSize, 0, + (const BYTE *)dictStart, dictSize); } /* The "double dictionary" mode, for use with e.g. 
ring buffers: the first part @@ -2183,37 +3133,30 @@ int LZ4_decompress_safe_doubleDict(const char *source, char *dest, size_t dictSize) { return LZ4_decompress_generic(source, dest, compressedSize, maxOutputSize, - endOnInputSize, full, 0, usingExtDict, + decode_full_block, usingExtDict, (BYTE *)dest - prefixSize, (const BYTE *)dictStart, dictSize); } -LZ4_FORCE_INLINE -int LZ4_decompress_fast_doubleDict(const char *source, char *dest, - int originalSize, size_t prefixSize, - const void *dictStart, size_t dictSize) -{ - return LZ4_decompress_generic( - source, dest, 0, originalSize, endOnOutputSize, full, 0, usingExtDict, - (BYTE *)dest - prefixSize, (const BYTE *)dictStart, dictSize); -} - /*===== streaming decompression functions =====*/ +#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) LZ4_streamDecode_t *LZ4_createStreamDecode(void) { - LZ4_streamDecode_t *lz4s = - (LZ4_streamDecode_t *)ALLOC_AND_ZERO(sizeof(LZ4_streamDecode_t)); - return lz4s; + LZ4_STATIC_ASSERT(sizeof(LZ4_streamDecode_t) >= + sizeof(LZ4_streamDecode_t_internal)); + return (LZ4_streamDecode_t *)ALLOC_AND_ZERO(sizeof(LZ4_streamDecode_t)); } int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream) { - if (!LZ4_stream) - return 0; /* support free on NULL */ + if (LZ4_stream == NULL) { + return 0; + } /* support free on NULL */ FREEMEM(LZ4_stream); return 0; } +#endif /*! LZ4_setStreamDecode() : * Use this function to instruct where to find the dictionary. @@ -2225,9 +3168,14 @@ int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, const char *dictionary, int dictSize) { LZ4_streamDecode_t_internal *lz4sd = &LZ4_streamDecode->internal_donotuse; - lz4sd->prefixSize = (size_t)dictSize; - lz4sd->prefixEnd = (const BYTE *)dictionary + dictSize; + if (dictSize) { + assert(dictionary != NULL); + lz4sd->prefixEnd = (const BYTE *)dictionary + dictSize; + } + else { + lz4sd->prefixEnd = (const BYTE *)dictionary; + } lz4sd->externalDict = NULL; lz4sd->extDictSize = 0; return 1; @@ -2256,14 +3204,14 @@ int LZ4_decoderRingBufferSize(int maxBlockSize) } /* - *_continue() : - These decoding functions allow decompression of multiple blocks in "streaming" - mode. Previously decoded blocks must still be available at the memory position - where they were decoded. If it's not possible, save the relevant part of - decoded data into a safe buffer, and indicate where it stands using - LZ4_setStreamDecode() - */ -LZ4_FORCE_O2_GCC_PPC64LE +*_continue() : + These decoding functions allow decompression of multiple blocks in +"streaming" mode. Previously decoded blocks must still be available at the +memory position where they were decoded. 
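
[Editor's note, not part of the vendored patch: the streaming contract described in the comment above, i.e. previously decoded blocks must remain addressable at the position where they were decoded, or be saved and registered via LZ4_setStreamDecode(), is easiest to see in a small sketch. The helper name decode_blocks_example is hypothetical; it only uses the public API declared in lz4.h. Decoding consecutive blocks into one contiguous buffer keeps each decoded block in place, so it serves as the prefix dictionary for the next block.]

#include "lz4.h"

/* Illustrative sketch (editor-supplied, assumes the lz4.h streaming API):
 * decode a sequence of chained blocks with LZ4_decompress_safe_continue().
 * A freshly created LZ4_streamDecode_t starts with no dictionary, so no
 * LZ4_setStreamDecode() call is needed when decoding into one buffer. */
int decode_blocks_example(const char *const *blocks, const int *blockSizes,
                          int nbBlocks, char *dst, int dstCapacity)
{
    LZ4_streamDecode_t *const sd = LZ4_createStreamDecode();
    int total = 0;
    int i;

    if (sd == NULL)
        return -1;
    for (i = 0; i < nbBlocks; i++) {
        /* each block's decoded output lands right after the previous one,
         * so the already-decoded data stays where it was decoded */
        const int r = LZ4_decompress_safe_continue(
            sd, blocks[i], dst + total, blockSizes[i], dstCapacity - total);
        if (r < 0) { /* malformed or truncated block */
            total = -1;
            break;
        }
        total += r;
    }
    LZ4_freeStreamDecode(sd);
    return total; /* total decoded bytes, or -1 on error */
}
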
If it's not possible, save the relevant +part of decoded data into a safe buffer, and indicate where it stands using +LZ4_setStreamDecode() +*/ +LZ4_FORCE_O2 int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *source, char *dest, int compressedSize, int maxOutputSize) @@ -2278,7 +3226,7 @@ int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, LZ4_decompress_safe(source, dest, compressedSize, maxOutputSize); if (result <= 0) return result; - lz4sd->prefixSize = result; + lz4sd->prefixSize = (size_t)result; lz4sd->prefixEnd = (BYTE *)dest + result; } else if (lz4sd->prefixEnd == (BYTE *)dest) { @@ -2295,54 +3243,58 @@ int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, lz4sd->externalDict, lz4sd->extDictSize); if (result <= 0) return result; - lz4sd->prefixSize += result; + lz4sd->prefixSize += (size_t)result; lz4sd->prefixEnd += result; } else { /* The buffer wraps around, or they're switching to another buffer. */ lz4sd->extDictSize = lz4sd->prefixSize; lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize; - result = LZ4_decompress_safe_extDict(source, dest, compressedSize, - maxOutputSize, lz4sd->externalDict, - lz4sd->extDictSize); + result = LZ4_decompress_safe_forceExtDict( + source, dest, compressedSize, maxOutputSize, lz4sd->externalDict, + lz4sd->extDictSize); if (result <= 0) return result; - lz4sd->prefixSize = result; + lz4sd->prefixSize = (size_t)result; lz4sd->prefixEnd = (BYTE *)dest + result; } return result; } -LZ4_FORCE_O2_GCC_PPC64LE -int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, - const char *source, char *dest, - int originalSize) +LZ4_FORCE_O2 int +LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, + const char *source, char *dest, int originalSize) { - LZ4_streamDecode_t_internal *lz4sd = &LZ4_streamDecode->internal_donotuse; + LZ4_streamDecode_t_internal *const lz4sd = + (assert(LZ4_streamDecode != NULL), + &LZ4_streamDecode->internal_donotuse); int result; + DEBUGLOG(5, "LZ4_decompress_fast_continue (toDecodeSize=%i)", originalSize); + assert(originalSize >= 0); + if (lz4sd->prefixSize == 0) { + DEBUGLOG(5, "first invocation : no prefix nor extDict"); assert(lz4sd->extDictSize == 0); result = LZ4_decompress_fast(source, dest, originalSize); if (result <= 0) return result; - lz4sd->prefixSize = originalSize; + lz4sd->prefixSize = (size_t)originalSize; lz4sd->prefixEnd = (BYTE *)dest + originalSize; } else if (lz4sd->prefixEnd == (BYTE *)dest) { - if (lz4sd->prefixSize >= 64 KB - 1 || lz4sd->extDictSize == 0) - result = LZ4_decompress_fast(source, dest, originalSize); - else - result = LZ4_decompress_fast_doubleDict( - source, dest, originalSize, lz4sd->prefixSize, - lz4sd->externalDict, lz4sd->extDictSize); + DEBUGLOG(5, "continue using existing prefix"); + result = LZ4_decompress_unsafe_generic( + (const BYTE *)source, (BYTE *)dest, originalSize, lz4sd->prefixSize, + lz4sd->externalDict, lz4sd->extDictSize); if (result <= 0) return result; - lz4sd->prefixSize += originalSize; + lz4sd->prefixSize += (size_t)originalSize; lz4sd->prefixEnd += originalSize; } else { + DEBUGLOG(5, "prefix becomes extDict"); lz4sd->extDictSize = lz4sd->prefixSize; lz4sd->externalDict = lz4sd->prefixEnd - lz4sd->extDictSize; result = LZ4_decompress_fast_extDict(source, dest, originalSize, @@ -2350,7 +3302,7 @@ int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, lz4sd->extDictSize); if (result <= 0) return result; - lz4sd->prefixSize = originalSize; + 
lz4sd->prefixSize = (size_t)originalSize; lz4sd->prefixEnd = (BYTE *)dest + originalSize; } @@ -2358,11 +3310,11 @@ int LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, } /* - Advanced decoding functions : - *_usingDict() : - These decoding functions work the same as "_continue" ones, - the dictionary must be explicitly provided within parameters - */ +Advanced decoding functions : +*_usingDict() : + These decoding functions work the same as "_continue" ones, + the dictionary must be explicitly provided within parameters +*/ int LZ4_decompress_safe_usingDict(const char *source, char *dest, int compressedSize, int maxOutputSize, @@ -2371,14 +3323,42 @@ int LZ4_decompress_safe_usingDict(const char *source, char *dest, if (dictSize == 0) return LZ4_decompress_safe(source, dest, compressedSize, maxOutputSize); if (dictStart + dictSize == dest) { - if (dictSize >= 64 KB - 1) + if (dictSize >= 64 KB - 1) { return LZ4_decompress_safe_withPrefix64k( source, dest, compressedSize, maxOutputSize); - return LZ4_decompress_safe_withSmallPrefix(source, dest, compressedSize, - maxOutputSize, dictSize); + } + assert(dictSize >= 0); + return LZ4_decompress_safe_withSmallPrefix( + source, dest, compressedSize, maxOutputSize, (size_t)dictSize); } - return LZ4_decompress_safe_extDict(source, dest, compressedSize, - maxOutputSize, dictStart, dictSize); + assert(dictSize >= 0); + return LZ4_decompress_safe_forceExtDict(source, dest, compressedSize, + maxOutputSize, dictStart, + (size_t)dictSize); +} + +int LZ4_decompress_safe_partial_usingDict(const char *source, char *dest, + int compressedSize, + int targetOutputSize, int dstCapacity, + const char *dictStart, int dictSize) +{ + if (dictSize == 0) + return LZ4_decompress_safe_partial(source, dest, compressedSize, + targetOutputSize, dstCapacity); + if (dictStart + dictSize == dest) { + if (dictSize >= 64 KB - 1) { + return LZ4_decompress_safe_partial_withPrefix64k( + source, dest, compressedSize, targetOutputSize, dstCapacity); + } + assert(dictSize >= 0); + return LZ4_decompress_safe_partial_withSmallPrefix( + source, dest, compressedSize, targetOutputSize, dstCapacity, + (size_t)dictSize); + } + assert(dictSize >= 0); + return LZ4_decompress_safe_partial_forceExtDict( + source, dest, compressedSize, targetOutputSize, dstCapacity, dictStart, + (size_t)dictSize); } int LZ4_decompress_fast_usingDict(const char *source, char *dest, @@ -2386,9 +3366,12 @@ int LZ4_decompress_fast_usingDict(const char *source, char *dest, int dictSize) { if (dictSize == 0 || dictStart + dictSize == dest) - return LZ4_decompress_fast(source, dest, originalSize); + return LZ4_decompress_unsafe_generic((const BYTE *)source, (BYTE *)dest, + originalSize, (size_t)dictSize, + NULL, 0); + assert(dictSize >= 0); return LZ4_decompress_fast_extDict(source, dest, originalSize, dictStart, - dictSize); + (size_t)dictSize); } /*=************************************************* @@ -2400,25 +3383,20 @@ int LZ4_compress_limitedOutput(const char *source, char *dest, int inputSize, { return LZ4_compress_default(source, dest, inputSize, maxOutputSize); } - -int LZ4_compress(const char *source, char *dest, int inputSize) +int LZ4_compress(const char *src, char *dest, int srcSize) { - return LZ4_compress_default(source, dest, inputSize, - LZ4_compressBound(inputSize)); + return LZ4_compress_default(src, dest, srcSize, LZ4_compressBound(srcSize)); } - int LZ4_compress_limitedOutput_withState(void *state, const char *src, char *dst, int srcSize, int dstSize) { return 
LZ4_compress_fast_extState(state, src, dst, srcSize, dstSize, 1); } - int LZ4_compress_withState(void *state, const char *src, char *dst, int srcSize) { return LZ4_compress_fast_extState(state, src, dst, srcSize, LZ4_compressBound(srcSize), 1); } - int LZ4_compress_limitedOutput_continue(LZ4_stream_t *LZ4_stream, const char *src, char *dst, int srcSize, int dstCapacity) @@ -2426,7 +3404,6 @@ int LZ4_compress_limitedOutput_continue(LZ4_stream_t *LZ4_stream, return LZ4_compress_fast_continue(LZ4_stream, src, dst, srcSize, dstCapacity, 1); } - int LZ4_compress_continue(LZ4_stream_t *LZ4_stream, const char *source, char *dest, int inputSize) { @@ -2435,17 +3412,15 @@ int LZ4_compress_continue(LZ4_stream_t *LZ4_stream, const char *source, } /* - These decompression functions are deprecated and should no longer be used. - They are only provided here for compatibility with older user programs. - - LZ4_uncompress is totally equivalent to LZ4_decompress_fast - - LZ4_uncompress_unknownOutputSize is totally equivalent to - LZ4_decompress_safe - */ +These decompression functions are deprecated and should no longer be used. +They are only provided here for compatibility with older user programs. +- LZ4_uncompress is totally equivalent to LZ4_decompress_fast +- LZ4_uncompress_unknownOutputSize is totally equivalent to LZ4_decompress_safe +*/ int LZ4_uncompress(const char *source, char *dest, int outputSize) { return LZ4_decompress_fast(source, dest, outputSize); } - int LZ4_uncompress_unknownOutputSize(const char *source, char *dest, int isize, int maxOutputSize) { @@ -2456,7 +3431,7 @@ int LZ4_uncompress_unknownOutputSize(const char *source, char *dest, int isize, int LZ4_sizeofStreamState(void) { - return LZ4_STREAMSIZE; + return sizeof(LZ4_stream_t); } int LZ4_resetStreamState(void *state, char *inputBuffer) @@ -2466,11 +3441,13 @@ int LZ4_resetStreamState(void *state, char *inputBuffer) return 0; } +#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) void *LZ4_create(char *inputBuffer) { (void)inputBuffer; return LZ4_createStream(); } +#endif char *LZ4_slideInputBuffer(void *state) { diff --git a/lib/gis/lz4.h b/lib/gis/lz4.h index 9ab16d35e0e..e2230ea4a54 100644 --- a/lib/gis/lz4.h +++ b/lib/gis/lz4.h @@ -1,37 +1,37 @@ /* * LZ4 - Fast LZ compression algorithm * Header File - * Copyright (C) 2011-2017, Yann Collet. - - BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - * Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following disclaimer - in the documentation and/or other materials provided with the - distribution. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - You can contact the author at : - - LZ4 homepage : http://www.lz4.org - - LZ4 source repository : https://github.com/lz4/lz4 - */ + * Copyright (C) 2011-2023, Yann Collet. + + BSD 2-Clause License (http://www.opensource.org/licenses/bsd-license.php) + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + You can contact the author at : + - LZ4 homepage : http://www.lz4.org + - LZ4 source repository : https://github.com/lz4/lz4 +*/ #if defined(__cplusplus) extern "C" { #endif @@ -45,27 +45,32 @@ extern "C" { /** Introduction - LZ4 is lossless compression algorithm, providing compression speed at 400 MB/s + LZ4 is lossless compression algorithm, providing compression speed >500 MB/s per core, scalable with multi-cores CPU. It features an extremely fast decoder, with speed in multiple GB/s per core, typically reaching RAM speed limits on multi-core systems. The LZ4 compression library provides in-memory compression and decompression - functions. Compression can be done in: + functions. It gives full buffer control to user. Compression can be done in: - a single step (described as Simple Functions) - a single step, reusing a context (described in Advanced Functions) - unbounded multiple steps (described as Streaming compression) - lz4.h provides block compression functions. It gives full buffer control to - user. Decompressing an lz4-compressed block also requires metadata (such as - compressed size). Each application is free to encode such metadata in - whichever way it wants. - - An additional format, called LZ4 frame specification - (doc/lz4_Frame_format.md), take care of encoding standard metadata alongside - LZ4-compressed blocks. If your application requires interoperability, it's - recommended to use it. A library is provided to take care of it, see - lz4frame.h. 
+ lz4.h generates and decodes LZ4-compressed blocks (doc/lz4_Block_format.md). + Decompressing such a compressed block requires additional metadata. + Exact metadata depends on exact decompression function. + For the typical case of LZ4_decompress_safe(), + metadata includes block's compressed size, and maximum bound of decompressed + size. Each application is free to encode and pass such metadata in whichever + way it wants. + + lz4.h only handle blocks, it can not generate Frames. + + Blocks are different from Frames (doc/lz4_Frame_format.md). + Frames bundle both blocks and metadata in a specified manner. + Embedding metadata is required for compressed data to be self-contained and + portable. Frame format is delivered through a companion API, declared in + lz4frame.h. The `lz4` CLI can only manage frames. */ /*^*************************************************************** @@ -87,21 +92,50 @@ extern "C" { #if defined(LZ4_DLL_EXPORT) && (LZ4_DLL_EXPORT == 1) #define LZ4LIB_API __declspec(dllexport) LZ4LIB_VISIBILITY #elif defined(LZ4_DLL_IMPORT) && (LZ4_DLL_IMPORT == 1) -#define LZ4LIB_API \ - __declspec(dllimport) \ - LZ4LIB_VISIBILITY /* It isn't required but allows generating better \ - code, saving a function pointer load from the IAT \ - and an indirect jump. */ +#define LZ4LIB_API \ + __declspec(dllimport) \ + LZ4LIB_VISIBILITY /* It isn't required but allows to generate better code, \ + saving a function pointer load from the IAT and an \ + indirect jump.*/ #else #define LZ4LIB_API LZ4LIB_VISIBILITY #endif +/*! LZ4_FREESTANDING : + * When this macro is set to 1, it enables "freestanding mode" that is + * suitable for typical freestanding environment which doesn't support + * standard C library. + * + * - LZ4_FREESTANDING is a compile-time switch. + * - It requires the following macros to be defined: + * LZ4_memcpy, LZ4_memmove, LZ4_memset. + * - It only enables LZ4/HC functions which don't use heap. + * All LZ4F_* functions are not supported. + * - See tests/freestanding.c to check its basic setup. + */ +#if defined(LZ4_FREESTANDING) && (LZ4_FREESTANDING == 1) +#define LZ4_HEAPMODE 0 +#define LZ4HC_HEAPMODE 0 +#define LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION 1 +#if !defined(LZ4_memcpy) +#error "LZ4_FREESTANDING requires macro 'LZ4_memcpy'." +#endif +#if !defined(LZ4_memset) +#error "LZ4_FREESTANDING requires macro 'LZ4_memset'." +#endif +#if !defined(LZ4_memmove) +#error "LZ4_FREESTANDING requires macro 'LZ4_memmove'." 
+#endif +#elif !defined(LZ4_FREESTANDING) +#define LZ4_FREESTANDING 0 +#endif + /*------ Version ------*/ #define LZ4_VERSION_MAJOR 1 /* for breaking interface changes */ #define LZ4_VERSION_MINOR \ - 8 /* for new (non-breaking) interface capabilities \ + 10 /* for new (non-breaking) interface capabilities \ */ -#define LZ4_VERSION_RELEASE 2 /* for tweaks, bug-fixes, or development */ +#define LZ4_VERSION_RELEASE 0 /* for tweaks, bug-fixes, or development */ #define LZ4_VERSION_NUMBER \ (LZ4_VERSION_MAJOR * 100 * 100 + LZ4_VERSION_MINOR * 100 + \ @@ -110,55 +144,84 @@ extern "C" { #define LZ4_LIB_VERSION LZ4_VERSION_MAJOR.LZ4_VERSION_MINOR.LZ4_VERSION_RELEASE #define LZ4_QUOTE(str) #str #define LZ4_EXPAND_AND_QUOTE(str) LZ4_QUOTE(str) -#define LZ4_VERSION_STRING LZ4_EXPAND_AND_QUOTE(LZ4_LIB_VERSION) - -LZ4LIB_API int LZ4_versionNumber(void); -/**< library version number; useful to check dll version */ +#define LZ4_VERSION_STRING \ + LZ4_EXPAND_AND_QUOTE(LZ4_LIB_VERSION) /* requires v1.7.3+ */ -LZ4LIB_API const char *LZ4_versionString( - void); /**< library version string; unseful to check dll version */ +LZ4LIB_API int +LZ4_versionNumber(void); /**< library version number; useful to check dll + version; requires v1.3.0+ */ +LZ4LIB_API const char * +LZ4_versionString(void); /**< library version string; useful to check dll + version; requires v1.7.5+ */ /*-************************************ - * Tuning parameter + * Tuning memory usage **************************************/ /*! * LZ4_MEMORY_USAGE : + * Can be selected at compile time, by setting LZ4_MEMORY_USAGE. * Memory usage formula : N->2^N Bytes (examples : 10 -> 1KB; 12 -> 4KB ; 16 -> - * 64KB; 20 -> 1MB; etc.) Increasing memory usage improves compression ratio - * Reduced memory usage may improve speed, thanks to cache effect - * Default value is 14, for 16KB, which nicely fits into Intel x86 L1 cache + * 64KB; 20 -> 1MB) Increasing memory usage improves compression ratio, + * generally at the cost of speed. Reduced memory usage may improve speed at the + * cost of ratio, thanks to better cache locality. Default value is 14, for + * 16KB, which nicely fits into most L1 caches. */ #ifndef LZ4_MEMORY_USAGE -#define LZ4_MEMORY_USAGE 14 +#define LZ4_MEMORY_USAGE LZ4_MEMORY_USAGE_DEFAULT +#endif + +/* These are absolute limits, they should not be changed by users */ +#define LZ4_MEMORY_USAGE_MIN 10 +#define LZ4_MEMORY_USAGE_DEFAULT 14 +#define LZ4_MEMORY_USAGE_MAX 20 + +#if (LZ4_MEMORY_USAGE < LZ4_MEMORY_USAGE_MIN) +#error "LZ4_MEMORY_USAGE is too small !" +#endif + +#if (LZ4_MEMORY_USAGE > LZ4_MEMORY_USAGE_MAX) +#error "LZ4_MEMORY_USAGE is too large !" #endif /*-************************************ * Simple Functions **************************************/ /*! LZ4_compress_default() : - Compresses 'srcSize' bytes from buffer 'src' - into already allocated 'dst' buffer of size 'dstCapacity'. - Compression is guaranteed to succeed if 'dstCapacity' >= - LZ4_compressBound(srcSize). It also runs faster, so it's a recommended - setting. If the function cannot compress 'src' into a more limited 'dst' - budget, compression stops *immediately*, and the function result is zero. - Note : as a consequence, 'dst' content is not valid. - Note 2 : This function is protected against buffer overflow scenarios (never - writes outside 'dst' buffer, nor read outside 'source' buffer). srcSize : max - supported value is LZ4_MAX_INPUT_SIZE. 
dstCapacity : size of buffer 'dst' - (which must be already allocated) return : the number of bytes written into - buffer 'dst' (necessarily <= dstCapacity) or 0 if compression fails */ + * Compresses 'srcSize' bytes from buffer 'src' + * into already allocated 'dst' buffer of size 'dstCapacity'. + * Compression is guaranteed to succeed if 'dstCapacity' >= + * LZ4_compressBound(srcSize). It also runs faster, so it's a recommended + * setting. If the function cannot compress 'src' into a more limited 'dst' + * budget, compression stops *immediately*, and the function result is zero. In + * which case, 'dst' content is undefined (invalid). srcSize : max supported + * value is LZ4_MAX_INPUT_SIZE. dstCapacity : size of buffer 'dst' (which must + * be already allocated) + * @return : the number of bytes written into buffer 'dst' (necessarily <= + * dstCapacity) or 0 if compression fails Note : This function is protected + * against buffer overflow scenarios (never writes outside 'dst' buffer, nor + * read outside 'source' buffer). + */ LZ4LIB_API int LZ4_compress_default(const char *src, char *dst, int srcSize, int dstCapacity); /*! LZ4_decompress_safe() : - compressedSize : is the exact complete size of the compressed block. - dstCapacity : is the size of destination buffer, which must be already - allocated. return : the number of bytes decompressed into destination buffer - (necessarily <= dstCapacity) If destination buffer is not large enough, - decoding will stop and output an error code (negative value). If the source - stream is detected malformed, the function will stop decoding and return a - negative result. This function is protected against malicious data packets. + * @compressedSize : is the exact complete size of the compressed block. + * @dstCapacity : is the size of destination buffer (which must be already + * allocated), presumed an upper bound of decompressed size. + * @return : the number of bytes decompressed into destination buffer + * (necessarily <= dstCapacity) If destination buffer is not large enough, + * decoding will stop and output an error code (negative value). If the source + * stream is detected malformed, the function will stop decoding and return a + * negative result. Note 1 : This function is protected against malicious data + * packets : it will never writes outside 'dst' buffer, nor read outside + * 'source' buffer, even if the compressed block is maliciously modified to + * order the decoder to do these actions. In such case, the decoder stops + * immediately, and considers the compressed block malformed. Note 2 : + * compressedSize and dstCapacity must be provided to the function, the + * compressed block does not contain them. The implementation is free to send / + * store / derive this information in whichever way is most beneficial. If there + * is a need for a different format which bundles together both compressed data + * and its metadata, consider looking at lz4frame.h instead. */ LZ4LIB_API int LZ4_decompress_safe(const char *src, char *dst, int compressedSize, int dstCapacity); @@ -172,9 +235,8 @@ LZ4LIB_API int LZ4_decompress_safe(const char *src, char *dst, ? 0 \ : (isize) + ((isize) / 255) + 16) -/*! - LZ4_compressBound() : - Provides the maximum size that LZ4 compression may output in a "worst case" +/*! LZ4_compressBound() : + Provides the maximum size that LZ4 compression may output in a "worst case" scenario (input data not compressible) This function is primarily useful for memory allocation purposes (destination buffer size). 
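The simple-functions contract spelled out in the comments above (the caller carries the compressed size out-of-band, LZ4_compressBound() sizes the destination, LZ4_decompress_safe() validates its input) can be exercised with a minimal round trip. Illustrative sketch only, not part of the patch; the sample string and buffer handling are invented, and only functions declared in this header are used.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    const char src[] = "GRASS rows compress well when values repeat, repeat, repeat.";
    const int src_size = (int)sizeof(src);

    /* worst-case destination size for a block of src_size bytes */
    const int bound = LZ4_compressBound(src_size);
    char *compressed = malloc((size_t)bound);
    char *restored = malloc((size_t)src_size);
    if (!compressed || !restored)
        return 1;

    /* returns 0 if the data does not fit into 'bound' bytes */
    const int c_size = LZ4_compress_default(src, compressed, src_size, bound);
    if (c_size <= 0)
        return 1;

    /* the block carries no sizes: the caller passes c_size and the capacity */
    const int d_size = LZ4_decompress_safe(compressed, restored, c_size, src_size);
    if (d_size != src_size || memcmp(src, restored, (size_t)src_size) != 0)
        return 1;

    printf("%d -> %d -> %d bytes\n", src_size, c_size, d_size);
    free(compressed);
    free(restored);
    return 0;
}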
Macro LZ4_COMPRESSBOUND() is also provided for compilation-time evaluation (stack @@ -183,79 +245,95 @@ LZ4LIB_API int LZ4_decompress_safe(const char *src, char *dst, supported value is LZ4_MAX_INPUT_SIZE return : maximum output size in a "worst case" scenario or 0, if input size is incorrect (too large or negative) - */ +*/ LZ4LIB_API int LZ4_compressBound(int inputSize); -/*! - LZ4_compress_fast() : - Same as LZ4_compress_default(), but allows selection of "acceleration" +/*! LZ4_compress_fast() : + Same as LZ4_compress_default(), but allows selection of "acceleration" factor. The larger the acceleration value, the faster the algorithm, but also the lesser the compression. It's a trade-off. It can be fine tuned, with each successive value providing roughly +~3% to speed. An acceleration value of "1" is the same as regular LZ4_compress_default() Values <= 0 will be - replaced by ACCELERATION_DEFAULT (currently == 1, see lz4.c). - */ + replaced by LZ4_ACCELERATION_DEFAULT (currently == 1, see lz4.c). Values > + LZ4_ACCELERATION_MAX will be replaced by LZ4_ACCELERATION_MAX (currently == + 65537, see lz4.c). +*/ LZ4LIB_API int LZ4_compress_fast(const char *src, char *dst, int srcSize, int dstCapacity, int acceleration); -/*! - LZ4_compress_fast_extState() : - Same compression function, just using an externally allocated memory space to - store compression state. Use LZ4_sizeofState() to know how much memory must - be allocated, and allocate it on 8-bytes boundaries (using malloc() - typically). Then, provide it as 'void* state' to compression function. +/*! LZ4_compress_fast_extState() : + * Same as LZ4_compress_fast(), using an externally allocated memory space for + * its state. Use LZ4_sizeofState() to know how much memory must be allocated, + * and allocate it on 8-bytes boundaries (using `malloc()` typically). + * Then, provide this buffer as `void* state` to compression function. */ LZ4LIB_API int LZ4_sizeofState(void); LZ4LIB_API int LZ4_compress_fast_extState(void *state, const char *src, char *dst, int srcSize, int dstCapacity, int acceleration); -/*! - LZ4_compress_destSize() : - Reverse the logic : compresses as much data as possible from 'src' buffer - into already allocated buffer 'dst' of size 'targetDestSize'. - This function either compresses the entire 'src' content into 'dst' if it's - large enough, or fill 'dst' buffer completely with as much data as possible - from 'src'. *srcSizePtr : will be modified to indicate how many bytes where - read from 'src' to fill 'dst'. New value is necessarily <= old value. return - : Nb bytes written into 'dst' (necessarily <= targetDestSize) or 0 if - compression fails +/*! LZ4_compress_destSize() : + * Reverse the logic : compresses as much data as possible from 'src' buffer + * into already allocated buffer 'dst', of size >= 'dstCapacity'. + * This function either compresses the entire 'src' content into 'dst' if it's + * large enough, or fill 'dst' buffer completely with as much data as possible + * from 'src'. note: acceleration parameter is fixed to "default". + * + * *srcSizePtr : in+out parameter. Initially contains size of input. + * Will be modified to indicate how many bytes where read from + * 'src' to fill 'dst'. New value is necessarily <= input value. + * @return : Nb bytes written into 'dst' (necessarily <= dstCapacity) + * or 0 if compression fails. 
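The externally allocated state and the acceleration knob documented just above can be combined as follows. Hedged sketch, not part of the patch: the helper name compress_many(), the acceleration value of 4, and the sample buffers are invented; LZ4_sizeofState() and LZ4_compress_fast_extState() are the functions declared in this header.

#include <stdlib.h>
#include "lz4.h"

/* Compress several independent buffers while reusing one externally
   allocated state of LZ4_sizeofState() bytes. */
static int compress_many(const char *inputs[], const int sizes[], int n)
{
    void *state = malloc((size_t)LZ4_sizeofState()); /* malloc() satisfies the 8-byte alignment */
    if (!state)
        return -1;

    for (int i = 0; i < n; i++) {
        const int bound = LZ4_compressBound(sizes[i]);
        char *dst = malloc((size_t)bound);
        int written = 0;

        if (dst) {
            /* acceleration 4: faster than the default of 1, slightly worse ratio */
            written = LZ4_compress_fast_extState(state, inputs[i], dst,
                                                 sizes[i], bound, 4);
            /* a real caller would store or transmit 'written' bytes of dst here */
            free(dst);
        }
        if (written <= 0) {
            free(state);
            return -1;
        }
    }
    free(state);
    return 0;
}

int main(void)
{
    const char *bufs[] = {"first block", "second block"};
    const int lens[] = {12, 13}; /* strlen() + 1, terminators included */
    return compress_many(bufs, lens, 2) == 0 ? 0 : 1;
}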
+ * + * Note : from v1.8.2 to v1.9.1, this function had a bug (fixed in v1.9.2+): + * the produced compressed content could, in specific circumstances, + * require to be decompressed into a destination buffer larger + * by at least 1 byte than the content to decompress. + * If an application uses `LZ4_compress_destSize()`, + * it's highly recommended to update liblz4 to v1.9.2 or better. + * If this can't be done or ensured, + * the receiving decompression function should provide + * a dstCapacity which is > decompressedSize, by at least 1 byte. + * See https://github.com/lz4/lz4/issues/859 for details */ LZ4LIB_API int LZ4_compress_destSize(const char *src, char *dst, int *srcSizePtr, int targetDstSize); -/*! - LZ4_decompress_fast() : **unsafe!** - This function is a bit faster than LZ4_decompress_safe(), - but it may misbehave on malformed input because it doesn't perform full - validation of compressed data. originalSize : is the uncompressed size to - regenerate Destination buffer must be already allocated, and its size must be - >= 'originalSize' bytes. return : number of bytes read from source buffer (== - compressed size). If the source stream is detected malformed, the function - stops decoding and return a negative result. note : This function is only - usable if the originalSize of uncompressed data is known in advance. The - caller should also check that all the compressed input has been consumed - properly, i.e. that the return value matches the size of the buffer with - compressed input. The function never writes past the output buffer. However, - since it doesn't know its 'src' size, it may read past the intended input. - Also, because match offsets are not validated during decoding, reads from - 'src' may underflow. Use this function in trusted environment **only**. - */ -LZ4LIB_API int LZ4_decompress_fast(const char *src, char *dst, - int originalSize); - -/*! - LZ4_decompress_safe_partial() : - This function decompress a compressed block of size 'srcSize' at position - 'src' into destination buffer 'dst' of size 'dstCapacity'. The function will - decompress a minimum of 'targetOutputSize' bytes, and stop after that. - However, it's not accurate, and may write more than 'targetOutputSize' (but - always <= dstCapacity). - @return : the number of bytes decoded in the destination buffer (necessarily - <= dstCapacity) Note : this number can also be < targetOutputSize, if - compressed block contains less data. Therefore, always control how many bytes - were decoded. If source stream is detected malformed, function returns a - negative result. This function is protected against malicious data packets. +/*! LZ4_decompress_safe_partial() : + * Decompress an LZ4 compressed block, of size 'srcSize' at position 'src', + * into destination buffer 'dst' of size 'dstCapacity'. + * Up to 'targetOutputSize' bytes will be decoded. + * The function stops decoding on reaching this objective. + * This can be useful to boost performance + * whenever only the beginning of a block is required. + * + * @return : the number of bytes decoded in `dst` (necessarily <= + * targetOutputSize) If source stream is detected malformed, function returns a + * negative result. + * + * Note 1 : @return can be < targetOutputSize, if compressed block contains + * less data. + * + * Note 2 : targetOutputSize must be <= dstCapacity + * + * Note 3 : this function effectively stops decoding on reaching + * targetOutputSize, so dstCapacity is kind of redundant. 
This is because in + * older versions of this function, decoding operation would still write + * complete sequences. Therefore, there was no guarantee that it would stop + * writing at exactly targetOutputSize, it could write more bytes, though only + * up to dstCapacity. Some "margin" used to be required for this operation to + * work properly. Thankfully, this is no longer necessary. The function + * nonetheless keeps the same signature, in an effort to preserve API + * compatibility. + * + * Note 4 : If srcSize is the exact size of the block, + * then targetOutputSize can be any value, + * including larger than the block's decompressed size. + * The function will, at most, generate block's decompressed size. + * + * Note 5 : If srcSize is _larger_ than block's compressed size, + * then targetOutputSize **MUST** be <= block's decompressed size. + * Otherwise, *silent corruption will occur*. */ LZ4LIB_API int LZ4_decompress_safe_partial(const char *src, char *dst, int srcSize, int targetOutputSize, @@ -266,45 +344,144 @@ LZ4LIB_API int LZ4_decompress_safe_partial(const char *src, char *dst, ***********************************************/ typedef union LZ4_stream_u LZ4_stream_t; /* incomplete type (defined later) */ -/*! LZ4_createStream() and LZ4_freeStream() : - * LZ4_createStream() will allocate and initialize an `LZ4_stream_t` structure. - * LZ4_freeStream() releases its memory. - */ +/*! + Note about RC_INVOKED + + - RC_INVOKED is predefined symbol of rc.exe (the resource compiler which is + part of MSVC/Visual Studio). + https://docs.microsoft.com/en-us/windows/win32/menurc/predefined-macros + + - Since rc.exe is a legacy compiler, it truncates long symbol (> 30 chars) + and reports warning "RC4011: identifier truncated". + + - To eliminate the warning, we surround long preprocessor symbol with + "#if !defined(RC_INVOKED) ... #endif" block that means + "skip this block when rc.exe is trying to read it". +*/ +#if !defined( \ + RC_INVOKED) /* https://docs.microsoft.com/en-us/windows/win32/menurc/predefined-macros \ + */ +#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) LZ4LIB_API LZ4_stream_t *LZ4_createStream(void); LZ4LIB_API int LZ4_freeStream(LZ4_stream_t *streamPtr); +#endif /* !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) */ +#endif -/*! LZ4_resetStream() : - * An LZ4_stream_t structure can be allocated once and re-used multiple times. - * Use this function to start compressing a new stream. +/*! LZ4_resetStream_fast() : v1.9.0+ + * Use this to prepare an LZ4_stream_t for a new chain of dependent blocks + * (e.g., LZ4_compress_fast_continue()). + * + * An LZ4_stream_t must be initialized once before usage. + * This is automatically done when created by LZ4_createStream(). + * However, should the LZ4_stream_t be simply declared on stack (for example), + * it's necessary to initialize it first, using LZ4_initStream(). + * + * After init, start any new stream with LZ4_resetStream_fast(). + * A same LZ4_stream_t can be re-used multiple times consecutively + * and compress multiple streams, + * provided that it starts each new stream with LZ4_resetStream_fast(). + * + * LZ4_resetStream_fast() is much faster than LZ4_initStream(), + * but is not compatible with memory regions containing garbage data. + * + * Note: it's only useful to call LZ4_resetStream_fast() + * in the context of streaming compression. + * The *extState* functions perform their own resets. + * Invoking LZ4_resetStream_fast() before is redundant, and even + * counterproductive. 
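The initialize-once / reset-per-stream discipline described in the LZ4_resetStream_fast() comment above looks like this in practice. Illustrative sketch only: the chunk strings and loop bounds are invented; LZ4_initStream() is the initializer declared further down in this header, and each LZ4_compress_fast_continue() call produces one dependent block.

#include <stdio.h>
#include <string.h>
#include "lz4.h"

int main(void)
{
    LZ4_stream_t ctx; /* plain stack object: must be initialized once */
    const char *chunks[] = {"block one, ", "block two, ", "block three"};
    char dst[LZ4_COMPRESSBOUND(64)];

    if (LZ4_initStream(&ctx, sizeof(ctx)) == NULL)
        return 1;

    for (int stream = 0; stream < 2; stream++) {
        /* cheap per-stream reset, as opposed to a full re-initialization */
        LZ4_resetStream_fast(&ctx);

        for (int i = 0; i < 3; i++) {
            const int n = (int)strlen(chunks[i]);
            /* one dependent block per call; previously compressed source must
               stay readable at the same address (true for string literals) */
            const int written = LZ4_compress_fast_continue(&ctx, chunks[i], dst,
                                                           n, (int)sizeof(dst), 1);
            if (written <= 0)
                return 1;
            printf("stream %d, block %d: %d -> %d bytes\n", stream, i, n, written);
        }
    }
    return 0;
}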
*/ -LZ4LIB_API void LZ4_resetStream(LZ4_stream_t *streamPtr); +LZ4LIB_API void LZ4_resetStream_fast(LZ4_stream_t *streamPtr); /*! LZ4_loadDict() : - * Use this function to load a static dictionary into LZ4_stream_t. - * Any previous data will be forgotten, only 'dictionary' will remain in - * memory. Loading a size of 0 is allowed, and is the same as reset. - * @return : dictionary size, in bytes (necessarily <= 64 KB) + * Use this function to reference a static dictionary into LZ4_stream_t. + * The dictionary must remain available during compression. + * LZ4_loadDict() triggers a reset, so any previous data will be forgotten. + * The same dictionary will have to be loaded on decompression side for + * successful decoding. Dictionary are useful for better compression of small + * data (KB range). While LZ4 itself accepts any input as dictionary, dictionary + * efficiency is also a topic. When in doubt, employ the Zstandard's Dictionary + * Builder. Loading a size of 0 is allowed, and is the same as reset. + * @return : loaded dictionary size, in bytes (note: only the last 64 KB are + * loaded) */ LZ4LIB_API int LZ4_loadDict(LZ4_stream_t *streamPtr, const char *dictionary, int dictSize); +/*! LZ4_loadDictSlow() : v1.10.0+ + * Same as LZ4_loadDict(), + * but uses a bit more cpu to reference the dictionary content more thoroughly. + * This is expected to slightly improve compression ratio. + * The extra-cpu cost is likely worth it if the dictionary is re-used across + * multiple sessions. + * @return : loaded dictionary size, in bytes (note: only the last 64 KB are + * loaded) + */ +LZ4LIB_API int LZ4_loadDictSlow(LZ4_stream_t *streamPtr, const char *dictionary, + int dictSize); + +/*! LZ4_attach_dictionary() : stable since v1.10.0 + * + * This allows efficient re-use of a static dictionary multiple times. + * + * Rather than re-loading the dictionary buffer into a working context before + * each compression, or copying a pre-loaded dictionary's LZ4_stream_t into a + * working LZ4_stream_t, this function introduces a no-copy setup mechanism, + * in which the working stream references @dictionaryStream in-place. + * + * Several assumptions are made about the state of @dictionaryStream. + * Currently, only states which have been prepared by LZ4_loadDict() or + * LZ4_loadDictSlow() should be expected to work. + * + * Alternatively, the provided @dictionaryStream may be NULL, + * in which case any existing dictionary stream is unset. + * + * If a dictionary is provided, it replaces any pre-existing stream history. + * The dictionary contents are the only history that can be referenced and + * logically immediately precede the data compressed in the first subsequent + * compression call. + * + * The dictionary will only remain attached to the working stream through the + * first compression call, at the end of which it is cleared. + * @dictionaryStream stream (and source buffer) must remain in-place / + * accessible / unchanged through the completion of the compression session. + * + * Note: there is no equivalent LZ4_attach_*() method on the decompression side + * because there is no initialization cost, hence no need to share the cost + * across multiple sessions. To decompress LZ4 blocks using dictionary, attached + * or not, just employ the regular LZ4_setStreamDecode() for streaming, or the + * stateless LZ4_decompress_safe_usingDict() for one-shot decompression. + */ +LZ4LIB_API void LZ4_attach_dictionary(LZ4_stream_t *workingStream, + const LZ4_stream_t *dictionaryStream); + /*! 
LZ4_compress_fast_continue() : * Compress 'src' content using data from previously compressed blocks, for * better compression ratio. 'dst' buffer must be already allocated. If * dstCapacity >= LZ4_compressBound(srcSize), compression is guaranteed to * succeed, and runs faster. * - * Important : The previous 64KB of compressed data is assumed to remain - * present and unmodified in memory! - * - * Special 1 : When input is a double-buffer, they can have any size, including - * < 64 KB. Make sure that buffers are separated by at least one byte. This way, - * each block only depends on previous block. Special 2 : If input buffer is a - * ring-buffer, it can have any size, including < 64 KB. - * * @return : size of compressed block * or 0 if there is an error (typically, cannot fit into 'dst'). - * After an error, the stream status is invalid, it can only be reset or freed. + * + * Note 1 : Each invocation to LZ4_compress_fast_continue() generates a new + * block. Each block has precise boundaries. Each block must be decompressed + * separately, calling LZ4_decompress_*() with relevant metadata. It's not + * possible to append blocks together and expect a single invocation of + * LZ4_decompress_*() to decompress them together. + * + * Note 2 : The previous 64KB of source data is __assumed__ to remain present, + * unmodified, at same address in memory ! + * + * Note 3 : When input is structured as a double-buffer, each buffer can have + * any size, including < 64 KB. Make sure that buffers are separated, by at + * least one byte. This construction ensures that each block only depends on + * previous block. + * + * Note 4 : If input buffer is a ring-buffer, it can have any size, including < + * 64 KB. + * + * Note 5 : After an error, the stream status is undefined (invalid), it can + * only be reset or freed. */ LZ4LIB_API int LZ4_compress_fast_continue(LZ4_stream_t *streamPtr, const char *src, char *dst, @@ -332,13 +509,19 @@ typedef union LZ4_streamDecode_u LZ4_streamDecode_t; /* tracking context */ * creation / destruction of streaming decompression tracking context. * A tracking context can be re-used multiple times. */ +#if !defined( \ + RC_INVOKED) /* https://docs.microsoft.com/en-us/windows/win32/menurc/predefined-macros \ + */ +#if !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) LZ4LIB_API LZ4_streamDecode_t *LZ4_createStreamDecode(void); LZ4LIB_API int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream); +#endif /* !defined(LZ4_STATIC_LINKING_ONLY_DISABLE_MEMORY_ALLOCATION) */ +#endif /*! LZ4_setStreamDecode() : * An LZ4_streamDecode_t context can be allocated once and re-used multiple * times. Use this function to start decompression of a new stream of blocks. A - * dictionary can optionnally be set. Use NULL or size 0 for a reset order. + * dictionary can optionally be set. Use NULL or size 0 for a reset order. * Dictionary is presumed stable : it must remain accessible and unmodified * during next decompression. * @return : 1 if OK, 0 if error @@ -346,7 +529,7 @@ LZ4LIB_API int LZ4_freeStreamDecode(LZ4_streamDecode_t *LZ4_stream); LZ4LIB_API int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, const char *dictionary, int dictSize); -/*! LZ4_decoderRingBufferSize() : v1.8.2 +/*! 
LZ4_decoderRingBufferSize() : v1.8.2+ * Note : in a ring buffer scenario (optional), * blocks are presumed decompressed next to each other * up to the moment there is not enough remaining space for next block @@ -358,16 +541,31 @@ LZ4LIB_API int LZ4_setStreamDecode(LZ4_streamDecode_t *LZ4_streamDecode, * or 0 if there is an error (invalid maxBlockSize). */ LZ4LIB_API int LZ4_decoderRingBufferSize(int maxBlockSize); -#define LZ4_DECODER_RING_BUFFER_SIZE(mbs) \ - (65536 + 14 + (mbs)) /* for static allocation; mbs presumed valid */ - -/*! LZ4_decompress_*_continue() : - * These decoding functions allow decompression of consecutive blocks in - * "streaming" mode. A block is an unsplittable entity, it must be presented - * entirely to a decompression function. Decompression functions only accepts - * one block at a time. The last 64KB of previously decoded data *must* remain - * available and unmodified at the memory position where they were decoded. If - * less than 64KB of data has been decoded, all the data must be present. +#define LZ4_DECODER_RING_BUFFER_SIZE(maxBlockSize) \ + (65536 + 14 + \ + (maxBlockSize)) /* for static allocation; maxBlockSize presumed valid */ + +/*! LZ4_decompress_safe_continue() : + * This decoding function allows decompression of consecutive blocks in + * "streaming" mode. The difference with the usual independent blocks is that + * new blocks are allowed to find references into former blocks. + * A block is an unsplittable entity, and must be presented entirely to the + * decompression function. LZ4_decompress_safe_continue() only accepts one block + * at a time. It's modeled after `LZ4_decompress_safe()` and behaves similarly. + * + * @LZ4_streamDecode : decompression state, tracking the position in memory of + * past data + * @compressedSize : exact complete size of one compressed block. + * @dstCapacity : size of destination buffer (which must be already allocated), + * must be an upper bound of decompressed size. + * @return : number of bytes decompressed into destination buffer (necessarily + * <= dstCapacity) If destination buffer is not large enough, decoding will stop + * and output an error code (negative value). If the source stream is detected + * malformed, the function will stop decoding and return a negative result. + * + * The last 64KB of previously decoded data *must* remain available and + * unmodified at the memory position where they were previously decoded. If less + * than 64KB of data has been decoded, all the data must be present. * * Special : if decompression side sets a ring buffer, it must respect one of * the following conditions : @@ -397,130 +595,185 @@ LZ4LIB_API int LZ4_decompress_safe_continue(LZ4_streamDecode_t *LZ4_streamDecode, const char *src, char *dst, int srcSize, int dstCapacity); -LZ4LIB_API int -LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, - const char *src, char *dst, int originalSize); -/*! LZ4_decompress_*_usingDict() : - * These decoding functions work the same as +/*! LZ4_decompress_safe_usingDict() : + * Works the same as * a combination of LZ4_setStreamDecode() followed by - * LZ4_decompress_*_continue() They are stand-alone, and don't need an - * LZ4_streamDecode_t structure. Dictionary is presumed stable : it must remain - * accessible and unmodified during next decompression. + * LZ4_decompress_safe_continue() However, it's stateless: it doesn't need any + * LZ4_streamDecode_t state. Dictionary is presumed stable : it must remain + * accessible and unmodified during decompression. 
Performance tip : + * Decompression speed can be substantially increased when dst == dictStart + + * dictSize. */ LZ4LIB_API int LZ4_decompress_safe_usingDict(const char *src, char *dst, - int srcSize, int dstCapcity, - const char *dictStart, - int dictSize); -LZ4LIB_API int LZ4_decompress_fast_usingDict(const char *src, char *dst, - int originalSize, + int srcSize, int dstCapacity, const char *dictStart, int dictSize); -/*^********************************************** +/*! LZ4_decompress_safe_partial_usingDict() : + * Behaves the same as LZ4_decompress_safe_partial() + * with the added ability to specify a memory segment for past data. + * Performance tip : Decompression speed can be substantially increased + * when dst == dictStart + dictSize. + */ +LZ4LIB_API int LZ4_decompress_safe_partial_usingDict( + const char *src, char *dst, int compressedSize, int targetOutputSize, + int maxOutputSize, const char *dictStart, int dictSize); + +#endif /* LZ4_H_2983827168210 */ + +/*^************************************* * !!!!!! STATIC LINKING ONLY !!!!!! - ***********************************************/ + ***************************************/ -/*-************************************ - * Unstable declarations - ************************************** - * Declarations in this section should be considered unstable. - * Use at your own peril, etc., etc. - * They may be removed in the future. - * Their signatures may change. - **************************************/ +/*-**************************************************************************** + * Experimental section + * + * Symbols declared in this section must be considered unstable. Their + * signatures or semantics may change, or they may be removed altogether in the + * future. They are therefore only safe to depend on when the caller is + * statically linked against the library. + * + * To protect against unsafe usage, not only are the declarations guarded, + * the definitions are hidden by default + * when building LZ4 as a shared/dynamic library. + * + * In order to access these declarations, + * define LZ4_STATIC_LINKING_ONLY in your application + * before including LZ4's headers. + * + * In order to make their implementations accessible dynamically, you must + * define LZ4_PUBLISH_STATIC_FUNCTIONS when building the LZ4 library. + ******************************************************************************/ #ifdef LZ4_STATIC_LINKING_ONLY -/*! LZ4_resetStream_fast() : - * Use this, like LZ4_resetStream(), to prepare a context for a new chain of - * calls to a streaming API (e.g., LZ4_compress_fast_continue()). - * - * Note: - * Using this in advance of a non- streaming-compression function is redundant, - * and potentially bad for performance, since they all perform their own custom - * reset internally. - * - * Differences from LZ4_resetStream(): - * When an LZ4_stream_t is known to be in a internally coherent state, - * it can often be prepared for a new compression with almost no work, only - * sometimes falling back to the full, expensive reset that is always required - * when the stream is in an indeterminate state (i.e., the reset performed by - * LZ4_resetStream()). 
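To connect LZ4_loadDict() with the stateless LZ4_decompress_safe_usingDict() documented above, here is a hedged round-trip sketch (not part of the patch; the dictionary text and sample data are invented). The same dictionary bytes must remain available and unmodified on both the compression and decompression side.

#include <string.h>
#include "lz4.h"

/* Round trip of one block compressed against a small static dictionary. */
int main(void)
{
    static const char dict[] = "north south east west rows cols resolution";
    const char src[] = "rows and cols follow the region resolution";
    char compressed[LZ4_COMPRESSBOUND(sizeof(src))];
    char restored[sizeof(src)];
    LZ4_stream_t ctx;
    int c_size, d_size;

    if (LZ4_initStream(&ctx, sizeof(ctx)) == NULL)
        return 1;
    /* reference the dictionary; it must stay unmodified while compressing */
    LZ4_loadDict(&ctx, dict, (int)sizeof(dict));

    c_size = LZ4_compress_fast_continue(&ctx, src, compressed, (int)sizeof(src),
                                        (int)sizeof(compressed), 1);
    if (c_size <= 0)
        return 1;

    /* stateless decoding: the same dictionary is passed explicitly */
    d_size = LZ4_decompress_safe_usingDict(compressed, restored, c_size,
                                           (int)sizeof(restored),
                                           dict, (int)sizeof(dict));
    return (d_size == (int)sizeof(src) &&
            memcmp(src, restored, sizeof(src)) == 0) ? 0 : 1;
}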
- * - * LZ4_streams are guaranteed to be in a valid state when: - * - returned from LZ4_createStream() - * - reset by LZ4_resetStream() - * - memset(stream, 0, sizeof(LZ4_stream_t)), though this is discouraged - * - the stream was in a valid state and was reset by LZ4_resetStream_fast() - * - the stream was in a valid state and was then used in any compression call - * that returned success - * - the stream was in an indeterminate state and was used in a compression - * call that fully reset the state (e.g., LZ4_compress_fast_extState()) and - * that returned success - * - * When a stream isn't known to be in a valid state, it is not safe to pass to - * any fastReset or streaming function. It must first be cleansed by the full - * LZ4_resetStream(). - */ -LZ4LIB_API void LZ4_resetStream_fast(LZ4_stream_t *streamPtr); +#ifndef LZ4_STATIC_3504398509 +#define LZ4_STATIC_3504398509 + +#ifdef LZ4_PUBLISH_STATIC_FUNCTIONS +#define LZ4LIB_STATIC_API LZ4LIB_API +#else +#define LZ4LIB_STATIC_API +#endif /*! LZ4_compress_fast_extState_fastReset() : * A variant of LZ4_compress_fast_extState(). * - * Using this variant avoids an expensive initialization step. It is only safe - * to call if the state buffer is known to be correctly initialized already - * (see above comment on LZ4_resetStream_fast() for a definition of "correctly - * initialized"). From a high level, the difference is that this function - * initializes the provided state with a call to something like - * LZ4_resetStream_fast() while LZ4_compress_fast_extState() starts with a - * call to LZ4_resetStream(). + * Using this variant avoids an expensive initialization step. + * It is only safe to call if the state buffer is known to be correctly + * initialized already (see above comment on LZ4_resetStream_fast() for a + * definition of "correctly initialized"). From a high level, the difference is + * that this function initializes the provided state with a call to something + * like LZ4_resetStream_fast() while LZ4_compress_fast_extState() starts with a + * call to LZ4_resetStream(). */ -LZ4LIB_API int LZ4_compress_fast_extState_fastReset(void *state, - const char *src, char *dst, - int srcSize, - int dstCapacity, - int acceleration); +LZ4LIB_STATIC_API int +LZ4_compress_fast_extState_fastReset(void *state, const char *src, char *dst, + int srcSize, int dstCapacity, + int acceleration); + +/*! LZ4_compress_destSize_extState() : introduced in v1.10.0 + * Same as LZ4_compress_destSize(), but using an externally allocated state. + * Also: exposes @acceleration + */ +int LZ4_compress_destSize_extState(void *state, const char *src, char *dst, + int *srcSizePtr, int targetDstSize, + int acceleration); -/*! LZ4_attach_dictionary() : - * This is an experimental API that allows for the efficient use of a - * static dictionary many times. +/*! In-place compression and decompression * - * Rather than re-loading the dictionary buffer into a working context before - * each compression, or copying a pre-loaded dictionary's LZ4_stream_t into a - * working LZ4_stream_t, this function introduces a no-copy setup mechanism, - * in which the working stream references the dictionary stream in-place. + * It's possible to have input and output sharing the same buffer, + * for highly constrained memory environments. + * In both cases, it requires input to lay at the end of the buffer, + * and decompression to start at beginning of the buffer. + * Buffer size must feature some margin, hence be larger than final size. 
* - * Several assumptions are made about the state of the dictionary stream. - * Currently, only streams which have been prepared by LZ4_loadDict() should - * be expected to work. + * |<------------------------buffer--------------------------------->| + * |<-----------compressed data--------->| + * |<-----------decompressed size------------------>| + * |<----margin---->| * - * Alternatively, the provided dictionary stream pointer may be NULL, in which - * case any existing dictionary stream is unset. + * This technique is more useful for decompression, + * since decompressed size is typically larger, + * and margin is short. * - * If a dictionary is provided, it replaces any pre-existing stream history. - * The dictionary contents are the only history that can be referenced and - * logically immediately precede the data compressed in the first subsequent - * compression call. + * In-place decompression will work inside any buffer + * which size is >= LZ4_DECOMPRESS_INPLACE_BUFFER_SIZE(decompressedSize). + * This presumes that decompressedSize > compressedSize. + * Otherwise, it means compression actually expanded data, + * and it would be more efficient to store such data with a flag indicating it's + * not compressed. This can happen when data is not compressible (already + * compressed, or encrypted). * - * The dictionary will only remain attached to the working stream through the - * first compression call, at the end of which it is cleared. The dictionary - * stream (and source buffer) must remain in-place / accessible / unchanged - * through the completion of the first compression call on the stream. + * For in-place compression, margin is larger, as it must be able to cope with + * both history preservation, requiring input data to remain unmodified up to + * LZ4_DISTANCE_MAX, and data expansion, which can happen when input is not + * compressible. As a consequence, buffer size requirements are much higher, and + * memory savings offered by in-place compression are more limited. + * + * There are ways to limit this cost for compression : + * - Reduce history size, by modifying LZ4_DISTANCE_MAX. + * Note that it is a compile-time constant, so all compressions will apply + * this limit. Lower values will reduce compression ratio, except when + * input_size < LZ4_DISTANCE_MAX, so it's a reasonable trick when inputs are + * known to be small. + * - Require the compressor to deliver a "maximum compressed size". + * This is the `dstCapacity` parameter in `LZ4_compress*()`. + * When this size is < LZ4_COMPRESSBOUND(inputSize), then compression can + * fail, in which case, the return code will be 0 (zero). The caller must be + * ready for these cases to happen, and typically design a backup scheme to send + * data uncompressed. The combination of both techniques can significantly + * reduce the amount of margin required for in-place compression. + * + * In-place compression can work in any buffer + * which size is >= (maxCompressedSize) + * with maxCompressedSize == LZ4_COMPRESSBOUND(srcSize) for guaranteed + * compression success. LZ4_COMPRESS_INPLACE_BUFFER_SIZE() depends on both + * maxCompressedSize and LZ4_DISTANCE_MAX, so it's possible to reduce memory + * requirements by playing with them. 
*/ -LZ4LIB_API void LZ4_attach_dictionary(LZ4_stream_t *working_stream, - const LZ4_stream_t *dictionary_stream); +#define LZ4_DECOMPRESS_INPLACE_MARGIN(compressedSize) \ + (((compressedSize) >> 8) + 32) +#define LZ4_DECOMPRESS_INPLACE_BUFFER_SIZE(decompressedSize) \ + ((decompressedSize) + \ + LZ4_DECOMPRESS_INPLACE_MARGIN( \ + decompressedSize)) /**< note: presumes that compressedSize < \ + decompressedSize. note2: margin is \ + overestimated a bit, since it could use \ + compressedSize instead */ + +#ifndef LZ4_DISTANCE_MAX /* history window size; can be user-defined at \ + compile time */ +#define LZ4_DISTANCE_MAX 65535 /* set to maximum value by default */ #endif -/*-************************************ - * Private definitions - ************************************** - * Do not use these definitions. - * They are exposed to allow static allocation of `LZ4_stream_t` and - *`LZ4_streamDecode_t`. Using these definitions will expose code to API and/or +#define LZ4_COMPRESS_INPLACE_MARGIN \ + (LZ4_DISTANCE_MAX + 32) /* LZ4_DISTANCE_MAX can be safely replaced by \ + srcSize when it's smaller */ +#define LZ4_COMPRESS_INPLACE_BUFFER_SIZE(maxCompressedSize) \ + ((maxCompressedSize) + \ + LZ4_COMPRESS_INPLACE_MARGIN) /**< maxCompressedSize is generally \ + LZ4_COMPRESSBOUND(inputSize), but can be \ + set to any lower value, with the risk \ + that compression can fail (return code \ + 0(zero)) */ + +#endif /* LZ4_STATIC_3504398509 */ +#endif /* LZ4_STATIC_LINKING_ONLY */ + +#ifndef LZ4_H_98237428734687 +#define LZ4_H_98237428734687 + +/*-************************************************************ + * Private Definitions + ************************************************************** + * Do not use these definitions directly. + * They are only exposed to allow static allocation of `LZ4_stream_t` and + *`LZ4_streamDecode_t`. Accessing members will expose user code to API and/or *ABI break in future versions of the library. - **************************************/ + **************************************************************/ #define LZ4_HASHLOG (LZ4_MEMORY_USAGE - 2) #define LZ4_HASHTABLESIZE (1 << LZ4_MEMORY_USAGE) #define LZ4_HASH_SIZE_U32 \ @@ -529,75 +782,74 @@ LZ4LIB_API void LZ4_attach_dictionary(LZ4_stream_t *working_stream, #if defined(__cplusplus) || \ (defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) #include - -typedef struct LZ4_stream_t_internal LZ4_stream_t_internal; -struct LZ4_stream_t_internal { - uint32_t hashTable[LZ4_HASH_SIZE_U32]; - uint32_t currentOffset; - uint16_t initCheck; - uint16_t tableType; - const uint8_t *dictionary; - const LZ4_stream_t_internal *dictCtx; - uint32_t dictSize; -}; - -typedef struct { - const uint8_t *externalDict; - size_t extDictSize; - const uint8_t *prefixEnd; - size_t prefixSize; -} LZ4_streamDecode_t_internal; - +typedef int8_t LZ4_i8; +typedef uint8_t LZ4_byte; +typedef uint16_t LZ4_u16; +typedef uint32_t LZ4_u32; #else +typedef signed char LZ4_i8; +typedef unsigned char LZ4_byte; +typedef unsigned short LZ4_u16; +typedef unsigned int LZ4_u32; +#endif + +/*! LZ4_stream_t : + * Never ever use below internal definitions directly ! + * These definitions are not API/ABI safe, and may change in future versions. + * If you need static allocation, declare or allocate an LZ4_stream_t object. 
+ **/ typedef struct LZ4_stream_t_internal LZ4_stream_t_internal; struct LZ4_stream_t_internal { - unsigned int hashTable[LZ4_HASH_SIZE_U32]; - unsigned int currentOffset; - unsigned short initCheck; - unsigned short tableType; - const unsigned char *dictionary; + LZ4_u32 hashTable[LZ4_HASH_SIZE_U32]; + const LZ4_byte *dictionary; const LZ4_stream_t_internal *dictCtx; - unsigned int dictSize; + LZ4_u32 currentOffset; + LZ4_u32 tableType; + LZ4_u32 dictSize; + /* Implicit padding to ensure structure is aligned */ }; +#define LZ4_STREAM_MINSIZE \ + ((1UL << (LZ4_MEMORY_USAGE)) + \ + 32) /* static size, for inter-version compatibility */ +union LZ4_stream_u { + char minStateSize[LZ4_STREAM_MINSIZE]; + LZ4_stream_t_internal internal_donotuse; +}; /* previously typedef'd to LZ4_stream_t */ + +/*! LZ4_initStream() : v1.9.0+ + * An LZ4_stream_t structure must be initialized at least once. + * This is automatically done when invoking LZ4_createStream(), + * but it's not when the structure is simply declared on stack (for example). + * + * Use LZ4_initStream() to properly initialize a newly declared LZ4_stream_t. + * It can also initialize any arbitrary buffer of sufficient size, + * and will @return a pointer of proper type upon initialization. + * + * Note : initialization fails if size and alignment conditions are not + *respected. In which case, the function will @return NULL. Note2: An + *LZ4_stream_t structure guarantees correct alignment and size. Note3: Before + *v1.9.0, use LZ4_resetStream() instead + **/ +LZ4LIB_API LZ4_stream_t *LZ4_initStream(void *stateBuffer, size_t size); + +/*! LZ4_streamDecode_t : + * Never ever use below internal definitions directly ! + * These definitions are not API/ABI safe, and may change in future versions. + * If you need static allocation, declare or allocate an LZ4_streamDecode_t + *object. + **/ typedef struct { - const unsigned char *externalDict; + const LZ4_byte *externalDict; + const LZ4_byte *prefixEnd; size_t extDictSize; - const unsigned char *prefixEnd; size_t prefixSize; } LZ4_streamDecode_t_internal; -#endif - -/*! - * LZ4_stream_t : - * information structure to track an LZ4 stream. - * init this structure before first use. - * note : only use in association with static linking ! - * this definition is not API/ABI safe, - * it may change in a future version ! - */ -#define LZ4_STREAMSIZE_U64 ((1 << (LZ4_MEMORY_USAGE - 3)) + 4) -#define LZ4_STREAMSIZE (LZ4_STREAMSIZE_U64 * sizeof(unsigned long long)) -union LZ4_stream_u { - unsigned long long table[LZ4_STREAMSIZE_U64]; - LZ4_stream_t_internal internal_donotuse; -}; /* previously typedef'd to LZ4_stream_t */ - -/*! - * LZ4_streamDecode_t : - * information structure to track an LZ4 stream during decompression. - * init this structure using LZ4_setStreamDecode (or memset()) before first use - * note : only use in association with static linking ! - * this definition is not API/ABI safe, - * and may change in a future version ! - */ -#define LZ4_STREAMDECODESIZE_U64 4 -#define LZ4_STREAMDECODESIZE \ - (LZ4_STREAMDECODESIZE_U64 * sizeof(unsigned long long)) +#define LZ4_STREAMDECODE_MINSIZE 32 union LZ4_streamDecode_u { - unsigned long long table[LZ4_STREAMDECODESIZE_U64]; + char minStateSize[LZ4_STREAMDECODE_MINSIZE]; LZ4_streamDecode_t_internal internal_donotuse; }; /* previously typedef'd to LZ4_streamDecode_t */ @@ -606,36 +858,41 @@ union LZ4_streamDecode_u { **************************************/ /*! 
Deprecation warnings - Should deprecation warnings be a problem, - it is generally possible to disable them, - typically with -Wno-deprecated-declarations for gcc - or _CRT_SECURE_NO_WARNINGS in Visual. - Otherwise, it's also possible to define LZ4_DISABLE_DEPRECATE_WARNINGS */ + * + * Deprecated functions make the compiler generate a warning when invoked. + * This is meant to invite users to update their source code. + * Should deprecation warnings be a problem, it is generally possible to + * disable them, typically with -Wno-deprecated-declarations for gcc or + * _CRT_SECURE_NO_WARNINGS in Visual. + * + * Another method is to define LZ4_DISABLE_DEPRECATE_WARNINGS + * before including the header file. + */ #ifdef LZ4_DISABLE_DEPRECATE_WARNINGS #define LZ4_DEPRECATED(message) /* disable deprecation warnings */ #else -#define LZ4_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__) #if defined(__cplusplus) && (__cplusplus >= 201402) /* C++14 or greater */ #define LZ4_DEPRECATED(message) [[deprecated(message)]] -#elif (LZ4_GCC_VERSION >= 405) || defined(__clang__) -#define LZ4_DEPRECATED(message) __attribute__((deprecated(message))) -#elif (LZ4_GCC_VERSION >= 301) -#define LZ4_DEPRECATED(message) __attribute__((deprecated)) #elif defined(_MSC_VER) #define LZ4_DEPRECATED(message) __declspec(deprecated(message)) +#elif defined(__clang__) || \ + (defined(__GNUC__) && (__GNUC__ * 10 + __GNUC_MINOR__ >= 45)) +#define LZ4_DEPRECATED(message) __attribute__((deprecated(message))) +#elif defined(__GNUC__) && (__GNUC__ * 10 + __GNUC_MINOR__ >= 31) +#define LZ4_DEPRECATED(message) __attribute__((deprecated)) #else #pragma message( \ - "WARNING: You need to implement LZ4_DEPRECATED for this compiler") -#define LZ4_DEPRECATED(message) + "WARNING: LZ4_DEPRECATED needs custom implementation for this compiler") +#define LZ4_DEPRECATED(message) /* disabled */ #endif #endif /* LZ4_DISABLE_DEPRECATE_WARNINGS */ -/* Obsolete compression functions */ +/*! Obsolete compression functions (since v1.7.3) */ LZ4_DEPRECATED("use LZ4_compress_default() instead") -LZ4LIB_API int LZ4_compress(const char *source, char *dest, int sourceSize); +LZ4LIB_API int LZ4_compress(const char *src, char *dest, int srcSize); LZ4_DEPRECATED("use LZ4_compress_default() instead") -LZ4LIB_API int LZ4_compress_limitedOutput(const char *source, char *dest, - int sourceSize, int maxOutputSize); +LZ4LIB_API int LZ4_compress_limitedOutput(const char *src, char *dest, + int srcSize, int maxOutputSize); LZ4_DEPRECATED("use LZ4_compress_fast_extState() instead") LZ4LIB_API int LZ4_compress_withState(void *state, const char *source, char *dest, int inputSize); @@ -654,14 +911,15 @@ int LZ4_compress_limitedOutput_continue(LZ4_stream_t *LZ4_streamPtr, const char *source, char *dest, int inputSize, int maxOutputSize); -/* Obsolete decompression functions */ +/*! Obsolete decompression functions (since v1.8.0) */ LZ4_DEPRECATED("use LZ4_decompress_fast() instead") LZ4LIB_API int LZ4_uncompress(const char *source, char *dest, int outputSize); LZ4_DEPRECATED("use LZ4_decompress_safe() instead") LZ4LIB_API int LZ4_uncompress_unknownOutputSize(const char *source, char *dest, int isize, int maxOutputSize); -/* Obsolete streaming functions; degraded functionality; do not use! +/* Obsolete streaming functions (since v1.7.0) + * degraded functionality; do not use! * * In order to perform streaming compression, these functions depended on data * that is no longer tracked in the state. 
They have been preserved as well as @@ -679,7 +937,7 @@ LZ4LIB_API int LZ4_resetStreamState(void *state, char *inputBuffer); LZ4_DEPRECATED("Use LZ4_saveDict() instead") LZ4LIB_API char *LZ4_slideInputBuffer(void *state); -/* Obsolete streaming decoding functions */ +/*! Obsolete streaming decoding functions (since v1.7.0) */ LZ4_DEPRECATED("use LZ4_decompress_safe_usingDict() instead") LZ4LIB_API int LZ4_decompress_safe_withPrefix64k(const char *src, char *dst, @@ -688,7 +946,64 @@ LZ4_DEPRECATED("use LZ4_decompress_fast_usingDict() instead") LZ4LIB_API int LZ4_decompress_fast_withPrefix64k(const char *src, char *dst, int originalSize); -#endif /* LZ4_H_2983827168210 */ +/*! Obsolete LZ4_decompress_fast variants (since v1.9.0) : + * These functions used to be faster than LZ4_decompress_safe(), + * but this is no longer the case. They are now slower. + * This is because LZ4_decompress_fast() doesn't know the input size, + * and therefore must progress more cautiously into the input buffer to not + * read beyond the end of block. On top of that `LZ4_decompress_fast()` is not + * protected vs malformed or malicious inputs, making it a security liability. + * As a consequence, LZ4_decompress_fast() is strongly discouraged, and + * deprecated. + * + * The last remaining LZ4_decompress_fast() specificity is that + * it can decompress a block without knowing its compressed size. + * Such functionality can be achieved in a more secure manner + * by employing LZ4_decompress_safe_partial(). + * + * Parameters: + * originalSize : is the uncompressed size to regenerate. + * `dst` must be already allocated, its size must be >= + * 'originalSize' bytes. + * @return : number of bytes read from source buffer (== compressed size). + * The function expects to finish at block's end exactly. + * If the source stream is detected malformed, the function stops + * decoding and returns a negative result. note : LZ4_decompress_fast*() + * requires originalSize. Thanks to this information, it never writes past the + * output buffer. However, since it doesn't know its 'src' size, it may read an + * unknown amount of input, past input buffer bounds. Also, since match offsets + * are not validated, match reads from 'src' may underflow too. These issues + * never happen if input (compressed) data is correct. But they may happen if + * input data is invalid (error or intentional tampering). As a consequence, use + * these functions in trusted environments with trusted data **only**. + */ +LZ4_DEPRECATED("This function is deprecated and unsafe. Consider using " + "LZ4_decompress_safe_partial() instead") +LZ4LIB_API int LZ4_decompress_fast(const char *src, char *dst, + int originalSize); +LZ4_DEPRECATED("This function is deprecated and unsafe. Consider migrating " + "towards LZ4_decompress_safe_continue() instead. " + "Note that the contract will change (requires block's " + "compressed size, instead of decompressed size)") +LZ4LIB_API int +LZ4_decompress_fast_continue(LZ4_streamDecode_t *LZ4_streamDecode, + const char *src, char *dst, int originalSize); +LZ4_DEPRECATED("This function is deprecated and unsafe. Consider using " + "LZ4_decompress_safe_partial_usingDict() instead") +LZ4LIB_API int LZ4_decompress_fast_usingDict(const char *src, char *dst, + int originalSize, + const char *dictStart, + int dictSize); + +/*! LZ4_resetStream() : + * An LZ4_stream_t structure must be initialized at least once. + * This is done with LZ4_initStream(), or LZ4_resetStream(). 
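The deprecation note above points at LZ4_decompress_safe_partial() as the safer replacement when only the decompressed size is known. A possible migration shim, with an invented name, under the assumption that the caller can at least bound the readable input; Note 5 of LZ4_decompress_safe_partial() is satisfied because targetOutputSize equals the block's decompressed size.

#include "lz4.h"

/* Drop-in for a legacy LZ4_decompress_fast() call site: 'src_avail' is the
   number of readable input bytes (>= the block's real compressed size) and
   'original_size' is the known decompressed size. Returns the number of
   decoded bytes, or a negative value on malformed input. */
int decode_known_size(const char *src, int src_avail, char *dst,
                      int original_size)
{
    /* bounded, validated reads, unlike the deprecated fast variant */
    return LZ4_decompress_safe_partial(src, dst, src_avail,
                                       original_size, original_size);
}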
+ * Consider switching to LZ4_initStream(), + * invoking LZ4_resetStream() will trigger deprecation warnings in the future. + */ +LZ4LIB_API void LZ4_resetStream(LZ4_stream_t *streamPtr); + +#endif /* LZ4_H_98237428734687 */ #if defined(__cplusplus) } diff --git a/lib/gis/parser_html.c b/lib/gis/parser_html.c index 581eaab2625..313b1e00748 100644 --- a/lib/gis/parser_html.c +++ b/lib/gis/parser_html.c @@ -3,7 +3,7 @@ \brief GIS Library - Argument parsing functions (HTML output) - (C) 2001-2024 by the GRASS Development Team + (C) 2001-2025 by the GRASS Development Team This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details. diff --git a/lib/gis/parser_rest_md.c b/lib/gis/parser_rest_md.c index 551c72b40e7..eada518c236 100644 --- a/lib/gis/parser_rest_md.c +++ b/lib/gis/parser_rest_md.c @@ -4,7 +4,7 @@ \brief GIS Library - Argument parsing functions (reStructuredText and Markdown output) - (C) 2012-2023 by the GRASS Development Team + (C) 2012-2024 by the GRASS Development Team This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details. @@ -20,6 +20,8 @@ #include "parser_local_proto.h" +#define MD_NEWLINE " " + static void usage_rest_md(bool rest); static void print_flag(const char *key, const char *label, const char *description, bool rest); @@ -41,7 +43,6 @@ void usage_rest_md(bool rest) struct Option *opt; struct Flag *flag; const char *type; - char *header = NULL; int new_prompt = 0; new_prompt = G__uses_new_gisprompt(); @@ -51,35 +52,17 @@ void usage_rest_md(bool rest) if (!st->pgm_name) st->pgm_name = "??"; - /* main header */ - G_asprintf(&header, "%s - GRASS GIS manual", st->pgm_name); - if (rest) { - size_t s; - fprintf(stdout, "%s\n", header); - for (s = 0; s < strlen(header); s++) { - fprintf(stdout, "="); - } - fprintf(stdout, "\n"); - } - else { - fprintf(stdout, "# %s\n", header); - } - fprintf(stdout, "\n"); + /* print metadata used by man/build*.py */ + fprintf(stdout, "---\n"); + fprintf(stdout, "name: %s\n", st->pgm_name); + fprintf(stdout, "description: %s\n", st->module_info.description); + fprintf(stdout, "keywords: "); + G__print_keywords(stdout, NULL, FALSE); + fprintf(stdout, "\n---\n\n"); - /* GRASS GIS logo */ - if (rest) { - fprintf(stdout, ".. 
image:: grass_logo.png\n"); - fprintf(stdout, " :align: center\n"); - fprintf(stdout, " :alt: GRASS logo\n"); - } - else { - fprintf(stdout, "![GRASS logo](./grass_logo.png)\n"); - } - /* horizontal line */ - fprintf(stdout, "\n---"); - if (rest) - fprintf(stdout, "-"); - fprintf(stdout, "\n\n"); + /* main header */ + if (!rest) + fprintf(stdout, "# %s\n\n", st->pgm_name); /* header - GRASS module */ if (!rest) @@ -88,7 +71,7 @@ void usage_rest_md(bool rest) if (rest) fprintf(stdout, "----"); fprintf(stdout, "\n"); - fprintf(stdout, "**%s**", st->pgm_name); + fprintf(stdout, "***%s***", st->pgm_name); if (st->module_info.label || st->module_info.description) fprintf(stdout, " - "); @@ -130,13 +113,13 @@ void usage_rest_md(bool rest) } fprintf(stdout, "**%s**", st->pgm_name); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); if (rest) fprintf(stdout, "| "); fprintf(stdout, "**%s --help**", st->pgm_name); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); if (rest) fprintf(stdout, "| "); @@ -219,7 +202,7 @@ void usage_rest_md(bool rest) while (st->n_flags && flag != NULL) { print_flag(&flag->key, flag->label, flag->description, rest); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); flag = flag->next_flag; } @@ -228,21 +211,21 @@ void usage_rest_md(bool rest) _("Allow output files to overwrite existing files"), rest); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); } } print_flag("help", NULL, _("Print usage summary"), rest); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); print_flag("verbose", NULL, _("Verbose module output"), rest); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); print_flag("quiet", NULL, _("Quiet module output"), rest); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); print_flag("ui", NULL, _("Force launching GUI dialog"), rest); fprintf(stdout, "\n"); @@ -263,7 +246,7 @@ void usage_rest_md(bool rest) opt = opt->next_opt; if (opt != NULL) { if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); } fprintf(stdout, "\n"); } @@ -284,7 +267,7 @@ void print_flag(const char *key, const char *label, const char *description, fprintf(stdout, "-"); fprintf(stdout, "-%s**", key); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); if (label != NULL) { if (rest) @@ -292,13 +275,15 @@ void print_flag(const char *key, const char *label, const char *description, print_escaped(stdout, "\t", rest); print_escaped(stdout, label, rest); if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); } - if (rest) - fprintf(stdout, "| "); - print_escaped(stdout, "\t", rest); - print_escaped(stdout, description, rest); + if (description != NULL) { + if (rest) + fprintf(stdout, "| "); + print_escaped(stdout, "\t", rest); + print_escaped(stdout, description, rest); + } } void print_option(const struct Option *opt, bool rest, char *image_spec_rest) @@ -341,7 +326,7 @@ void print_option(const struct Option *opt, bool rest, char *image_spec_rest) fprintf(stdout, " **[required]**"); } if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); if (opt->label) { if (rest) @@ -352,7 +337,7 @@ void print_option(const struct Option *opt, bool rest, char *image_spec_rest) if (opt->description) { if (opt->label) { if 
(!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); } if (rest) @@ -363,7 +348,7 @@ void print_option(const struct Option *opt, bool rest, char *image_spec_rest) if (opt->options) { if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); if (rest) fprintf(stdout, "| "); @@ -375,7 +360,7 @@ void print_option(const struct Option *opt, bool rest, char *image_spec_rest) if (opt->def) { if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); if (rest) fprintf(stdout, "| "); @@ -394,7 +379,7 @@ void print_option(const struct Option *opt, bool rest, char *image_spec_rest) while (opt->opts[i]) { if (opt->descs[i]) { if (!rest) - fprintf(stdout, "\\"); + fprintf(stdout, MD_NEWLINE); fprintf(stdout, "\n"); char *thumbnails = NULL; if (opt->gisprompt) { @@ -518,7 +503,7 @@ void print_escaped_for_md_keywords(FILE *f, const char *str) str_s = G_store(str); G_strip(str_s); - /* HTML link only for second keyword */ + /* HTML link only for second keyword = topic */ if (st->n_keys > 1 && strcmp(st->module_info.keywords[1], str) == 0) { const char *s; @@ -532,7 +517,7 @@ void print_escaped_for_md_keywords(FILE *f, const char *str) fputc(*s, f); } } - fprintf(f, ".html)"); + fprintf(f, ".md)"); } else { /* first and other than second keyword */ if (st->n_keys > 0 && strcmp(st->module_info.keywords[0], str) == 0) { @@ -547,13 +532,14 @@ void print_escaped_for_md_keywords(FILE *f, const char *str) fputc(*s, f); } } - fprintf(f, ".html)"); + fprintf(f, ".md)"); } else { - /* keyword index */ + /* keyword index, mkdocs expects dash */ char *str_link; - str_link = G_str_replace(str_s, " ", "%20"); - fprintf(f, "[%s](keywords.html#%s)", str_s, str_link); + str_link = G_str_replace(str_s, " ", "-"); + G_str_to_lower(str_link); + fprintf(f, "[%s](keywords.md#%s)", str_s, str_link); G_free(str_link); } } diff --git a/lib/gis/testsuite/test_gis_lib_getl.py b/lib/gis/testsuite/test_gis_lib_getl.py index 72e8992ad39..1ac9907f3d9 100644 --- a/lib/gis/testsuite/test_gis_lib_getl.py +++ b/lib/gis/testsuite/test_gis_lib_getl.py @@ -36,7 +36,8 @@ def read_lines_and_assert(self, get_line_function, newline): file_ptr = self.libc.fopen(str(self.file_path).encode("utf-8"), b"r") if not file_ptr: - raise FileNotFoundError(f"Could not open file: {self.file_path}") + msg = f"Could not open file: {self.file_path}" + raise FileNotFoundError(msg) try: buffer_size = 50 diff --git a/lib/gpde/n_arrays_io.c b/lib/gpde/n_arrays_io.c index eae8a9065c6..5836df00754 100644 --- a/lib/gpde/n_arrays_io.c +++ b/lib/gpde/n_arrays_io.c @@ -150,6 +150,7 @@ N_array_2d *N_read_rast_to_array_2d(char *name, N_array_2d *array) /* Close file */ Rast_close(map); + G_free(rast); return data; } @@ -219,6 +220,9 @@ void N_write_array_2d_to_rast(N_array_2d *array, char *name) /* Close file */ Rast_close(map); + G_free(rast); + G_free(frast); + G_free(drast); } /* ******************** 3D ARRAY FUNCTIONS *********************** */ diff --git a/lib/gpde/n_les_assemble.c b/lib/gpde/n_les_assemble.c index 0d90e09c4f7..fb08ed8046d 100644 --- a/lib/gpde/n_les_assemble.c +++ b/lib/gpde/n_les_assemble.c @@ -875,6 +875,8 @@ int N_les_integrate_dirichlet_2d(N_les *les, N_geom_data *geom, count++; } } + G_free(dvect1); + G_free(dvect2); return 0; } diff --git a/lib/htmldriver/htmldriver.html b/lib/htmldriver/htmldriver.html index 3e14f240805..a8ff2f07b43 100644 --- a/lib/htmldriver/htmldriver.html +++ b/lib/htmldriver/htmldriver.html @@ -53,20 +53,20 @@

      Environment variables

    34. GRASS_RENDER_HTMLTYPE=type
      type of image map to create (default is CLIENT):
      -   CLIENT    Netscape/IE client-side image map (NAME="map").
      -   APACHE    Apache/NCSA server-side image map.
      -   RAW       Raw url and polygon vertices (url  x1  y1  x2  y2  .....),
      -             suitable for conversion to CERN server format, or any other
      -             format with user supplied conversion program.
      +   CLIENT    Netscape/IE client-side image map (NAME="map").
      +   APACHE    Apache/NCSA server-side image map.
      +   RAW       Raw url and polygon vertices (url  x1  y1  x2  y2  .....),
      +             suitable for conversion to CERN server format, or any other
      +             format with user supplied conversion program.
    35. GRASS_RENDER_FILE=filename
      specifies the resulting file to store the html image map, default - is htmlmap. Files without absolute path names are written + is htmlmap. Files without absolute path names are written in the current directory where the driver was started.
      Any existing file of the same name is overwritten without @@ -75,21 +75,21 @@

      Environment variables

    36. GRASS_RENDER_HTMLMINDIST=n
      specifies the minimum distance in pixels that a point must change from the previous point to keep in the list of vertices for a - polygon. The default is 2, which means that a point's x + polygon. The default is 2, which means that a point's x and y difference from the previous point must change by a number of pixels greater than this value. This parameter helps to eliminate closely spaced points.
    37. GRASS_RENDER_HTMLMINBBOX=n
      specifies the minimum bounding box dimensions to record a polygon - as a clickable area. The default is 2, which means that a + as a clickable area. The default is 2, which means that a polygon with a bounding box of less than this value is not included. This parameter helps to eliminate polygons than are a point or line.
    38. GRASS_RENDER_HTMLMAXPOINTS=n
      specifies the maximum number of vertices included in a polygon's - clickable area. The default is 99. Some browsers can only + clickable area. The default is 99. Some browsers can only accommodate polygons of 100 vertices or less. The HTMLMAP driver automatically ensures that a polygon is closed by making the last point the same as the first point.
    39. @@ -172,7 +172,7 @@

      NOTES

      If you create an HTML file with two or more images and image maps, you will need to edit the map names. The HTML driver creates its map with the name -map. A small sed script can easily change the map name: +map. A small sed script can easily change the map name:

       sed -e 's/NAME="map"/NAME="foomap"/' < htmlmap > foomap.html
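      For readers unfamiliar with client-side image maps, the following minimal C sketch prints the kind of NAME="map" block that the sed one-liner above renames. The URL and polygon coordinates are made-up placeholders, and the exact markup written by the HTMLMAP driver may differ.

          /* Sketch: print a minimal client-side image map named "map",
           * i.e. the kind of block the sed one-liner above renames. */
          #include <stdio.h>

          int main(void)
          {
              /* Hypothetical polygon: a URL followed by x/y vertex pairs. */
              const char *url = "https://example.org/feature/42";
              const int coords[] = {10, 10, 120, 15, 110, 90, 12, 80};
              const int n = (int)(sizeof(coords) / sizeof(coords[0]));

              printf("<img src=\"map.png\" usemap=\"#map\">\n");
              printf("<map name=\"map\">\n");
              printf("  <area shape=\"poly\" href=\"%s\" coords=\"", url);
              for (int i = 0; i < n; i++)
                  printf("%d%s", coords[i], i + 1 < n ? "," : "");
              printf("\">\n</map>\n");
              return 0;
          }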
      @@ -185,7 +185,9 @@ 

      SEE ALSO

      PNG driver, HTML driver, variables -

      +
      +

      + d.rast, d.vect, d.mon, diff --git a/lib/init/Makefile b/lib/init/Makefile index f6983faada3..2db853fce95 100644 --- a/lib/init/Makefile +++ b/lib/init/Makefile @@ -47,8 +47,9 @@ ifneq ($(strip $(MINGW)),) endif HTMLFILES := $(wildcard *.html) +MDFILES := $(wildcard *.md) -default: $(FILES) $(patsubst %,$(HTMLDIR)/%,$(HTMLFILES)) +default: $(FILES) $(patsubst %,$(HTMLDIR)/%,$(HTMLFILES)) $(patsubst %,$(MDDIR)/source/%,$(MDFILES)) ifneq ($(strip $(MINGW)),) $(ARCH_BINDIR)/$(START_UP): grass.sh diff --git a/lib/init/grass.html b/lib/init/grass.html index 5b2d918fb12..18747b196ae 100644 --- a/lib/init/grass.html +++ b/lib/init/grass.html @@ -72,7 +72,7 @@

      Parameters:

      GISDBASE
      Initial database directory which should be a fully qualified path - (e.g., /usr/local/share/grassdata) + (e.g., /usr/local/share/grassdata)
      PROJECT
      Initial project directory which is a subdirectory of GISDBASE @@ -128,15 +128,15 @@

      Config flag

      version parameters, with the options:
      -  • arch: system architecture (e.g., x86_64-pc-linux-gnu)
      -  • build: (e.g., ./configure --with-cxx --enable-largefile --with-proj [...])
      -  • compiler: (e.g., gcc)
      -  • date: (e.g., 2024-04-10T11:44:54+00:00)
      -  • path: (e.g., /usr/lib64/grass)
      -  • python_path: (e.g., /usr/lib64/grass/etc/python)
      -  • revision: (e.g., 745ee7ec9)
      -  • svn_revision: (e.g., 062bffc8)
      -  • version: (e.g., 8.4.0)
      +  • arch: system architecture (e.g., x86_64-pc-linux-gnu)
      +  • build: (e.g., ./configure --with-cxx --enable-largefile --with-proj [...])
      +  • compiler: (e.g., gcc)
      +  • date: (e.g., 2024-04-10T11:44:54+00:00)
      +  • path: (e.g., /usr/lib64/grass)
      +  • python_path: (e.g., /usr/lib64/grass/etc/python)
      +  • revision: (e.g., 745ee7ec9)
      +  • svn_revision: (e.g., 062bffc8)
      +  • version: (e.g., 8.4.0)

      SAMPLE DATA

      @@ -157,7 +157,7 @@

      ENVIRONMENT VARIABLES

      bother with these.

      In addition to these shell environment variables GRASS maintains a -number of GIS environment variables in the $HOME/.grass8/rc +number of GIS environment variables in the $HOME/.grass8/rc file. User changes to this file will be read during the next startup of GRASS. If this file becomes corrupted the user may edit it by hand or remove it to start afresh. See the list @@ -167,7 +167,7 @@

      ENVIRONMENT VARIABLES

      Note that you will need to set these variables using the appropriate method required for the UNIX shell that you use (e.g. in a -Bash shell you must export the variables for them to +Bash shell you must export the variables for them to propagate).

      User Interface Environment Variable

      @@ -176,28 +176,28 @@

      User Interface Environment Variable

      environment variable called GRASS_GUI which indicates the type of user interface for GRASS to use. If this variable is not set when grass is run, then it will be created and then saved -in the $HOME/.grass8/rc file for the next time GRASS is -run. It can be set to text, gtext or gui. +in the $HOME/.grass8/rc file for the next time GRASS is +run. It can be set to text, gtext or gui.

      There is an order of precedence in the way grass determines the user interface to use. The following is the hierarchy from highest precedence to lowest.

      -  1. Command line argument
      -  2. Environment variable GRASS_GUI
      -  3. Value set in $HOME/.grass8/rc (GUI)
      -  4. Default value - gui
      +  1. Command line argument
      +  2. Environment variable GRASS_GUI
      +  3. Value set in $HOME/.grass8/rc (GUI)
      +  4. Default value - gui
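      A minimal C sketch of that precedence order is shown below; the helper name and the hard-coded rc value are illustrative only and do not come from the GRASS startup code.

          /* Minimal sketch of the GUI selection order described above:
           * command line > GRASS_GUI environment variable > rc file value > "gui". */
          #include <stdio.h>
          #include <stdlib.h>

          /* cli_choice: value of a command-line option, or NULL if absent.
           * rc_choice:  the GUI value read from $HOME/.grass8/rc, or NULL. */
          static const char *choose_gui(const char *cli_choice, const char *rc_choice)
          {
              const char *env_choice = getenv("GRASS_GUI");

              if (cli_choice && *cli_choice)
                  return cli_choice;        /* 1. command line argument */
              if (env_choice && *env_choice)
                  return env_choice;        /* 2. GRASS_GUI environment variable */
              if (rc_choice && *rc_choice)
                  return rc_choice;         /* 3. value stored in the rc file */
              return "gui";                 /* 4. default */
          }

          int main(void)
          {
              /* Pretend nothing was given on the command line and the rc file says "text". */
              printf("interface: %s\n", choose_gui(NULL, "text"));
              return 0;
          }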

      Python Environment Variables

      If you choose to use wxGUI interface, then the GRASS_PYTHON environment variable can be used to -override your system default python command. +override your system default python command.

      Suppose for example your system has Python 3.6 installed and you install a personal version of the Python 3.8 binaries -under $HOME/bin. You can use the above variables to have +under $HOME/bin. You can use the above variables to have GRASS use the Python 3.8 binaries instead.
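      As a rough sketch of that override, the C snippet below falls back to a plain python command when GRASS_PYTHON is unset; it only illustrates the documented behaviour and is not the wxGUI startup logic.

          /* Sketch: prefer the interpreter named in GRASS_PYTHON, otherwise use "python". */
          #include <stdio.h>
          #include <stdlib.h>

          int main(void)
          {
              const char *python = getenv("GRASS_PYTHON");

              if (!python || !*python)
                  python = "python";  /* documented default command name */

              printf("would launch the GUI with: %s\n", python);
              return 0;
          }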

      @@ -233,8 +233,8 @@ 

      Addon Base for Extra Local GRASS Addon Modules

      path environment.

      If not defined by user, this variable is set by GRASS startup program -to $HOME/.grass8/addons on GNU/Linux -and %APPDATA%\Roaming\GRASS8\addons on MS Windows. +to $HOME/.grass8/addons on GNU/Linux +and %APPDATA%\Roaming\GRASS8\addons on MS Windows.

      HTML Browser Variable

      @@ -455,20 +455,20 @@

      Using temporary mapset

      Troubleshooting

      -Importantly, to avoid an "[Errno 8] Exec format error" there must be a +Importantly, to avoid an "[Errno 8] Exec format error" there must be a shebang line at the top of -the script (like #!/bin/sh, #!/bin/bash, or #!/usr/bin/env python3) +the script (like #!/bin/sh, #!/bin/bash, or #!/usr/bin/env python3) indicating which interpreter to be used for the script. The script file must have its executable bit set.

      CAVEAT

      If you start GRASS using the wxGUI -interface you must have a python command in your $PATH +interface you must have a python command in your $PATH variable. That is, the command must be named -python and not something like python3.6. Rarely some -Python installations do not create a python command. In these -cases you can override python by GRASS_PYTHON environmental +python and not something like python3.6. Rarely some +Python installations do not create a python command. In these +cases you can override python by GRASS_PYTHON environmental variable.

      Furthermore, if you have more than one version of Python installed, diff --git a/lib/init/grass.py b/lib/init/grass.py index 505d48dadae..c2ac803e6a2 100755 --- a/lib/init/grass.py +++ b/lib/init/grass.py @@ -18,7 +18,7 @@ # command line options for setting the GISDBASE, LOCATION, # and/or MAPSET. Finally it starts GRASS with the appropriate # user interface and cleans up after it is finished. -# COPYRIGHT: (C) 2000-2024 by the GRASS Development Team +# COPYRIGHT: (C) 2000-2025 by the GRASS Development Team # # This program is free software under the GNU General # Public License (>=v2). Read the file COPYING that @@ -231,7 +231,8 @@ def f(fmt, *args): matches.append(m) if len(matches) != len(args): - raise Exception("The numbers of format specifiers and arguments do not match") + msg = "The numbers of format specifiers and arguments do not match" + raise Exception(msg) i = len(args) - 1 for m in reversed(matches): @@ -1015,7 +1016,7 @@ def load_env(grass_env_file): # Regular expression for lines starting with "export var=val" (^export # lines below). Environment variables should start with a-zA-Z or _. # \1 and \2 are a variable name and its value, respectively. - export_re = re.compile("^export[ \t]+([a-zA-Z_]+[a-zA-Z0-9_]*)=(.*?)[ \t]*$") + export_re = re.compile(r"^export[ \t]+([a-zA-Z_]+[a-zA-Z0-9_]*)=(.*?)[ \t]*$") for line in readfile(grass_env_file).splitlines(): # match ^export lines @@ -1610,9 +1611,8 @@ def sh_like_startup(location, location_name, grass_env_file, sh): shrc = ".zshrc" grass_shrc = ".grass.zshrc" else: - raise ValueError( - "Only bash-like and zsh shells are supported by sh_like_startup()" - ) + msg = "Only bash-like and zsh shells are supported by sh_like_startup()" + raise ValueError(msg) # save command history in mapset dir and remember more # bash history file handled in specific_addition @@ -2106,10 +2106,11 @@ def find_grass_python_package(): # now we can import stuff from grass package else: # Not translatable because we don't have translations loaded. - raise RuntimeError( + msg = ( "The grass Python package is missing. " "Is the installation of GRASS GIS complete?" ) + raise RuntimeError(msg) def main(): diff --git a/lib/init/grass.sh b/lib/init/grass.sh index 2a83e7448c3..33630b14362 100755 --- a/lib/init/grass.sh +++ b/lib/init/grass.sh @@ -13,7 +13,7 @@ # setting the GISDBASE, LOCATION, and/or MAPSET. # Finally it starts GRASS with the appropriate user # interface and cleans up after it is finished. -# COPYRIGHT: (C) 2000-2024 by the GRASS Development Team +# COPYRIGHT: (C) 2000-2025 by the GRASS Development Team # # This program is free software under the GNU General # Public License (>=v2). Read the file COPYING that diff --git a/lib/init/variables.html b/lib/init/variables.html index 5dfd113dd85..4bd369abf4b 100644 --- a/lib/init/variables.html +++ b/lib/init/variables.html @@ -1,5 +1,11 @@ -There are two types of variables: + +A variable in scripting is a symbolic name that holds data which can be +used and modified during script execution. Variables allow scripts to +store and manipulate values dynamically, making them more flexible and +reusable. + +In GRASS GIS, there are two types of variables:

      • shell environment variables,
      • @@ -42,18 +48,18 @@

        Setting shell environment variables

      • To get personal BASH shell definitions (aliases, color listing option, ...) into GRASS, store them in:
        -$HOME/.grass8/bashrc
      • +$HOME/.grass8/bashrc
      • To get personal CSH shell definitions (aliases, color listing option, ...) into GRASS, store them in:
        -$HOME/.grass8/cshrc
      • +$HOME/.grass8/cshrc

      Setting GRASS gisenv variables

      Use g.gisenv within GRASS. This permanently -predefines GRASS variables in the $HOME/.grass8/rc file (Linux, Mac, BSD, ...) -or in the %APPDATA%\Roaming\GRASS8\rc file (Windows) after the +predefines GRASS variables in the $HOME/.grass8/rc file (Linux, Mac, BSD, ...) +or in the %APPDATA%\Roaming\GRASS8\rc file (Windows) after the current GRASS session is closed.

      Usage: @@ -83,7 +89,7 @@

      List of selected (GRASS related) shell environment variables

      startup script.
      GISRC
      -
      name of $HOME/.grass8/rc file. Defines the system wide value +
      name of $HOME/.grass8/rc file. Defines the system wide value when starting a GRASS session. Within a GRASS session, a temporary copy of this file will be used.
      @@ -95,10 +101,10 @@

      List of selected (GRASS related) shell environment variables

      GRASS_ADDON_BASE
      [grass startup script]
      allows specifying additional GISBASE for local GRASS modules (normally installed as GRASS Addons - by g.extension module) extra to standard + by g.extension module) extra to standard distribution. The default on GNU/Linux - is $HOME/.grass8/addons, on MS - Windows %APPDATA%\Roaming\GRASS8\addons.
      + is $HOME/.grass8/addons, on MS + Windows %APPDATA%\Roaming\GRASS8\addons.
      GRASS_ADDON_ETC
      [libgis, g.findetc]
      @@ -108,9 +114,9 @@

      List of selected (GRASS related) shell environment variables

      GRASS_COMPATIBILITY_TEST
      [libgis]
      By default it is not possible to run C modules with a libgis that has a - different GIS_H_VERSION, the compatibility test will exit with a + different GIS_H_VERSION, the compatibility test will exit with a fatal error. Setting this variable to 0 (zero) with - GRASS_COMPATIBILITY_TEST=0 allows the test to be passed with a + GRASS_COMPATIBILITY_TEST=0 allows the test to be passed with a warning.
      GRASS_COMPRESSOR
      @@ -119,7 +125,7 @@

      List of selected (GRASS related) shell environment variables

      environment variable GRASS_COMPRESSOR. Supported methods are RLE, ZLIB, LZ4, BZIP2, and ZSTD. The default is ZSTD if available, otherwise ZLIB, which can be changed with e.g. - GRASS_COMPRESSOR=ZSTD, granted that GRASS has been + GRASS_COMPRESSOR=ZSTD, granted that GRASS has been compiled with the requested compressor. Compressors that are always available are RLE, ZLIB, and LZ4. The compressors BZIP2 and ZSTD must be enabled when configuring GRASS for compilation. @@ -128,8 +134,8 @@

      List of selected (GRASS related) shell environment variables

      [grass startup script]
      specifies root path for GRASS configuration directory. If not specified, the default placement of the - configuration directory is used: $HOME on GNU/Linux, - $HOME/Library on Mac OS X, and %APPDATA% on MS Windows.
      + configuration directory is used: $HOME on GNU/Linux, + $HOME/Library on Mac OS X, and %APPDATA% on MS Windows.
      GRASS_DB_ENCODING
      [various modules, wxGUI]
      @@ -139,7 +145,7 @@

      List of selected (GRASS related) shell environment variables

      If set, GIS_ERROR_LOG should be the absolute path to the log file (a relative path will be interpreted relative to the process' cwd, not the cwd at the point the user set the variable). If not - set, $HOME/GIS_ERROR_LOG is used instead. The file will + set, $HOME/GIS_ERROR_LOG is used instead. The file will only be used if it already exists.
      GRASS_ERROR_MAIL
      @@ -149,7 +155,7 @@

      List of selected (GRASS related) shell environment variables

      GRASS_FONT
      [display drivers]
      specifies the font as either the name of a font from - $GISBASE/etc/fontcap (or alternative fontcap file + $GISBASE/etc/fontcap (or alternative fontcap file specified by GRASS_FONT_CAP), or alternatively the full path to a FreeType font file.
      @@ -160,7 +166,7 @@

      List of selected (GRASS related) shell environment variables

      GRASS_FONT_CAP
      [g.mkfontcap, d.font, display drivers]
      - specifies an alternative location (to $GISBASE/etc/fontcap) for + specifies an alternative location (to $GISBASE/etc/fontcap) for the font configuration file.
      GRASS_FULL_OPTION_NAMES
      @@ -169,10 +175,10 @@

      List of selected (GRASS related) shell environment variables

      a found string is not an exact match for the given string.
      GRASS_GUI
      -
      either text (text user interface), gtext (text - user interface with GUI welcome screen), or gui (graphical +
      either text (text user interface), gtext (text + user interface with GUI welcome screen), or gui (graphical user interface) to define non-/graphical startup. Can also specify - the name of the GUI to use, e.g. wxpython + the name of the GUI to use, e.g. wxpython (wxGUI). Also exists as a GRASS gisenv variable (see below). If this shell variable exists at GRASS startup, it will determine the GUI used. If it is not defined @@ -198,11 +204,11 @@

      List of selected (GRASS related) shell environment variables

      using RLE compression.

      If the variable doesn't exist, or the value is non-zero, zlib compression - will be used instead. Such rasters will have a compressed + will be used instead. Such rasters will have a compressed value of 2 in the cellhd file.

      Obviously, decompression is controlled by the - raster's compressed value, not the environment variable.
      + raster's compressed value, not the environment variable.
      GRASS_ZLIB_LEVEL
      [libgis]
      if the environment variable GRASS_ZLIB_LEVEL exists and its value can @@ -210,9 +216,9 @@

      List of selected (GRASS related) shell environment variables

      raster maps are compressed using zlib compression. This applies to all raster map types (CELL, FCELL, DCELL).

      - Valid zlib compression levels are -1 to 9. The GRASS_ZLIB_LEVEL=-1 corresponds - to the zlib default value (equivalent to GRASS_ZLIB_LEVEL=6). Often - GRASS_ZLIB_LEVEL=1 gives the best compromise between speed and compression. + Valid zlib compression levels are -1 to 9. The GRASS_ZLIB_LEVEL=-1 corresponds + to the zlib default value (equivalent to GRASS_ZLIB_LEVEL=6). Often + GRASS_ZLIB_LEVEL=1 gives the best compromise between speed and compression.

      If the variable doesn't exist, or the value cannot be parsed as an integer, zlib's default compression level 6 will be used.
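      The documented fallback rule can be sketched in a few lines of C. This is an illustration only, assuming out-of-range values also fall back to the default; it is not the libgis implementation.

          /* Sketch of the documented GRASS_ZLIB_LEVEL handling. */
          #include <stdio.h>
          #include <stdlib.h>

          static int zlib_level(void)
          {
              const char *val = getenv("GRASS_ZLIB_LEVEL");
              char *end = NULL;
              long level;

              if (!val || !*val)
                  return 6;            /* variable not set: zlib default */

              level = strtol(val, &end, 10);
              if (*end != '\0')
                  return 6;            /* not an integer: fall back to 6 */
              if (level == -1)
                  return 6;            /* -1 maps to the zlib default level */
              if (level < -1 || level > 9)
                  return 6;            /* out of range: keep the default (illustrative choice) */
              return (int)level;
          }

          int main(void)
          {
              printf("zlib compression level: %d\n", zlib_level());
              return 0;
          }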
      @@ -221,29 +227,29 @@

      List of selected (GRASS related) shell environment variables

      [various modules, wxGUI]
      it may be set to either
      -  • standard - sets percentage output and message formatting style to standard formatting,
      -  • gui - sets percentage output and message formatting style to GUI formatting,
      -  • silent - disables percentage output and error messages,
      -  • plain - sets percentage output and message formatting style to ASCII output without rewinding control characters.
      +  • standard - sets percentage output and message formatting style to standard formatting,
      +  • gui - sets percentage output and message formatting style to GUI formatting,
      +  • silent - disables percentage output and error messages,
      +  • plain - sets percentage output and message formatting style to ASCII output without rewinding control characters.
      GRASS_MOUSE_BUTTON
      [various modules]
      swaps mouse buttons for two-button or left-handed mice. Its value has three digits 1, 2, and 3, which represent default left, middle, and right buttons - respectively. Setting to 132 will swap middle and right + respectively. Setting to 132 will swap middle and right buttons. Note that this variable should be set before a display driver is initialized (e.g., - d.mon x0).
      + d.mon x0).
      GRASS_PAGER
      [various modules]
      - it may be set to either less, more, or cat.
      + it may be set to either less, more, or cat.
      GRASS_PERL
      [used during install process for generating man pages]
      @@ -252,7 +258,7 @@

      List of selected (GRASS related) shell environment variables

      GRASS_PROXY
      [used during addon install/reinstall process for generating man pages (download commit from GitHub API server and remote modules.xml file)]
      - set the proxy with: GRASS_PROXY="http=<value>,ftp=<value>".
      + set the proxy with: GRASS_PROXY="http=<value>,ftp=<value>".
      GRASS_SKIP_MAPSET_OWNER_CHECK
      By default it is not possible to work with MAPSETs that are @@ -272,7 +278,7 @@

      List of selected (GRASS related) shell environment variables

      GRASS_PYTHON
      [wxGUI, Python Ctypes]
      set to override Python executable.
      - On Mac OS X this should be the pythonw executable for the + On Mac OS X this should be the pythonw executable for the wxGUI to work.
      GRASS_VECTOR_LOWMEM
      @@ -315,15 +321,14 @@

      List of selected (GRASS related) shell environment variables

      vector maps will be read (if found) also from this directory. It may be set to either:
        -
      • keep - the temporary vector map is not deleted when - closing the map. -
      • move - the temporary vector map is moved to the +
      • keep - the temporary vector map is not deleted when + closing the map.
      • +
      • move - the temporary vector map is moved to the current mapset when closing the map.
      • -
      • delete - the temporary vector map is deleted when - closing the map. -
      • +
      • delete - the temporary vector map is deleted when + closing the map.
      - Default value is keep. + Default value is keep. Note that temporary vector maps are not visible to the user via g.list @@ -333,7 +338,7 @@

      List of selected (GRASS related) shell environment variables

      GRASS_VECTOR_TMPDIR_MAPSET
      [vectorlib]
      By default GRASS temporary directory is located in - $LOCATION/$MAPSET/.tmp/$HOSTNAME. If GRASS_VECTOR_TMPDIR_MAPSET is + $LOCATION/$MAPSET/.tmp/$HOSTNAME. If GRASS_VECTOR_TMPDIR_MAPSET is set to '0', the temporary directory is located in TMPDIR (environmental variable defined by the user or GRASS initialization script if not given).
      @@ -393,11 +398,11 @@

      List of selected (GRASS related) shell environment variables

      TMPDIR, TEMP, TMP
      [Various GRASS GIS commands and wxGUI]
      - The default wxGUI temporary directory is chosen from a - platform-dependent list, but the user can control the selection of - this directory by setting one of the TMPDIR, TEMP or TMP - environment variables Hence the wxGUI uses $TMPDIR if it is set, - then $TEMP, otherwise /tmp.
      + The default wxGUI temporary directory is chosen from a + platform-dependent list, but the user can control the selection of + this directory by setting one of the TMPDIR, TEMP or TMP + environment variables Hence the wxGUI uses $TMPDIR if it is set, + then $TEMP, otherwise /tmp.

      List of selected GRASS environment variables for rendering

      @@ -474,7 +479,7 @@

      List of selected internal GRASS environment variables

      This variable is automatically created by g.parser so that the - --overwrite option will + --overwrite option will be inherited by dependent modules as the script runs. Setting either the GRASS_OVERWRITE environment variable or the OVERWRITE gisenv variable detailed below will cause maps with identical names to be overwritten. @@ -490,7 +495,7 @@

      List of selected internal GRASS environment variables

    40. 3 - additional verbose messages are printed
    41. This variable is automatically created by g.parser - so that the --verbose or --quiet flags will be inherited + so that the --verbose or --quiet flags will be inherited by dependent modules as the script runs.
      GRASS_REGION
      @@ -502,7 +507,7 @@

      List of selected internal GRASS environment variables

      WIND_OVERRIDE
      [libgis]
      it causes programs to use the specified named region (created with - e.g. g.region save=...) to be used as the current region, instead of + e.g. g.region save=...) to be used as the current region, instead of the region from the WIND file.

      This allows programs such as the GUI to run external commands on an alternate region without having to modify the WIND file then change it @@ -536,7 +541,7 @@

      List of selected GRASS gisenv variables


      process id of the start-up shell script
      GUI
      -
      See GRASS_GUI environmental variable for details.
      +
      See GRASS_GUI environmental variable for details.
      LOCATION
      full path to project (previously called location) directory
      @@ -571,7 +576,7 @@

      List of selected GRASS gisenv variables

      This variable is automatically created by g.parser so that the - --overwrite option will + --overwrite option will be inherited by dependent modules as the script runs. Setting either the GRASS_OVERWRITE environment variable or the OVERWRITE gisenv variable detailed below will cause maps with identical names to be overwritten. @@ -580,27 +585,27 @@

      List of selected GRASS gisenv variables

      GRASS-related Files

      -
      $HOME/.grass8/rc
      +
      $HOME/.grass8/rc
      stores the GRASS gisenv variables (not shell environment variables)
      -
      $HOME/.grass8/bashrc
      +
      $HOME/.grass8/bashrc
      stores the shell environment variables (Bash only)
      -
      $HOME/.grass8/env.bat
      +
      $HOME/.grass8/env.bat
      stores the shell environment variables (MS Windows only)
      -
      $HOME/.grass8/login
      +
      $HOME/.grass8/login
      stores the DBMI passwords in this hidden file. Only the file owner can access this file.
      -
      $HOME/GIS_ERROR_LOG
      +
      $HOME/GIS_ERROR_LOG
      if this file exists then all GRASS error and warning messages are logged here. Applies to current user. To generate the file, use: - touch $HOME/GIS_ERROR_LOG
      + touch $HOME/GIS_ERROR_LOG
      See also GIS_ERROR_LOG variable.
      -Note: On MS Windows the files are stored in %APPDATA%. +Note: On MS Windows the files are stored in %APPDATA%.

      SEE ALSO

      diff --git a/lib/pngdriver/pngdriver.html b/lib/pngdriver/pngdriver.html index d0eab9f4125..f22834c0c2e 100644 --- a/lib/pngdriver/pngdriver.html +++ b/lib/pngdriver/pngdriver.html @@ -41,7 +41,7 @@

      Environment variables

      sets true-color support. Default is TRUE.
    42. GRASS_RENDER_FILE=filename
      - the filename to put the resulting image in, default is map.png. + the filename to put the resulting image in, default is map.png. If you set GRASS_RENDER_FILE to a filename which ends in ".ppm", a PPM file will be created (with alpha channel stored in a PGM image, if applicable). If you set GRASS_RENDER_FILE to a filename which ends in ".bmp", a 32-bpp @@ -51,11 +51,11 @@

      Environment variables

      compression level of PNG files (0 = none, 1 = fastest, 9 = best, default is 6)
    43. GRASS_RENDER_FILE_READ
      - if TRUE, the PNG driver will initialize the image from + if TRUE, the PNG driver will initialize the image from the contents of GRASS_RENDER_FILE.
    44. GRASS_RENDER_FILE_MAPPED
      - if TRUE, the PNG driver + if TRUE, the PNG driver will map GRASS_RENDER_FILE as its framebuffer, rather than using memory. This only works with BMP files.
    45. @@ -72,7 +72,7 @@

      Example

      d.vect roadsmajor color=red
      -This writes a file named map.png in your current directory. +This writes a file named map.png in your current directory.
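      The extension-based behaviour described for GRASS_RENDER_FILE above can be pictured with a small C sketch; the printed format labels are just descriptions, not the driver's internal identifiers.

          /* Sketch: pick an output format from the GRASS_RENDER_FILE extension,
           * following the rules documented above (".ppm" -> PPM, ".bmp" -> BMP, else PNG). */
          #include <stdio.h>
          #include <stdlib.h>
          #include <string.h>

          static const char *render_format(const char *filename)
          {
              const char *dot = strrchr(filename, '.');

              if (dot && strcmp(dot, ".ppm") == 0)
                  return "PPM (alpha stored in a separate PGM image)";
              if (dot && strcmp(dot, ".bmp") == 0)
                  return "32-bpp BMP";
              return "PNG";
          }

          int main(void)
          {
              const char *file = getenv("GRASS_RENDER_FILE");

              if (!file || !*file)
                  file = "map.png";   /* documented default file name */

              printf("%s -> %s\n", file, render_format(file));
              return 0;
          }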

      NOTES

      @@ -83,14 +83,14 @@

      NOTES

      be installed for the PNG driver to work (it's worth it).

      The resolution of the output images is defined by current region -extents. Use g.region -p to get the number of rows and cols +extents. Use g.region -p to get the number of rows and cols and use the environment variables to set the image size. If you would like a larger image, multiply both rows and cols by the same whole number to preserve the aspect ratio.

      Further PNG file processing (e.g. quantization to 1 bit for monochrome -images) can be done with pnmquant of -the netpbm tools. +images) can be done with pnmquant of +the netpbm tools.

      SEE ALSO

      @@ -99,7 +99,9 @@

      SEE ALSO

      PS driver, HTML driver, variables -

      +
      +

      + d.rast, d.vect, d.mon, diff --git a/lib/psdriver/psdriver.html b/lib/psdriver/psdriver.html index 3cc9bf7404f..da593702444 100644 --- a/lib/psdriver/psdriver.html +++ b/lib/psdriver/psdriver.html @@ -38,16 +38,16 @@

      Environment variables

      fit a standard paper size, see also GRASS_RENDER_WIDTH, GRASS_RENDER_HEIGHT.
    46. GRASS_RENDER_PS_LANDSCAPE
      - if TRUE, the screen is rotated 90 degrees + if TRUE, the screen is rotated 90 degrees counter-clockwise so that a "landscape" screen fits better on "portrait" paper.
    47. GRASS_RENDER_PS_HEADER
      - if FALSE, the output is appended to any existing file, + if FALSE, the output is appended to any existing file, and no prolog or setup sections are generated.
    48. GRASS_RENDER_PS_TRAILER
      - if FALSE, no trailer section is generated. + if FALSE, no trailer section is generated.
    49. @@ -61,12 +61,12 @@

      Example

      d.rast elevation d.vect roadsmajor color=red
      -This writes a file named map.ps in your current directory. +This writes a file named map.ps in your current directory.

      NOTES

      The resolution of the output files is defined by current region -extents. Use g.region -p to get the number of rows and cols +extents. Use g.region -p to get the number of rows and cols and use the environment variables to set the image size. If you would like a larger image, multiply both rows and cols by the same whole number to preserve the aspect ratio. @@ -75,7 +75,7 @@

      NOTES

      colorimage and setrgbcolor operators (this is the case for colour printers which pre-date level 2 PostScript). -

      Masked images (d.rast, d.rgb, d.his -n) +

      Masked images (d.rast, d.rgb, d.his -n) require PostScript level 3.

      SEE ALSO

      @@ -85,7 +85,9 @@

      SEE ALSO

      PNG driver, HTML driver, variables -

      +
      +

      + d.rast, d.vect, d.mon, diff --git a/lib/raster/put_title.c b/lib/raster/put_title.c index 8d0d4f0b43f..346bf12f23d 100644 --- a/lib/raster/put_title.c +++ b/lib/raster/put_title.c @@ -35,6 +35,7 @@ int Rast_put_cell_title(const char *name, const char *title) if (!out) { fclose(in); G_warning(_("G_put_title - can't create a temp file")); + G_free(tempfile); return -1; } @@ -52,12 +53,15 @@ int Rast_put_cell_title(const char *name, const char *title) if (line < 3) { G_warning(_("category information for [%s] in [%s] invalid"), name, mapset); + remove(tempfile); + G_free(tempfile); return -1; } in = fopen(tempfile, "r"); if (!in) { G_warning(_("G_put_title - can't reopen temp file")); + G_free(tempfile); return -1; } @@ -66,6 +70,8 @@ int Rast_put_cell_title(const char *name, const char *title) fclose(in); G_warning(_("can't write category information for [%s] in [%s]"), name, mapset); + remove(tempfile); + G_free(tempfile); return -1; } @@ -75,6 +81,7 @@ int Rast_put_cell_title(const char *name, const char *title) fclose(in); fclose(out); remove(tempfile); + G_free(tempfile); return 1; } diff --git a/lib/raster3d/index.c b/lib/raster3d/index.c index a73e12219c0..6ddbbb083a6 100644 --- a/lib/raster3d/index.c +++ b/lib/raster3d/index.c @@ -37,6 +37,7 @@ static int Rast3d_readIndex(RASTER3D_Map *map) if (indexLength == map->indexLongNbytes * map->nTiles) { if (read(map->data_fd, tmp, indexLength) != indexLength) { Rast3d_error("Rast3d_readIndex: can't read file"); + Rast3d_free(tmp); return 0; } } @@ -52,6 +53,7 @@ static int Rast3d_readIndex(RASTER3D_Map *map) tmp2 = Rast3d_malloc(indexLength); if (tmp2 == NULL) { Rast3d_error("Rast3d_readIndex: error in Rast3d_malloc"); + Rast3d_free(tmp); return 0; } } @@ -60,6 +62,8 @@ static int Rast3d_readIndex(RASTER3D_Map *map) if (read(map->data_fd, tmp2, indexLength) != indexLength) { Rast3d_error("Rast3d_readIndex: can't read file"); + Rast3d_free(tmp); + Rast3d_free(tmp2); return 0; } @@ -117,6 +121,7 @@ int Rast3d_flush_index(RASTER3D_Map *map) indexLength = map->nTiles * sizeof(long); if (write(map->data_fd, tmp, indexLength) != indexLength) { Rast3d_error("Rast3d_flush_index: can't write file"); + Rast3d_free(tmp); return 0; } diff --git a/lib/rst/interp_float/segmen2d.c b/lib/rst/interp_float/segmen2d.c index f2b13951691..1430bf58320 100644 --- a/lib/rst/interp_float/segmen2d.c +++ b/lib/rst/interp_float/segmen2d.c @@ -70,7 +70,7 @@ int IL_interp_segments_2d( static double smseg; int MINPTS; double pr; - struct triple *point; + struct triple *point = NULL; struct triple skip_point; int m_skip, skip_index, j, k, segtest; double xx, yy /*, zz */; @@ -283,8 +283,11 @@ int IL_interp_segments_2d( } else if (segtest == 1) { if (params->matrix_create(params, data->points, - data->n_points - 1, matrix, indx) < 0) + data->n_points - 1, matrix, + indx) < 0) { + G_free(point); return -1; + } } if (!params->cv) { for (i = 0; i < data->n_points; i++) @@ -332,6 +335,7 @@ int IL_interp_segments_2d( G_free(data->points); G_free(data); } + G_free(point); return 1; } diff --git a/lib/vector/Vlib/close.c b/lib/vector/Vlib/close.c index 573d75068b0..7e80e876393 100644 --- a/lib/vector/Vlib/close.c +++ b/lib/vector/Vlib/close.c @@ -98,7 +98,7 @@ int Vect_close(struct Map_info *Map) Vect_copy_map_dblinks(Map, &Out, TRUE); /* afterwords, dblinks must be removed from temporary map otherwise when deleting temporary map also original - attribute tables would be deteled */ + attribute tables would be deleted */ Vect_map_del_dblink(Map, -1); /* delete db links for 
all layers */ if (0 != Vect_copy_map_lines_field( diff --git a/lib/vector/Vlib/dbcolumns.c b/lib/vector/Vlib/dbcolumns.c index e3cf915a9ec..f0c6f5bc514 100644 --- a/lib/vector/Vlib/dbcolumns.c +++ b/lib/vector/Vlib/dbcolumns.c @@ -154,7 +154,7 @@ const char *Vect_get_column_names_types(struct Map_info *Map, int field) dbHandle handle; dbString table_name; dbTable *table; - const char **col_type_names; + char **col_type_names; char *list; num_dblinks = Vect_get_num_dblinks(Map); @@ -180,16 +180,21 @@ const char *Vect_get_column_names_types(struct Map_info *Map, int field) ncols = db_get_table_number_of_columns(table); col_type_names = G_malloc(ncols * sizeof(char *)); for (col = 0; col < ncols; col++) { - char buf[256]; + col_type_names[col] = (char *)G_calloc(256, sizeof(char)); - sprintf(buf, "%s(%s)", + sprintf(col_type_names[col], "%s(%s)", db_get_column_name(db_get_table_column(table, col)), db_sqltype_name( db_get_column_sqltype(db_get_table_column(table, col)))); - col_type_names[col] = buf; } - if ((list = G_str_concat(col_type_names, ncols, ",", BUFF_MAX)) == NULL) + + if ((list = G_str_concat((const char **)col_type_names, ncols, ",", + BUFF_MAX)) == NULL) list = G_store(""); + + for (col = 0; col < ncols; col++) { + G_free(col_type_names[col]); + } G_free(col_type_names); G_debug(3, "%s", list); diff --git a/lib/vector/Vlib/remove_duplicates.c b/lib/vector/Vlib/remove_duplicates.c index e15f7ccedb3..6fd90166a85 100644 --- a/lib/vector/Vlib/remove_duplicates.c +++ b/lib/vector/Vlib/remove_duplicates.c @@ -197,6 +197,11 @@ void Vect_remove_duplicates(struct Map_info *Map, int type, } } G_verbose_message(_("Removed duplicates: %d"), ndupl); + Vect_destroy_line_struct(APoints); + Vect_destroy_line_struct(BPoints); + Vect_destroy_cats_struct(ACats); + Vect_destroy_cats_struct(BCats); + Vect_destroy_boxlist(List); } /*! diff --git a/lib/vector/Vlib/write_sfa.c b/lib/vector/Vlib/write_sfa.c index 75754dd7645..3b7f1af6339 100644 --- a/lib/vector/Vlib/write_sfa.c +++ b/lib/vector/Vlib/write_sfa.c @@ -339,6 +339,7 @@ void V2__add_line_to_topo_sfa(struct Map_info *Map, int line, G_debug(3, "V2__add_line_to_topo_sfa(): line = %d npoints = %d", line, points->n_points); + first = TRUE; plus = &(Map->plus); Line = plus->Line[line]; type = Line->type; diff --git a/lib/vector/rtree/docs/sources.htm b/lib/vector/rtree/docs/sources.htm index 31e076775ff..7e597c4dba7 100644 --- a/lib/vector/rtree/docs/sources.htm +++ b/lib/vector/rtree/docs/sources.htm @@ -103,7 +103,7 @@

      Implementation of a simple C++ quaternion class called "Squat". Popularized by a seminal paper by Ken Shoemake, a quaternion represents a rotation about an axis.  Squats can be concatenated together via -the * and *= +the * and *= operators and converted back and forth between transformation matrices. Implementation also includes a wonderful 3D vector macro library by Don Hatch. diff --git a/lib/vector/vectorascii.html b/lib/vector/vectorascii.html index 0fd6fbf5444..ed22e4e2200 100644 --- a/lib/vector/vectorascii.html +++ b/lib/vector/vectorascii.html @@ -43,7 +43,7 @@ [ LAYER CATEGORY] -Everything above in [ ] is optional. +Everything above in [ ] is optional.

      The primitive codes are as follows:

        @@ -76,18 +76,18 @@

        Latitude/Longitude data may be given in a number of ways. Decimal degrees must be positive or negative instead of using a hemisphere letter. Mixed coordinates must use a hemisphere letter. Whole minutes and -seconds must always contain two digits (example: use 167:03:04.567; -and not 167:3:4.567). +seconds must always contain two digits (example: use 167:03:04.567; +and not 167:3:4.567).

        Acceptable formats:
        key: D=Degrees; M=Minutes; S=Seconds; h=Hemisphere (N,S,E,W)

        -  • (+/-)DDD.DDDDD
        -  • DDDh
        -  • DDD:MMh
        -  • DDD:MM.MMMMMh
        -  • DDD:MM:SSh
        -  • DDD:MM:SS.SSSSSh
        +  • (+/-)DDD.DDDDD
        +  • DDDh
        +  • DDD:MMh
        +  • DDD:MM.MMMMMh
        +  • DDD:MM:SSh
        +  • DDD:MM:SS.SSSSSh
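        To make the hemisphere and two-digit rules concrete, here is a small C sketch that converts one DDD:MM:SS.SSSSSh coordinate to decimal degrees. It is illustrative only and far less thorough than the actual v.in.ascii parser.

            /* Sketch: convert a DDD:MM:SS.SSSSSh coordinate such as "167:03:04.567E"
             * to decimal degrees (S and W become negative). */
            #include <stdio.h>
            #include <string.h>

            static int dms_to_deg(const char *text, double *degrees)
            {
                char buf[64];
                char hemi;
                int d, m;
                double s;
                size_t len = strlen(text);

                if (len < 2 || len >= sizeof(buf))
                    return 0;
                hemi = text[len - 1];            /* trailing hemisphere letter */
                memcpy(buf, text, len - 1);      /* numeric part without the letter */
                buf[len - 1] = '\0';

                if (sscanf(buf, "%d:%d:%lf", &d, &m, &s) != 3)
                    return 0;                    /* not in DDD:MM:SS.SSSSS form */

                *degrees = d + m / 60.0 + s / 3600.0;
                if (hemi == 'S' || hemi == 'W')
                    *degrees = -*degrees;        /* south and west are negative */

                return hemi == 'N' || hemi == 'S' || hemi == 'E' || hemi == 'W';
            }

            int main(void)
            {
                double deg;

                if (dms_to_deg("167:03:04.567E", &deg))
                    printf("decimal degrees: %.6f\n", deg);
                return 0;
            }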

        EXAMPLES

        @@ -132,5 +132,6 @@

        SEE ALSO

        v.in.ascii, v.out.ascii, - v.edit + v.edit, + v.support diff --git a/locale/po/grassmods_ar.po b/locale/po/grassmods_ar.po index cbb43ec6e9e..412f7783428 100644 --- a/locale/po/grassmods_ar.po +++ b/locale/po/grassmods_ar.po @@ -261,7 +261,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -553,20 +553,20 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 #, fuzzy msgid "My first raster module" msgstr "اسم الخريطة الراسترية الموجودة" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -620,7 +620,7 @@ msgstr "اسم الخريطة الراسترية الموجودة" msgid "Raster map <%s> not found" msgstr "غير موجودة <%s> الخريطة الراسترية " -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -805,12 +805,12 @@ msgstr "غير موجودة <%s> الخريطة الراسترية " msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 #, fuzzy msgid "My first vector module" msgstr "اسم الخريطة الراسترية الموجودة" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -832,13 +832,13 @@ msgstr "اسم الخريطة الراسترية الموجودة" msgid "Vector map <%s> not found" msgstr "لم توجد<%s>الخريطة الفيكتورية" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 #, fuzzy msgid "Unable to set predetermined vector open level" msgstr "اسم خريطة النقط الفيكتورية المخرجة" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -910,7 +910,7 @@ msgstr "اسم خريطة النقط الفيكتورية المخرجة" msgid "Unable to open vector map <%s>" msgstr "[%s]لم يتم فتح ملف الخلية ل " -#: 
../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -965,7 +965,7 @@ msgstr "[%s]لم يتم فتح ملف الخلية ل " msgid "Unable to create vector map <%s>" msgstr "اسم الخريطة الراسترية الناتجة" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -981,7 +981,7 @@ msgstr "اسم الخريطة الراسترية الناتجة" msgid "Database connection not defined for layer %d" msgstr "لم يتم الإتصال بقاعدة البيانات\n" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -995,7 +995,7 @@ msgstr "لم يتم الإتصال بقاعدة البيانات\n" msgid "Unable to start driver <%s>" msgstr "'%s'لم يمكن بدء المشغل" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1046,7 +1046,7 @@ msgstr "'%s'لم يمكن بدء المشغل" msgid "Unable to open database <%s> by driver <%s>" msgstr "%s بالمشغل %s لم يمكن فتح قاعدة البيانات" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1061,17 +1061,17 @@ msgstr "%s بالمشغل %s لم يمكن فتح قاعدة البيانات" msgid "Unable to describe table <%s>" msgstr "لم يمكن وصف الجدول" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, fuzzy, c-format msgid "Unable to get attribute data for cat %d" msgstr "لم يمكن اختيار البيانات من الجدول" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, fuzzy, c-format msgid "Error while retrieving database record for cat %d" msgstr "cat = %d لا يوجد تسجيل للخط" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, fuzzy, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "[%s]لم يتم فتح ملف الخلية ل " @@ -35230,7 +35230,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. 
Polygons must have not intersected boundaries." msgstr "" @@ -40743,7 +40743,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -51689,7 +51689,7 @@ msgstr "كتابة ملف جديد...\n" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -73052,11 +73052,11 @@ msgstr "اسم خريطة الراستر المخرجة" #~ msgstr "ملف المدخل" #, fuzzy -#~ msgid "Numpy array with vector cats." +#~ msgid "NumPy array with vector cats." #~ msgstr "%d نقطة كتبت للمخرج\n" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "لاتشمل اسماء الأعمدة في المخرج" #, fuzzy diff --git a/locale/po/grassmods_bn.po b/locale/po/grassmods_bn.po index f3526d45180..9e4c7bb8280 100644 --- a/locale/po/grassmods_bn.po +++ b/locale/po/grassmods_bn.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: 
../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32646,7 +32646,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector 
map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37639,7 +37639,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47629,7 +47629,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_cs.po b/locale/po/grassmods_cs.po index 0d17c962990..8968a2270cf 100644 --- a/locale/po/grassmods_cs.po +++ b/locale/po/grassmods_cs.po @@ -264,7 +264,7 @@ msgstr "" "ovladač: %s\n" "databáze: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -556,19 +556,19 @@ msgstr "" msgid "raster" msgstr "rastr" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Můj první modul pro zpracování rastrových dat" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -622,7 +622,7 @@ msgstr "Můj první modul pro zpracování rastrových dat" msgid "Raster map <%s> not found" msgstr "Rastrová mapa <%s> nenalezena" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -807,11 +807,11 @@ msgstr "Rastrová mapa <%s> nenalezena" msgid "vector" msgstr "vektor" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Můj první modul pro zpracování vektorových dat" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -833,12 +833,12 @@ msgstr "Můj první modul pro zpracování vektorových dat" msgid "Vector map <%s> not found" msgstr "Vektorová mapa <%s> nebyla nalezena" -#: 
../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "Nelze nastavit předvybranou vektorovou mapu na úrovni otevření" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -910,7 +910,7 @@ msgstr "Nelze nastavit předvybranou vektorovou mapu na úrovni otevření" msgid "Unable to open vector map <%s>" msgstr "Nelze otevřít vektorovou mapu <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -965,7 +965,7 @@ msgstr "Nelze otevřít vektorovou mapu <%s>" msgid "Unable to create vector map <%s>" msgstr "Nelze vytvořit vektorovou mapu <%s>" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -981,7 +981,7 @@ msgstr "Nelze vytvořit vektorovou mapu <%s>" msgid "Database connection not defined for layer %d" msgstr "Spojení s databází nebylo definováno pro vrstvu %d " -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -995,7 +995,7 @@ msgstr "Spojení s databází nebylo definováno pro vrstvu %d " msgid "Unable to start driver <%s>" msgstr "Nelze spustit ovladač <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1046,7 +1046,7 @@ msgstr "Nelze spustit ovladač <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Nelze otevřít databázi <%s> ovladačem <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1061,17 +1061,17 @@ msgstr "Nelze otevřít databázi <%s> ovladačem <%s>" msgid "Unable to describe table <%s>" msgstr "Nelze popsat tabulku <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat 
%d" msgstr "Nelze získat atributová data pro cat= %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "Chyba během získání záznamu databáze pro kategorii %d" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "Nelze kopírovat atributovou tabulku do vektorové mapy <%s>" @@ -33469,7 +33469,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38799,7 +38799,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -49461,7 +49461,7 @@ msgstr "Exportují se prvky s kategorií..." #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -69845,7 +69845,7 @@ msgstr "" #~ msgstr "Číslo vrstvy (to)" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "Příliš mnoho názvů sloupců" #, fuzzy diff --git a/locale/po/grassmods_de.po b/locale/po/grassmods_de.po index 7e994f4b379..48ffaefbcb6 100644 --- a/locale/po/grassmods_de.po +++ b/locale/po/grassmods_de.po @@ -259,7 +259,7 @@ msgstr "" "Treiber: %s\n" "Datenbank: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -551,19 +551,19 @@ msgstr "" msgid "raster" msgstr "Raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "Schlagwort2" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "Schlagwort3" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Mein erstes Rastermodul." -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -617,7 +617,7 @@ msgstr "Mein erstes Rastermodul." msgid "Raster map <%s> not found" msgstr "Rasterkarte <%s> konnte nicht gefunden werden." 
-#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -802,11 +802,11 @@ msgstr "Rasterkarte <%s> konnte nicht gefunden werden." msgid "vector" msgstr "Vektor" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Mein erstes Vektormodul." -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -828,12 +828,12 @@ msgstr "Mein erstes Vektormodul." msgid "Vector map <%s> not found" msgstr "Vektorkarte <%s> nicht gefunden." -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "Kann das vorherbestimmte Vektoröffnungslevel nicht setzen." -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -905,7 +905,7 @@ msgstr "Kann das vorherbestimmte Vektoröffnungslevel nicht setzen." msgid "Unable to open vector map <%s>" msgstr "Kann die Vektorkarte <%s> nicht öffnen." -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -960,7 +960,7 @@ msgstr "Kann die Vektorkarte <%s> nicht öffnen." msgid "Unable to create vector map <%s>" msgstr "Kann die Vektorkarte <%s> nicht erzeugen." -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -976,7 +976,7 @@ msgstr "Kann die Vektorkarte <%s> nicht erzeugen." msgid "Database connection not defined for layer %d" msgstr "Die Datenbankverbindung für den Layer<%d> ist nicht definiert." -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -990,7 +990,7 @@ msgstr "Die Datenbankverbindung für den Layer<%d> ist nicht definiert." msgid "Unable to start driver <%s>" msgstr "Kann den Treiber <%s> nicht starten." 
-#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1041,7 +1041,7 @@ msgstr "Kann den Treiber <%s> nicht starten." msgid "Unable to open database <%s> by driver <%s>" msgstr "Kann Datenbank <%s> nicht mit dem Treiber <%s> öffnen." -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1056,17 +1056,17 @@ msgstr "Kann Datenbank <%s> nicht mit dem Treiber <%s> öffnen." msgid "Unable to describe table <%s>" msgstr "Kann Tabelle <%s> nicht beschreiben." -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "Kann die Attribute für cat %d nicht bekommen." -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "Fehler beim Abfragen des Datenbankeintrags für cat %d" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "Kann die Attributtabelle nicht in die Vektorkarte <%s> kopieren." @@ -33833,7 +33833,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -39230,7 +39230,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Löst Grenzen zwischen benachbarten Flächen mit gleicher Kategorienummer oder " @@ -50003,7 +50003,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -70489,11 +70489,11 @@ msgstr "" #~ msgstr "Layer-Nummer für Flächen-Karte" #, fuzzy -#~ msgid "Numpy array with vector cats." +#~ msgid "NumPy array with vector cats." #~ msgstr "Keine Flächen in Vektorkarte <%s>" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "Zu viele Spaltennamen." 
#, fuzzy diff --git a/locale/po/grassmods_el.po b/locale/po/grassmods_el.po index e183723b634..45b4e6e66e5 100644 --- a/locale/po/grassmods_el.po +++ b/locale/po/grassmods_el.po @@ -258,7 +258,7 @@ msgstr "" "driver: %s\n" "database: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -550,19 +550,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -616,7 +616,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "Ο χάρτης raster <%s> δεν βρέθηκε." -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -801,11 +801,11 @@ msgstr "Ο χάρτης raster <%s> δεν βρέθηκε." 
msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -827,12 +827,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -904,7 +904,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -959,7 +959,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -975,7 +975,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -989,7 +989,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "Αδυναμία εκκίνησης του οδηγού <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1040,7 +1040,7 @@ msgstr "Αδυναμία εκκίνησης του οδηγού <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1055,17 +1055,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "Αδυναμία περιγραφής πίνακα <%s>" -#: ../doc/vector/v.example/main.c:176 
../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32820,7 +32820,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37938,7 +37938,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48176,7 +48176,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_es.po b/locale/po/grassmods_es.po index ca8a6727270..dcd927332fa 100644 --- a/locale/po/grassmods_es.po +++ b/locale/po/grassmods_es.po @@ -271,7 +271,7 @@ msgstr "" "driver: %s\n" "base de datos: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -563,19 +563,19 @@ msgstr "" msgid "raster" msgstr "ráster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "palabra clave 2 " -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "palabra clave 3" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Mi primer módulo ráster" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -629,7 +629,7 @@ msgstr "Mi primer módulo ráster" msgid "Raster map <%s> not found" msgstr "Mapa ráster <%s> no encontrado" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -814,11 +814,11 @@ 
msgstr "Mapa ráster <%s> no encontrado" msgid "vector" msgstr "vectorial" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Mi primer módulo vectorial" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -840,13 +840,13 @@ msgstr "Mi primer módulo vectorial" msgid "Vector map <%s> not found" msgstr "Mapa vectorial <%s> no encontrado" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" "No ha sido posible establecer nivel predeterminado de apertura de vectorial" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -918,7 +918,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "No se puede abrir el mapa vectorial <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -973,7 +973,7 @@ msgstr "No se puede abrir el mapa vectorial <%s>" msgid "Unable to create vector map <%s>" msgstr "No se puede crear el mapa vectorial <%s>" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -989,7 +989,7 @@ msgstr "No se puede crear el mapa vectorial <%s>" msgid "Database connection not defined for layer %d" msgstr "La conexión a la base de datos no ha sido definida para la capa %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -1003,7 +1003,7 @@ msgstr "La conexión a la base de datos no ha sido definida para la capa %d" msgid "Unable to start driver <%s>" msgstr "No se puede iniciar el controlador <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1054,7 +1054,7 @@ msgstr "No se puede iniciar el controlador <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr 
"No se puede abrir la base de datos <%s> por el controlador <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1069,17 +1069,17 @@ msgstr "No se puede abrir la base de datos <%s> por el controlador <%s>" msgid "Unable to describe table <%s>" msgstr "No se puede describir la tabla<%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "No ha sido posible obtener datos para categoría %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "Error al recuperar registro de base de datos para categoría %d" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "No es posible copiar tabla de atributos al mapa vectorial <%s>" @@ -34627,7 +34627,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -40162,7 +40162,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Disuelve contornos entre áreas adyacentes que comparten un número de " @@ -51266,7 +51266,7 @@ msgstr "Conviritiendo reglas de color en categorías..." #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -57136,7 +57136,7 @@ msgstr "categoría del elemento más cercano" #: ../vector/v.distance/main.c:202 msgid "minimum distance to nearest feature" -msgstr "distancia máxima al punto más cercano" +msgstr "distancia mínima al punto más cercano" #: ../vector/v.distance/main.c:203 msgid "x coordinate of the nearest point on the 'to' feature" @@ -72203,11 +72203,11 @@ msgstr "" #~ msgstr "Número o nombre de capa (write to)" #, fuzzy -#~ msgid "Numpy array with vector cats." +#~ msgid "NumPy array with vector cats." #~ msgstr "No hay áreas en mapa vectorial <%s>" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." 
#~ msgstr "Demasiados nombres de columna" #, fuzzy diff --git a/locale/po/grassmods_fi.po b/locale/po/grassmods_fi.po index 692d9c0d9f2..dba5dfc58ea 100644 --- a/locale/po/grassmods_fi.po +++ b/locale/po/grassmods_fi.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" 
msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32664,7 +32664,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37662,7 +37662,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47677,7 +47677,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. 
If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_fr.po b/locale/po/grassmods_fr.po index 8f4602fc6ec..ba09f1d912c 100644 --- a/locale/po/grassmods_fr.po +++ b/locale/po/grassmods_fr.po @@ -267,7 +267,7 @@ msgstr "" "pilote : %s\n" "base : %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -559,19 +559,19 @@ msgstr "" msgid "raster" msgstr "raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Mon module raster de départ" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -625,7 +625,7 @@ msgstr "Mon module raster de départ" msgid "Raster map <%s> not found" msgstr "Carte raster <%s> non trouvée" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -810,11 +810,11 @@ msgstr "Carte raster <%s> non trouvée" msgid "vector" msgstr "vecteur" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Mon module vecteur de départ." -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -836,12 +836,12 @@ msgstr "Mon module vecteur de départ." 
msgid "Vector map <%s> not found" msgstr "Carte vecteur <%s> non trouvée" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -913,7 +913,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "Impossible d'ouvrir la carte vecteur <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -968,7 +968,7 @@ msgstr "Impossible d'ouvrir la carte vecteur <%s>" msgid "Unable to create vector map <%s>" msgstr "Impossible de créer la carte vecteur <%s>" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -985,7 +985,7 @@ msgid "Database connection not defined for layer %d" msgstr "" "La connexion à la base de données n'a pas été définie pour la couche %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -999,7 +999,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "Impossible de lancer le pilote <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1050,7 +1050,7 @@ msgstr "Impossible de lancer le pilote <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Impossible d'ouvrir la base de données <%s> avec le pilote <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1065,17 +1065,17 @@ msgstr "Impossible d'ouvrir la base de données <%s> avec le pilote <%s>" msgid "Unable to describe table <%s>" msgstr "Impossible de décrire la table <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "Impossible de 
sélectionner les attributs pour cat = %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "Aucun enregistrement pour la catégorie cat = %d" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "Impossible de copier la table d'attributs vers la carte vecteur <%s>" @@ -1578,8 +1578,8 @@ msgid "" "Unable to convert input map coordinate reference system to GRASS format; " "cannot create new project." msgstr "" -"Impossible de convertir la projection de la couche en entrée au format GRASS " -"; impossible de créer le nouveau projet" +"Impossible de convertir la projection de la couche en entrée au format " +"GRASS ; impossible de créer le nouveau projet" #: ../raster/r.external/proj.c:140 ../raster/r.in.gdal/main.c:1035 #: ../raster/r.in.gdal/proj.c:139 ../vector/v.in.lidar/main.c:522 @@ -1638,7 +1638,7 @@ msgstr "Le PROJ_INFO du jeu de données est : \n" #: ../raster/r.in.lidar/projection.c:89 ../raster3d/r3.in.lidar/projection.c:90 #: ../vector/v.in.lidar/projection.c:87 ../vector/v.external/proj.c:360 #: ../vector/v.in.pdal/projection.c:87 ../vector/v.in.ogr/proj.c:492 -#, c-format, fuzzy +#, fuzzy, c-format msgid "" "\n" "In case of no significant differences in the coordinate reference system " @@ -4444,7 +4444,7 @@ msgid "unable to open input file <%s>" msgstr "" #: ../raster/r.quant/read_rules.c:89 -#, c-format, fuzzy +#, fuzzy, c-format msgid "" "\n" "Enter the rule or 'help' for the format description or 'end' to exit:\n" @@ -4561,7 +4561,7 @@ msgid "Failed to set raster attribute table" msgstr "" #: ../raster/r.out.gdal/main.c:51 -#, c-format, fuzzy +#, fuzzy, c-format msgid "Supported formats:\n" msgstr "Formats supportés :\n" @@ -12802,7 +12802,7 @@ msgid "Amount of memory cannot be negative." msgstr "Maximum de mémoire à utiliser (en MB)" #: ../raster/r.viewshed/main.cpp:616 -#, c-format, fuzzy +#, fuzzy, c-format msgid " Converting %d to %d MB" msgstr " Conversion de %d en %d MB" @@ -23481,12 +23481,12 @@ msgid "Region%s" msgstr "" #: ../imagery/i.cluster/main.c:261 -#, c-format, fuzzy +#, fuzzy, c-format msgid " North: %12.2f East: %12.2f%s" msgstr " Nord : %12.2f Est : %12.2f%s" #: ../imagery/i.cluster/main.c:263 -#, c-format, fuzzy +#, fuzzy, c-format msgid " South: %12.2f West: %12.2f%s" msgstr " Sud : %12.2f Ouest : %12.2f%s" @@ -23496,7 +23496,7 @@ msgid " Res: %12.2f Res: %12.2f%s" msgstr "" #: ../imagery/i.cluster/main.c:267 -#, c-format, fuzzy +#, fuzzy, c-format msgid " Rows: %12d Cols: %12d Cells: %d%s" msgstr " Lignes : %12d Colonnes : %12d Cellules : %d%s" @@ -23526,17 +23526,17 @@ msgid " Minimum class size: %d%s" msgstr "" #: ../imagery/i.cluster/main.c:278 -#, c-format, fuzzy +#, fuzzy, c-format msgid " Minimum class separation: %f%s" msgstr " Séparation minimale des classes : %f%s" #: ../imagery/i.cluster/main.c:280 -#, c-format, fuzzy +#, fuzzy, c-format msgid " Percent convergence: %f%s" msgstr " Pourcentage de convergence : %f%s" #: ../imagery/i.cluster/main.c:282 -#, c-format, fuzzy +#, fuzzy, c-format msgid " Maximum number of iterations: %d%s" msgstr " Nombre maximum d'itérations : %d%s" @@ -28076,7 +28076,7 @@ msgid "Lists all database drivers." msgstr "Lister tous les pilotes de bases de données." 
#: ../db/db.execute/main.c:55 ../db/db.select/main.c:62 -#, c-format, fuzzy +#, fuzzy, c-format msgid "Unable to open file <%s>: %s" msgstr "Impossible d'ouvrir le fichier <%s> : %s" @@ -28981,7 +28981,7 @@ msgid "The password was stored in file (%s%cdblogin)" msgstr "" #: ../db/db.select/main.c:104 -#, c-format, fuzzy +#, fuzzy, c-format msgid "Test %s." msgstr "Test %s." @@ -33949,7 +33949,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -39447,7 +39447,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Fusionne des contours entre surfaces adjacentes partageant un attribut ou " @@ -50333,7 +50333,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -57802,7 +57802,7 @@ msgstr "" #: ../vector/v.vol.rst/user1.c:586 ../vector/v.vol.rst/user1.c:640 #: ../vector/v.vol.rst/user1.c:694 ../vector/v.vol.rst/user1.c:748 #: ../vector/v.vol.rst/user1.c:802 -#, c-format, fuzzy +#, fuzzy, c-format msgid "Error closing output file %s" msgstr "Erreur en fermant le fichier de sortie %s" @@ -66437,7 +66437,7 @@ msgid "" msgstr "" #: ../display/d.vect.thematic/main.c:515 -#, c-format, fuzzy +#, fuzzy, c-format msgid "" "\n" "Total number of records: %.0f\n" diff --git a/locale/po/grassmods_hu.po b/locale/po/grassmods_hu.po index ee0f0929c11..1833a1717a2 100644 --- a/locale/po/grassmods_hu.po +++ b/locale/po/grassmods_hu.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "raszter" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 
../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "Nem tudom elindítani a <%s> meghajtót" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "Nem tudom elindítani a <%s> meghajtót" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: 
../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32714,7 +32714,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37770,7 +37770,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47888,7 +47888,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_id_ID.po b/locale/po/grassmods_id_ID.po index d9a39bafe24..ab373e47cd5 100644 --- a/locale/po/grassmods_id_ID.po +++ b/locale/po/grassmods_id_ID.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: 
../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: 
../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32626,7 +32626,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37619,7 +37619,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47609,7 +47609,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_it.po b/locale/po/grassmods_it.po index 94368222e52..2ee80f9be6d 100644 --- a/locale/po/grassmods_it.po +++ b/locale/po/grassmods_it.po @@ -265,7 +265,7 @@ msgstr "" "driver: %s\n" "database: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -557,19 +557,19 @@ msgstr "" msgid "raster" msgstr "raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Il mio primo modulo raster" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -623,7 +623,7 @@ msgstr "Il mio primo modulo raster" msgid "Raster map <%s> not found" msgstr "Mappa raster <%s> non trovata" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -808,11 +808,11 @@ msgstr "Mappa raster <%s> non trovata" msgid "vector" msgstr 
"vettoriale" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Il mio primo modulo vettoriale" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -834,12 +834,12 @@ msgstr "Il mio primo modulo vettoriale" msgid "Vector map <%s> not found" msgstr "Mappa vettoriale <%s> non trovata" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "Impossibile impostare il predeterminato livello del vettoriale aperto" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -911,7 +911,7 @@ msgstr "Impossibile impostare il predeterminato livello del vettoriale aperto" msgid "Unable to open vector map <%s>" msgstr "Impossibile aprire la mappa vettoriale <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -966,7 +966,7 @@ msgstr "Impossibile aprire la mappa vettoriale <%s>" msgid "Unable to create vector map <%s>" msgstr "Non è possibile creare la mappa vettoriale <%s>" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -982,7 +982,7 @@ msgstr "Non è possibile creare la mappa vettoriale <%s>" msgid "Database connection not defined for layer %d" msgstr "Connessione al database non definita per il layer %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -996,7 +996,7 @@ msgstr "Connessione al database non definita per il layer %d" msgid "Unable to start driver <%s>" msgstr "Impossibile avviare il driver <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1047,7 +1047,7 @@ msgstr "Impossibile avviare il driver <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Impossibile 
aprire il database <%s> col driver <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1062,17 +1062,17 @@ msgstr "Impossibile aprire il database <%s> col driver <%s>" msgid "Unable to describe table <%s>" msgstr "Impossibile descrivere la tabella <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "Impossibile ottenere gli attributi per la categoria %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "Errore ricevendo il record del database per la categoria %d" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -33636,7 +33636,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38964,7 +38964,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Dissolve i confini tra aree adiacenti che condividono categorie o attributi " @@ -49678,7 +49678,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -69850,7 +69850,7 @@ msgstr "" #~ msgstr "Numero o nome del layer 'to'" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." 
#~ msgstr "Troppi nomi di colonna" #, fuzzy diff --git a/locale/po/grassmods_ja.po b/locale/po/grassmods_ja.po index 0dd3735b064..236287ce5e5 100644 --- a/locale/po/grassmods_ja.po +++ b/locale/po/grassmods_ja.po @@ -259,7 +259,7 @@ msgstr "" "ドライバー: %s\n" "データベース: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -551,19 +551,19 @@ msgstr "" msgid "raster" msgstr "ラスター" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "自分の最初のラスターモジュール" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -617,7 +617,7 @@ msgstr "自分の最初のラスターモジュール" msgid "Raster map <%s> not found" msgstr "ラスタマップ <%s> が見つかりません" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -802,11 +802,11 @@ msgstr "ラスタマップ <%s> が見つかりません" msgid "vector" msgstr "ベクトル" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "自分の最初のベクトルモジュール" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -828,12 +828,12 @@ msgstr "自分の最初のベクトルモジュール" msgid "Vector map <%s> not found" msgstr "ベクトルマップ <%s> が見つかりません" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "既設のベクトル オープン レベルを設定できません" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -905,7 +905,7 @@ msgstr "既設のベクトル オープン レベルを設定できません" msgid "Unable to open vector map <%s>" msgstr "ベクトルマップ <%s> を開けません" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 
../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -960,7 +960,7 @@ msgstr "ベクトルマップ <%s> を開けません" msgid "Unable to create vector map <%s>" msgstr "ベクトルマップ <%s> を作成できません" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -976,7 +976,7 @@ msgstr "ベクトルマップ <%s> を作成できません" msgid "Database connection not defined for layer %d" msgstr "データベース接続はレイヤー %d に定義されていません" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -990,7 +990,7 @@ msgstr "データベース接続はレイヤー %d に定義されていませ msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1041,7 +1041,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "ドライバー <%s> でデータベース <%s> を開けません" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1056,17 +1056,17 @@ msgstr "ドライバー <%s> でデータベース <%s> を開けません" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "カテゴリー %d の属性データを取得できません" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "カテゴリー %d のデータベース レコード取得中にエラー" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "属性テーブルをベクトルマップ <%s> へコピーできません" @@ -33121,7 +33121,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38412,7 +38412,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -49018,7 +49018,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -68931,7 +68931,7 @@ msgstr "" #~ msgstr "レイヤー数 (from)" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "列名が多すぎます " #, fuzzy diff --git a/locale/po/grassmods_ko.po b/locale/po/grassmods_ko.po index 8d255c6f5ad..ac9b25d1df9 100644 --- a/locale/po/grassmods_ko.po +++ b/locale/po/grassmods_ko.po @@ -250,7 +250,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -542,19 +542,19 @@ msgstr "" msgid "raster" msgstr "래스터" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -608,7 +608,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "래스터 지도 <%s>를 찾을 수 없습니다" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -793,11 +793,11 @@ msgstr "래스터 지도 <%s>를 찾을 수 없습니다" msgid "vector" msgstr "벡터" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -819,12 +819,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "미리 결정된 벡터 열기 단계를 설정할 수 없습니다" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 
../raster/r.sim/simlib/observation_points.c:40 @@ -896,7 +896,7 @@ msgstr "미리 결정된 벡터 열기 단계를 설정할 수 없습니다" msgid "Unable to open vector map <%s>" msgstr "벡터 지도 <%s>를 열 수 없습니다" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -951,7 +951,7 @@ msgstr "벡터 지도 <%s>를 열 수 없습니다" msgid "Unable to create vector map <%s>" msgstr "벡터 지도 <%s>를 생성할 수 없습니다" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -967,7 +967,7 @@ msgstr "벡터 지도 <%s>를 생성할 수 없습니다" msgid "Database connection not defined for layer %d" msgstr "레이어 %d를 위한 데이터베이스 연결이 정의되지 않았습니다" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -981,7 +981,7 @@ msgstr "레이어 %d를 위한 데이터베이스 연결이 정의되지 않았 msgid "Unable to start driver <%s>" msgstr "드라이버 <%s>를 시작할 수 없습니다" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1032,7 +1032,7 @@ msgstr "드라이버 <%s>를 시작할 수 없습니다" msgid "Unable to open database <%s> by driver <%s>" msgstr "데이터베이스 <%s>를 드라이버 <%s>로 열 수 없습니다" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1047,17 +1047,17 @@ msgstr "데이터베이스 <%s>를 드라이버 <%s>로 열 수 없습니다" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "cat %d를 위한 속성 자료를 얻을 수 없습니다" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "속성 테이블을 벡터 지도 <%s>로 복사할 수 없습니다" @@ -33006,7 +33006,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. 
Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38236,7 +38236,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48700,7 +48700,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_lv.po b/locale/po/grassmods_lv.po index 43e408d8721..2fb56f36516 100644 --- a/locale/po/grassmods_lv.po +++ b/locale/po/grassmods_lv.po @@ -254,7 +254,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -546,19 +546,19 @@ msgstr "" msgid "raster" msgstr "rastrs" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -612,7 +612,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -797,11 +797,11 @@ msgstr "" msgid "vector" msgstr "vektors" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -823,12 +823,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 
../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -900,7 +900,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -955,7 +955,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -971,7 +971,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -985,7 +985,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1036,7 +1036,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1051,17 +1051,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, fuzzy, c-format msgid "Unable to get attribute data for cat %d" msgstr "Nevar iatlasīt datus no tabulas" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -33073,7 +33073,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -38284,7 +38284,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48739,7 +48739,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -68905,7 +68905,7 @@ msgstr "Izejas rastra kartes nosaukums" #~ msgstr "Slāņa numurs vai nosaukums" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "Atslēga (id) kolonas nosaukums" #, fuzzy diff --git a/locale/po/grassmods_ml.po b/locale/po/grassmods_ml.po index a3693866979..e03bdff269f 100644 --- a/locale/po/grassmods_ml.po +++ b/locale/po/grassmods_ml.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: 
../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32646,7 +32646,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. 
Polygons must have not intersected boundaries." msgstr "" @@ -37639,7 +37639,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47629,7 +47629,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_pl.po b/locale/po/grassmods_pl.po index dccca2f5d0f..7d966975e22 100644 --- a/locale/po/grassmods_pl.po +++ b/locale/po/grassmods_pl.po @@ -257,7 +257,7 @@ msgstr "" "sterownik: %s\n" "baza danych: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -549,19 +549,19 @@ msgstr "" msgid "raster" msgstr "raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Mój pierwszy moduł rastrowy" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -615,7 +615,7 @@ msgstr "Mój pierwszy moduł rastrowy" msgid "Raster map <%s> not found" msgstr "Mapa rastrowa <%s> nie została znaleziona" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -800,11 +800,11 @@ msgstr "Mapa rastrowa <%s> nie została znaleziona" msgid "vector" msgstr "wektor" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Mój pierwszy moduł wektorowy" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -826,12 +826,12 @@ msgstr "Mój pierwszy moduł wektorowy" msgid "Vector map <%s> not found" msgstr "Nie znaleziono mapy wektorowej <%s>" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector 
open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -903,7 +903,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "Nie można otworzyć mapy wektorowej <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -958,7 +958,7 @@ msgstr "Nie można otworzyć mapy wektorowej <%s>" msgid "Unable to create vector map <%s>" msgstr "Nie można utworzyć mapy wektorowej <%s>" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -974,7 +974,7 @@ msgstr "Nie można utworzyć mapy wektorowej <%s>" msgid "Database connection not defined for layer %d" msgstr "Połączenie z bazą danych nie zostało zdefiniowane dla warstwy %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -988,7 +988,7 @@ msgstr "Połączenie z bazą danych nie zostało zdefiniowane dla warstwy %d" msgid "Unable to start driver <%s>" msgstr "Nie można uruchomić sterownika <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1039,7 +1039,7 @@ msgstr "Nie można uruchomić sterownika <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Nie można otworzyć bazy danych <%s> za pomocą sterownika <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1054,17 +1054,17 @@ msgstr "Nie można otworzyć bazy danych <%s> za pomocą sterownika <%s>" msgid "Unable to describe table <%s>" msgstr "Nie można opisać tabeli <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "Nie można wybrać atrybutów dla cat %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while 
retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "Nie można skopiować tabeli atrybutów do mapy wektorowej <%s>" @@ -33222,7 +33222,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38510,7 +38510,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Usuwa granice pomiędzy sąsiadującymi obszarami o tej samej kategorii lub " @@ -49108,7 +49108,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -69282,7 +69282,7 @@ msgstr "" #~ msgstr "Liczba warstw (na)" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "Zbyt dużo nazw kolmun" #, fuzzy diff --git a/locale/po/grassmods_pt.po b/locale/po/grassmods_pt.po index 4a28f18ff64..8f06bf66795 100644 --- a/locale/po/grassmods_pt.po +++ b/locale/po/grassmods_pt.po @@ -255,7 +255,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -547,19 +547,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -613,7 +613,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -798,11 +798,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first 
vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -824,12 +824,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -901,7 +901,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -956,7 +956,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -972,7 +972,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -986,7 +986,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1037,7 +1037,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1052,17 +1052,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 
../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -33030,7 +33030,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38260,7 +38260,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48676,7 +48676,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_pt_BR.po b/locale/po/grassmods_pt_BR.po index d7bcb3c7f41..285fc19148c 100644 --- a/locale/po/grassmods_pt_BR.po +++ b/locale/po/grassmods_pt_BR.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "palavra-chave2" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "palavra-chave3" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "Meu primeiro módulo raster" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "Meu primeiro módulo raster" msgid "Raster map <%s> not found" msgstr "Mapa raster <%s> não encontrado" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "Mapa raster <%s> não encontrado" msgid "vector" msgstr "vetor" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "Meu primeiro módulo 
vetorial" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "Meu primeiro módulo vetorial" msgid "Vector map <%s> not found" msgstr "Mapa vetorial <%s> não encontrado" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "Não foi possível definir o nível vetorial aberto pré-determinado" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "Não foi possível definir o nível vetorial aberto pré-determinado" msgid "Unable to open vector map <%s>" msgstr "Não foi possível abrir mapa vetorial <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "Não foi possível abrir mapa vetorial <%s>" msgid "Unable to create vector map <%s>" msgstr "Não foi possível criar mapa vetorial <%s>" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "Não foi possível criar mapa vetorial <%s>" msgid "Database connection not defined for layer %d" msgstr "Conexão do banco de dados não definida para camada %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "Conexão do banco de dados não definida para camada %d" msgid "Unable to start driver <%s>" msgstr "Não foi possível iniciar o driver <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "Não foi possível iniciar o driver <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Não foi possível abrir banco de dados <%s> com driver <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 
../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "Não foi possível abrir banco de dados <%s> com driver <%s>" msgid "Unable to describe table <%s>" msgstr "Não foi possível descrever a tabela <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "Não foi possível obter dados de atributo para categoria %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "Erro ao recuperar o registro do banco de dados para categoria %d" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -33360,7 +33360,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38650,7 +38650,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -49178,7 +49178,7 @@ msgstr "Convertendo regras de cores em categorias..." #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. 
If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_ro.po b/locale/po/grassmods_ro.po index 3f4174329a4..381049e994b 100644 --- a/locale/po/grassmods_ro.po +++ b/locale/po/grassmods_ro.po @@ -255,7 +255,7 @@ msgstr "" "driver: %s\n" "baza de date: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -547,19 +547,19 @@ msgstr "" msgid "raster" msgstr "raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "cuvânt cheie 2" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "cuvânt cheie 3" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -613,7 +613,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "Harta raster <%s> nu este găsită" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -798,11 +798,11 @@ msgstr "Harta raster <%s> nu este găsită" msgid "vector" msgstr "vector" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -824,12 +824,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "Harta vectorială <%s> nu a fost găsită" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -901,7 +901,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "Imposibil de deschis harta vectorială <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: 
../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -956,7 +956,7 @@ msgstr "Imposibil de deschis harta vectorială <%s>" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -972,7 +972,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "Conexiunea bazei de date nu este definită pentru stratul %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -986,7 +986,7 @@ msgstr "Conexiunea bazei de date nu este definită pentru stratul %d" msgid "Unable to start driver <%s>" msgstr "Imposibil de pornit driver-ul<%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1037,7 +1037,7 @@ msgstr "Imposibil de pornit driver-ul<%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Imposibil de deschis baza de date <%s> cu driverl <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1052,17 +1052,17 @@ msgstr "Imposibil de deschis baza de date <%s> cu driverl <%s>" msgid "Unable to describe table <%s>" msgstr "Nu s-a putut descrie tabelul<%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "Imposibil de obținut datele atribut pentru cat %d" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "Imposibil de copiat tabela de atribute la harta vectorială <%s>" @@ -33065,7 +33065,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -38269,7 +38269,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Se dizolvă limitele dintre arealele adiacente un număr comun de categorii " @@ -48737,7 +48737,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_ru.po b/locale/po/grassmods_ru.po index 2630e39adcf..35bc5c306c4 100644 --- a/locale/po/grassmods_ru.po +++ b/locale/po/grassmods_ru.po @@ -255,7 +255,7 @@ msgstr "" "драйвер: %s\n" "база данных: %s" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -547,19 +547,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -613,7 +613,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "Растровая карта <%s> не найдена" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -798,11 +798,11 @@ msgstr "Растровая карта <%s> не найдена" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -824,12 +824,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "Векторная карта <%s> не найдена" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: 
../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -901,7 +901,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "Не удалось открыть векторную карту <%s>" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -956,7 +956,7 @@ msgstr "Не удалось открыть векторную карту <%s>" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -972,7 +972,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "Не определено соединение с базой данных для слоя %d" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -986,7 +986,7 @@ msgstr "Не определено соединение с базой данны msgid "Unable to start driver <%s>" msgstr "Не удалось запустить драйвер <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1037,7 +1037,7 @@ msgstr "Не удалось запустить драйвер <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "Не удалось открыть базу данных <%s> с помощью драйвера <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1052,17 +1052,17 @@ msgstr "Не удалось открыть базу данных <%s> с пом msgid "Unable to describe table <%s>" msgstr "Невозможно описать таблицу <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr 
"" @@ -32900,7 +32900,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -38055,7 +38055,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48284,7 +48284,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_si.po b/locale/po/grassmods_si.po index b2f15092dbc..d6aa6cebc6c 100644 --- a/locale/po/grassmods_si.po +++ b/locale/po/grassmods_si.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 
msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32646,7 +32646,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. 
Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37639,7 +37639,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47629,7 +47629,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_sl.po b/locale/po/grassmods_sl.po index 56c5e73fd60..981de091d76 100644 --- a/locale/po/grassmods_sl.po +++ b/locale/po/grassmods_sl.po @@ -264,7 +264,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -556,20 +556,20 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 #, fuzzy msgid "My first raster module" msgstr "Ime obstoječega rastrskega sloja." -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -623,7 +623,7 @@ msgstr "Ime obstoječega rastrskega sloja." msgid "Raster map <%s> not found" msgstr "Ne najdem vhodnega rastrskega sloja <%s>." -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -808,12 +808,12 @@ msgstr "Ne najdem vhodnega rastrskega sloja <%s>." msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 #, fuzzy msgid "My first vector module" msgstr "Ime obstoječega rastrskega sloja." -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -835,13 +835,13 @@ msgstr "Ime obstoječega rastrskega sloja." 
msgid "Vector map <%s> not found" msgstr "Ne najdem vhodnega vektorja <%s>" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 #, fuzzy msgid "Unable to set predetermined vector open level" msgstr "Ime izhodnega vektorskega točkovnega sloja" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -913,7 +913,7 @@ msgstr "Ime izhodnega vektorskega točkovnega sloja" msgid "Unable to open vector map <%s>" msgstr "Ime izhodnega rastrskega sloja" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -968,7 +968,7 @@ msgstr "Ime izhodnega rastrskega sloja" msgid "Unable to create vector map <%s>" msgstr "Ime izhodnega rastrskega sloja" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -984,7 +984,7 @@ msgstr "Ime izhodnega rastrskega sloja" msgid "Database connection not defined for layer %d" msgstr "Povezava z bazo podatkov ni definirana\n" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -998,7 +998,7 @@ msgstr "Povezava z bazo podatkov ni definirana\n" msgid "Unable to start driver <%s>" msgstr "Ne morem zagnati gonilnika '%s'." -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1049,7 +1049,7 @@ msgstr "Ne morem zagnati gonilnika '%s'." 
msgid "Unable to open database <%s> by driver <%s>" msgstr "Ne morem odpreti baze podatkov %s z gonilnikom %s" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1064,17 +1064,17 @@ msgstr "Ne morem odpreti baze podatkov %s z gonilnikom %s" msgid "Unable to describe table <%s>" msgstr "Ne morem opisati tabele" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, fuzzy, c-format msgid "Unable to get attribute data for cat %d" msgstr "Ne morem izbrati (select) podatkov iz tabele" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, fuzzy, c-format msgid "Error while retrieving database record for cat %d" msgstr "Za vrstico (cat = %d) ni zapisa (record)<" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, fuzzy, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "Ime izhodnega rastrskega sloja" @@ -35453,7 +35453,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -40975,7 +40975,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -51966,7 +51966,7 @@ msgstr "Uporabljam koordinate središča sloja\n" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -73421,11 +73421,11 @@ msgstr "Ime izhodnega rastrskega sloja" #~ msgstr "Vhodni sloj" #, fuzzy -#~ msgid "Numpy array with vector cats." +#~ msgid "NumPy array with vector cats." #~ msgstr "%d točk je bilo zapisanih na izhodno datoteko\n" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." 
#~ msgstr "Ime rastrskega sloja" #, fuzzy diff --git a/locale/po/grassmods_ta.po b/locale/po/grassmods_ta.po index 4bd45b751d5..2cc76db5743 100644 --- a/locale/po/grassmods_ta.po +++ b/locale/po/grassmods_ta.po @@ -252,7 +252,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -544,19 +544,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -610,7 +610,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -795,11 +795,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -821,12 +821,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -898,7 +898,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -953,7 +953,7 @@ msgstr "" msgid 
"Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -969,7 +969,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -983,7 +983,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1034,7 +1034,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1049,17 +1049,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32662,7 +32662,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37669,7 +37669,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47684,7 +47684,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. 
If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_th.po b/locale/po/grassmods_th.po index a38947544d5..4f763f25733 100644 --- a/locale/po/grassmods_th.po +++ b/locale/po/grassmods_th.po @@ -252,7 +252,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -544,19 +544,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -610,7 +610,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -795,11 +795,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -821,12 +821,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -898,7 +898,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 
../raster/r.contour/main.c:155 @@ -953,7 +953,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -969,7 +969,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -983,7 +983,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1034,7 +1034,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1049,17 +1049,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32774,7 +32774,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37894,7 +37894,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48103,7 +48103,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. 
If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_tr.po b/locale/po/grassmods_tr.po index ef10aa29895..3ca146dfced 100644 --- a/locale/po/grassmods_tr.po +++ b/locale/po/grassmods_tr.po @@ -254,7 +254,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -546,19 +546,19 @@ msgstr "" msgid "raster" msgstr "raster" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "İlk raster modülüm" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -612,7 +612,7 @@ msgstr "İlk raster modülüm" msgid "Raster map <%s> not found" msgstr "<%s> raster haritası bulunamadı" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -797,11 +797,11 @@ msgstr "<%s> raster haritası bulunamadı" msgid "vector" msgstr "vektör" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "İlk vektör modülüm" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -823,12 +823,12 @@ msgstr "İlk vektör modülüm" msgid "Vector map <%s> not found" msgstr "<%s> vektör haritası bulunamadı" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "Önceden belirlenen vektör açma katmanı ayarlanamıyor" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -900,7 +900,7 @@ msgstr "Önceden belirlenen vektör açma katmanı ayarlanamıyor" msgid "Unable to open vector map <%s>" msgstr "<%s> vektör haritası açılamıyor" -#: ../doc/vector/v.example/main.c:101 
../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -955,7 +955,7 @@ msgstr "<%s> vektör haritası açılamıyor" msgid "Unable to create vector map <%s>" msgstr "<%s> vektör haritası oluşturulamıyor" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -971,7 +971,7 @@ msgstr "<%s> vektör haritası oluşturulamıyor" msgid "Database connection not defined for layer %d" msgstr "%d katmanının veritabanı bağlantısı belirlenmedi" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -985,7 +985,7 @@ msgstr "%d katmanının veritabanı bağlantısı belirlenmedi" msgid "Unable to start driver <%s>" msgstr "Sürücü başlatılamıyor<%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1036,7 +1036,7 @@ msgstr "Sürücü başlatılamıyor<%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "<%s> sürücüsüyle <%s> veritabanı açılamıyor" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1051,17 +1051,17 @@ msgstr "<%s> sürücüsüyle <%s> veritabanı açılamıyor" msgid "Unable to describe table <%s>" msgstr "<%s> tablosu tanımlanamıyor" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -33009,7 +33009,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -38276,7 +38276,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48802,7 +48802,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" @@ -68743,7 +68743,7 @@ msgstr "" #~ msgstr "Katman numarası" #, fuzzy -#~ msgid "Numpy array with columns names." +#~ msgid "NumPy array with columns names." #~ msgstr "Çok fazla sütun adı" #, fuzzy diff --git a/locale/po/grassmods_uk.po b/locale/po/grassmods_uk.po index ded3fc09c7a..7403997e3ba 100644 --- a/locale/po/grassmods_uk.po +++ b/locale/po/grassmods_uk.po @@ -252,7 +252,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -544,19 +544,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -610,7 +610,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -795,11 +795,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -821,12 +821,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: 
../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -898,7 +898,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -953,7 +953,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -969,7 +969,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -983,7 +983,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1034,7 +1034,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1049,17 +1049,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32686,7 +32686,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -37679,7 +37679,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47669,7 +47669,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_vi.po b/locale/po/grassmods_vi.po index cb4f0aa95a9..7002874a944 100644 --- a/locale/po/grassmods_vi.po +++ b/locale/po/grassmods_vi.po @@ -252,7 +252,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -544,19 +544,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -610,7 +610,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -795,11 +795,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -821,12 +821,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: 
../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -898,7 +898,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -953,7 +953,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -969,7 +969,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -983,7 +983,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1034,7 +1034,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1049,17 +1049,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32777,7 +32777,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -37924,7 +37924,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48138,7 +48138,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_zh.po b/locale/po/grassmods_zh.po index 3ec8942025d..0be17badee4 100644 --- a/locale/po/grassmods_zh.po +++ b/locale/po/grassmods_zh.po @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: 
../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 +952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "无法启动驱动 <%s>" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "无法启动驱动 <%s>" msgid "Unable to open database <%s> by driver <%s>" msgstr "无法由驱动<%s>打开数据库 <%s>" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "无法由驱动<%s>打开数据库 <%s>" msgid "Unable to describe table <%s>" msgstr "无法描述表 <%s>" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32927,7 +32927,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -38146,7 +38146,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -48560,7 +48560,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grassmods_zh_CN.po b/locale/po/grassmods_zh_CN.po index 476cc510193..87e296038ea 100644 --- a/locale/po/grassmods_zh_CN.po +++ b/locale/po/grassmods_zh_CN.po @@ -252,7 +252,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -544,19 +544,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -610,7 +610,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -795,11 +795,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -821,12 +821,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: 
../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -898,7 +898,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -953,7 +953,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -969,7 +969,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -983,7 +983,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1034,7 +1034,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1049,17 +1049,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32627,7 +32627,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." 
msgstr "" @@ -37620,7 +37620,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47610,7 +47610,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/po/grasswxpy_ar.po b/locale/po/grasswxpy_ar.po index e0cd184896d..5a3439edac1 100644 --- a/locale/po/grasswxpy_ar.po +++ b/locale/po/grasswxpy_ar.po @@ -1092,7 +1092,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7568,7 +7568,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12909,7 +12909,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14244,7 +14244,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14314,7 +14314,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14934,10 +14934,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17919,17 +17915,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_bn.po b/locale/po/grasswxpy_bn.po index 27ab019eb1f..70368624833 100644 --- a/locale/po/grasswxpy_bn.po +++ b/locale/po/grasswxpy_bn.po @@ -1094,7 +1094,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). 
See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7576,7 +7576,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12919,7 +12919,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14254,7 +14254,7 @@ msgstr "ত্রুটি" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14324,7 +14324,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14944,10 +14944,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17930,17 +17926,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_cs.po b/locale/po/grasswxpy_cs.po index f4d64a7cf4a..c2e97a4a0ba 100644 --- a/locale/po/grasswxpy_cs.po +++ b/locale/po/grasswxpy_cs.po @@ -1111,12 +1111,12 @@ msgstr "Nastavení 3D zobrazení bylo uloženo do souboru <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Tento modul vyžaduje modul NumPy, který nelze importovat. Nejspíše není " "nainstalován (není součástí standardní instalace Pythonu). Podívejte se na " -"stránku Numeric Python (http://numpy.scipy.org), kde naleznete další " +"stránku Numeric Python (https://numpy.org), kde naleznete další " "informace o stažení a instalaci." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -7757,7 +7757,7 @@ msgstr "Rozloží společné hranice " #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" "Rozloží hranice mezi sousedícími plochami, sdílejícími společné číslo " @@ -13205,7 +13205,7 @@ msgid "Zoom to saved region extents" msgstr "Přiblížit na uložený region" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Nastavit výpočetní region podle pojmenovaného regionu" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14598,7 +14598,7 @@ msgstr "Chyba:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Nelze vykonat příkaz: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14668,7 +14668,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15316,10 +15316,6 @@ msgstr "Nastavit výpočetní region podle aktuálního okna" msgid "Set computational region extent interactively" msgstr "Nastavit výpočetní region interaktivně" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Nastavit výpočetní region podle pojmenovaného regionu" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Uložit výpočetní region jako pojmenovaný region" @@ -18369,17 +18365,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "Nelze vypočítat směrodatnou odchylku." #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_de.po b/locale/po/grasswxpy_de.po index 2a52e3dd394..42e35b2a4c8 100644 --- a/locale/po/grasswxpy_de.po +++ b/locale/po/grasswxpy_de.po @@ -1122,12 +1122,12 @@ msgstr "Einstellungen für 3D-Ansicht gespeichert in Datei <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Dieses erfordert NumPy, das nicht importiert werden konnte. Es ist eventuell " "nicht installiert (es ist nicht Bestandteil der Standard-Python-" -"Distribution). Gehe zur Numeric Python Website (http://numpy.scipy.org), um " +"Distribution). Gehe zur Numeric Python Website (https://numpy.org), um " "den Quellcode oder Binaries zu erhalten." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -6948,8 +6948,8 @@ msgstr "SIMWE Oberflächenabflussmodellierung" #: ../gui/wxpython/menustrings.py:428 ../gui/wxpython/menustrings.py:1391 msgid "Overland flow hydrologic simulation using path sampling method (SIMWE)." msgstr "" -"Hydrologische Simulation des Oberflächenabfluss mittels \"path " -"sampling\" (SIMWE)." +"Hydrologische Simulation des Oberflächenabfluss mittels \"path sampling\" " +"(SIMWE)." 
#: ../gui/wxpython/menustrings.py:429 ../gui/wxpython/menustrings.py:1392 msgid "SIMWE Sediment flux modeling" @@ -13102,7 +13102,7 @@ msgid "Zoom to saved region extents" msgstr "Auf die Ausdehnung der gespeicherten Region vergrößern" #: ../gui/wxpython/mapwin/buffered.py:2102 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2113 @@ -14486,7 +14486,7 @@ msgstr "Fehler: " #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Kann den Befehl '%s' nicht ausführen." #: ../gui/wxpython/core/utils.py:50 @@ -14565,7 +14565,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15227,10 +15227,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1563 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1566 msgid "Save computational region to named region" msgstr "" @@ -18297,17 +18293,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4047 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4056 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4083 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4091 diff --git a/locale/po/grasswxpy_el.po b/locale/po/grasswxpy_el.po index 21230ae0488..59e9e135df9 100644 --- a/locale/po/grasswxpy_el.po +++ b/locale/po/grasswxpy_el.po @@ -1097,14 +1097,14 @@ msgstr "Οι επιλογες της 3D προβολής αποθηκεύτηκ msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Αυτό το πρόσθετο απαιτεί το πρόσθετο NumPy, το οποίο δεν μπόρεσε να " "εισαχθεί. Πιθανότατα να μην είναι εγκατεστημένο (δεν είναι μέρος της γενικής " "διανομής της Python). Για περισσότερες πληροφορίες και για λήψη αρχείων προς " "εγκατάσταση καθώς και του πηγαίου κώδικα, απευθυνθήτε στην ιστοσελίδα της " -"Numpy (http://numpy.scipy.org)." +"NumPy (https://numpy.org)." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 #: ../gui/wxpython/iclass/g.gui.iclass.py:88 @@ -7647,7 +7647,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -13012,7 +13012,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14352,7 +14352,7 @@ msgstr "Σφάλμα:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Αδυναμία εκτέλεσης εντολής: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14422,7 +14422,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15045,10 +15045,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18053,17 +18049,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_es.po b/locale/po/grasswxpy_es.po index 7e0d2865020..e5ccaf4d59c 100644 --- a/locale/po/grasswxpy_es.po +++ b/locale/po/grasswxpy_es.po @@ -1130,12 +1130,12 @@ msgstr "Las configuraciones de la vista 3D se han guardado en el archivo <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" -"Este módulo requiere el módulo NumP, que no pudo ser importado. " +"Este módulo requiere el módulo NumPy, que no pudo ser importado. " "Probablemente no está instalado (no es parte de la distibución estándar de " -"Python). Ver el site Numérico de Python (http://numpy.scipy.org) para más " +"Python). Ver el site Numérico de Python (https://numpy.org) para más " "información sobre la fuente de descarga o binarios." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -8127,7 +8127,7 @@ msgstr "Disolver contornos" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" "Disuelve contornos entre áreas adyacentes que comparten un número de " @@ -13890,7 +13890,7 @@ msgid "Zoom to saved region extents" msgstr "Acercamiento a la extensión de la región guardada (zoom to saved...)" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Definir región computacional desde la región nombrada" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -15348,7 +15348,7 @@ msgstr "Error:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "No se puede ejecutar el comando: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -15428,7 +15428,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "Etiqueta desconocida %s" #: ../gui/wxpython/core/debug.py:46 @@ -16108,10 +16108,6 @@ msgstr "Definir región computacional a partir de la visualización" msgid "Set computational region extent interactively" msgstr "Definir extensión de la región computacional de manera interactiva" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Establecer región computacional desde región nombrada" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Guardar región computacional con nombre" @@ -19292,19 +19288,19 @@ msgid "Statistics is not support for DBF tables." msgstr "Las estadísticas no están soportadas para tablas DBF." #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "No es posible calcular estadísticas." #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" "No se pueden calcular las estadísticas. Número de líneas %d inválido (debe " "ser %d)." #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "No es posible calcular desviación estándar." #: ../gui/wxpython/dbmgr/base.py:4106 @@ -22328,12 +22324,6 @@ msgstr "etiquetas:" #~ msgid "Deactive overwrite" #~ msgstr "Desactivar sobreescribir" -#~ msgid "Systematic contiguos" -#~ msgstr "Sistemático continuo" - -#~ msgid "Systematic non contiguos" -#~ msgstr "Sistemático no continuo" - #, python-format #~ msgid "Unable to load icon theme. Reason: %s. Quiting wxGUI..." #~ msgstr "" diff --git a/locale/po/grasswxpy_fi.po b/locale/po/grasswxpy_fi.po index 2e7a8a52e1c..ab1451450e7 100644 --- a/locale/po/grasswxpy_fi.po +++ b/locale/po/grasswxpy_fi.po @@ -1094,7 +1094,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7592,7 +7592,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -12942,7 +12942,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14285,7 +14285,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14355,7 +14355,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14975,10 +14975,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17969,17 +17965,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_fr.po b/locale/po/grasswxpy_fr.po index acafa54fde4..ca568bbe2ae 100644 --- a/locale/po/grasswxpy_fr.po +++ b/locale/po/grasswxpy_fr.po @@ -1127,12 +1127,12 @@ msgstr "Paramètres de vue 3D enregistrés dans le fichier <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Ce module requiert le module NumPy, qui n'a pas pu être importé. Il n'est " "probablement pas installé (ne fait pas partie de la distribution standard " -"Python). Voir le site Numeric Python (http://numpy.scipy.org) pour " +"Python). Voir le site Numeric Python (https://numpy.org) pour " "télécharger les binaires ou le code source." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -8108,7 +8108,7 @@ msgstr "Fusionner des contours" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" "Fusionne des contours entre surfaces adjacentes partageant un attribut ou " @@ -11908,8 +11908,8 @@ msgstr "Niveau de verbosité :" #: ../gui/wxpython/gui_core/preferences.py:1294 msgid "Number of threads for parallel computing (supported tools only):" msgstr "" -"Nombre de threads pour le calcul parallèle (outils pris en charge uniquement)" -" :" +"Nombre de threads pour le calcul parallèle (outils pris en charge " +"uniquement) :" #: ../gui/wxpython/gui_core/preferences.py:1319 msgid "Maximum memory in MB to be used (supported tools only):" @@ -13849,7 +13849,7 @@ msgid "Zoom to saved region extents" msgstr "Zoomer sur les limites de la région enregistrée" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Définir la région de calcul à partir d'une région enregistrée" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -15300,7 +15300,7 @@ msgstr "Erreur :" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Impossible d'exécuter la commande : '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -15379,7 +15379,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "Étiquette inconnue %s" #: ../gui/wxpython/core/debug.py:46 @@ -16054,10 +16054,6 @@ msgstr "Définir la région calculée à partir de celle de l'écran" msgid "Set computational region extent interactively" msgstr "Définir l'emprise de la région de calcul de manière interactive" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Définir la région calculée depuis un nom de région" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Enregistrer la région calculée vers un nom de région" @@ -19232,19 +19228,19 @@ msgid "Statistics is not support for DBF tables." msgstr "Les statistiques ne sont pas suportées pour les tables DBF." #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "Impossible de calculer les statistiques" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" "Impossible de calculer les statistiques. Nombre de lignes %d invalides " "(devrait être %d)." #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "Impossible de calculer l'écart-type." #: ../gui/wxpython/dbmgr/base.py:4106 @@ -21053,6 +21049,9 @@ msgstr "Cliquez-déposez pour redimensionner l'objet" msgid "labels: " msgstr "étiquettes :" +msgid "supported tools only" +msgstr "outils pris en charge uniquement" + #~ msgid "Location Name:" #~ msgstr "Nom du secteur : " @@ -22505,12 +22504,6 @@ msgstr "étiquettes :" #~ msgid "Deactive overwrite" #~ msgstr "Désactiver l'écrasement" -#~ msgid "Systematic contiguos" -#~ msgstr "systématiquement contigüs" - -#~ msgid "Systematic non contiguos" -#~ msgstr "Systématiquement non contigüs" - #, python-format #~ msgid "Unable to load icon theme. Reason: %s. Quiting wxGUI..." #~ msgstr "" @@ -22522,6 +22515,3 @@ msgstr "étiquettes :" #~ msgstr "" #~ "Impossible d'obtenir l’extension géographique courante. Sortie forcée de " #~ "wxGUI. 
Merci d'exécuter manuellement g.region pour résoudre ce problème." - -msgid "supported tools only" -msgstr "outils pris en charge uniquement" diff --git a/locale/po/grasswxpy_hu.po b/locale/po/grasswxpy_hu.po index a512c692f8b..4b1d57eb8fe 100644 --- a/locale/po/grasswxpy_hu.po +++ b/locale/po/grasswxpy_hu.po @@ -1101,12 +1101,12 @@ msgstr "3D nézet beállításait mentettem a <%s> fájlba." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Ez a modul a NumPy modult igényli, melyet nem tudtam importálni. Ezt " "valószínűleg nem telepítették (ez nem része a standard Python kiadásoknak). " -"Lásd a Numeric Python oldalt (http://numpy.scipy.org) a forrás vagy bináris " +"Lásd a Numeric Python oldalt (https://numpy.org) a forrás vagy bináris " "letöltési információkért." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -7715,7 +7715,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -13132,7 +13132,7 @@ msgid "Zoom to saved region extents" msgstr "Nagyítás a mentett régió terjedelmére" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Számítási régió beállítása névvel bíró régióval" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14522,7 +14522,7 @@ msgstr "Hiba:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Nem tudom végrehajtani a parancsot: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14592,7 +14592,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15220,10 +15220,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "Számítási régió interaktív beállítása" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Számítási régió beállítása névvel bíró régióval" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Számítási régió mentése névvel bíró régióba" @@ -18238,17 +18234,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "Nem tudok statisztikát számítani." #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "Nem tudok szórást számítani." 
#: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_id_ID.po b/locale/po/grasswxpy_id_ID.po index b3876296a0e..16ef34c40be 100644 --- a/locale/po/grasswxpy_id_ID.po +++ b/locale/po/grasswxpy_id_ID.po @@ -1094,7 +1094,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7571,7 +7571,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12912,7 +12912,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14247,7 +14247,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14317,7 +14317,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14937,10 +14937,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17922,17 +17918,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_it.po b/locale/po/grasswxpy_it.po index dafa7437a82..6f400df3b19 100644 --- a/locale/po/grasswxpy_it.po +++ b/locale/po/grasswxpy_it.po @@ -1119,12 +1119,12 @@ msgstr "Impostazioni visualizzazione 3D salvate nel file <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Questo modulo richiede NumPy, che non è stato possibile importare. " "Probabilmente non è installato (non fa parte della distribuzione standard di " -"Python). Controlla il sito di Numeric Python (http://numpy.scipy.org) per " +"Python). Controlla il sito di Numeric Python (https://numpy.org) per " "maggiori informazione sul download del sorgente o dei binari." 
#: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -7922,7 +7922,7 @@ msgstr "Dissolvi confini" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Dissolve i confini tra aree adiacenti che condividono categorie o attributi " @@ -13535,7 +13535,7 @@ msgid "Zoom to saved region extents" msgstr "Zoom all'estensione della regione salvata" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Imposta la regione computazionale dalla regione salvata" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14967,7 +14967,7 @@ msgstr "Errore:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Impossibile eseguire il comando '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -15044,7 +15044,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "Etichetta sconosciuta %s" #: ../gui/wxpython/core/debug.py:46 @@ -15709,10 +15709,6 @@ msgstr "Imposta la regione computazionale dall'estensione del display" msgid "Set computational region extent interactively" msgstr "Imposta la region computazionale interattivamente" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Imposta regione computazionale da una regione precedentemente salvata" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Salva la regione computazionale a una regione con nome" @@ -18846,19 +18842,19 @@ msgid "Statistics is not support for DBF tables." msgstr "Statistiche non è supportato per le tabelle DBF." #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "Impossibile calcolare le statistiche." #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" "Impossibile calcolare le statistiche. Numero non valido di linee %d " "(dovrebbe essere %d)." #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "Impossibile calcolare la deviazione standard." #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_ja.po b/locale/po/grasswxpy_ja.po index 2feb9436763..6e43667a294 100644 --- a/locale/po/grasswxpy_ja.po +++ b/locale/po/grasswxpy_ja.po @@ -1102,13 +1102,13 @@ msgstr "3D表示設定がファイルに保存されました <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" -"このモジュールはインポートされないNumpyモジュールを必要とします. おそらくイン" +"このモジュールはインポートされないNumPyモジュールを必要とします. おそらくイン" "ストールされていません(標準的なPython分布の一環ではありません). ソースまた" -"はバイナリのダウンロードに関する情報は数値Pythonのサイト(http://numpy.scipy." 
-"org)を参照してください" +"はバイナリのダウンロードに関する情報は数値Pythonのサイト(https://numpy.org" +")を参照してください" #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 #: ../gui/wxpython/iclass/g.gui.iclass.py:88 @@ -7723,7 +7723,7 @@ msgstr "境界の融合" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "共通のカテゴリー番号や属性を共有するエリアを融合させる" @@ -13140,7 +13140,7 @@ msgid "Zoom to saved region extents" msgstr "保存領域をズーム" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14518,7 +14518,7 @@ msgstr "エラー:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "コマンドが実行できません: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14596,7 +14596,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "不明なタグ %s" #: ../gui/wxpython/core/debug.py:46 @@ -15242,10 +15242,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18294,17 +18290,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_ko.po b/locale/po/grasswxpy_ko.po index bbb4a9a638b..aedfb614f94 100644 --- a/locale/po/grasswxpy_ko.po +++ b/locale/po/grasswxpy_ko.po @@ -1094,12 +1094,12 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "이 모듈은 NumPy 모듈이 필요하지만 가져올 수 없었습니다. 표준 파이썬 배포에 포" "함되지 않았기 때문에 아마도 설치되지 않은 것 같습니다. 소스나 이진코드를 내려" -"받는데 필요한 정보는 Numeric Python 사이트 http://numpy.scipy.org에서 확인하" +"받는데 필요한 정보는 Numeric Python 사이트 https://numpy.org에서 확인하" "세요." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -7640,7 +7640,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -13016,7 +13016,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14381,7 +14381,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14451,7 +14451,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15073,10 +15073,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18094,17 +18090,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_lv.po b/locale/po/grasswxpy_lv.po index 83f6f002239..55fc29c8662 100644 --- a/locale/po/grasswxpy_lv.po +++ b/locale/po/grasswxpy_lv.po @@ -1096,7 +1096,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7710,7 +7710,7 @@ msgstr "Izšķīdināt robežas" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -13106,7 +13106,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14461,7 +14461,7 @@ msgstr "Kļūda: " #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Nebija iespējams izpildīt komandu: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14531,7 +14531,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "Nezināma birka %s" #: ../gui/wxpython/core/debug.py:46 @@ -15155,10 +15155,6 @@ msgstr "Iestatīt aprēķinu reģiona apjomu no kartes skata" msgid "Set computational region extent interactively" msgstr "Interaktīvi iestatīt aprēķinu reģiona apjomu" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18167,17 +18163,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_ml.po b/locale/po/grasswxpy_ml.po index 73eaedf0eb5..6add41bd33a 100644 --- a/locale/po/grasswxpy_ml.po +++ b/locale/po/grasswxpy_ml.po @@ -1094,7 +1094,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7611,7 +7611,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -12968,7 +12968,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14331,7 +14331,7 @@ msgstr "തെറ്റ്:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "ആജ്ഞ നടപ്പാക്കുന്നതിൽ പാളിച: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14401,7 +14401,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15022,10 +15022,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18020,17 +18016,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_pl.po b/locale/po/grasswxpy_pl.po index c4826ee408a..d049c5c7263 100644 --- a/locale/po/grasswxpy_pl.po +++ b/locale/po/grasswxpy_pl.po @@ -1105,7 +1105,7 @@ msgstr "Ustawienia widoku 3D zapisano w pliku <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7768,7 +7768,7 @@ msgstr "Granice regionu" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" "Usuwa granice pomiędzy sąsiadującymi obszarami o tej samej kategorii lub " @@ -13183,7 +13183,7 @@ msgid "Zoom to saved region extents" msgstr "Przybliż do zapisanego regionu" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Ustaw region obliczeniowy z nazwanego regionu" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14577,7 +14577,7 @@ msgstr "Błąd:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Nie można wykonać polecenia: %s" #: ../gui/wxpython/core/utils.py:50 @@ -14652,7 +14652,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15292,10 +15292,6 @@ msgstr "Ustaw zasięg wyświetlonej mapy jako region obliczeniowy" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Zapisz region obliczeniowy do nazwanego regionu" @@ -18348,17 +18344,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_pt.po b/locale/po/grasswxpy_pt.po index bae0e777dbe..5e097cfeb15 100644 --- a/locale/po/grasswxpy_pt.po +++ b/locale/po/grasswxpy_pt.po @@ -1097,7 +1097,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7628,7 +7628,7 @@ msgstr "Dissolver fronteiras/contornos" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -12993,7 +12993,7 @@ msgid "Zoom to saved region extents" msgstr "Zoom para a extensão da região guardada" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14338,7 +14338,7 @@ msgstr "Erro:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14408,7 +14408,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15041,10 +15041,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18050,17 +18046,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_pt_BR.po b/locale/po/grasswxpy_pt_BR.po index 8888a1631a8..dc2cfb7d4c2 100644 --- a/locale/po/grasswxpy_pt_BR.po +++ b/locale/po/grasswxpy_pt_BR.po @@ -1121,12 +1121,12 @@ msgstr "Configurações de visualização 3D salvas no arquivo <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Este módulo requer o módulo NumPy, que não pôde ser importado. Provavelmente " "não está instalado (não faz parte da distribuição padrão do Python). " -"Consulte o site Numeric Python (http://numpy.scipy.org) para obter " +"Consulte o site Numeric Python (https://numpy.org) para obter " "informações sobre como baixar source ou binários." #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -7995,7 +7995,7 @@ msgstr "Dissolver fronteiras" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" "Dissolve fronteiras entre áreas adjacentes que compartilham um número de " @@ -13682,7 +13682,7 @@ msgid "Zoom to saved region extents" msgstr "Zoom para extensões de região salvas" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Definir região compulacional da região nomeada" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -15110,7 +15110,7 @@ msgstr "Erro:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Não foi possível executar o comando: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -15188,7 +15188,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "Rótulo %s desconhecido" #: ../gui/wxpython/core/debug.py:46 @@ -15863,10 +15863,6 @@ msgstr "Definir a extensão da região computacional a partir da exibição" msgid "Set computational region extent interactively" msgstr "Definir a extensão da região computacional interativamente" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Definir a região computacional da região nomeada" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Salvar região computacional para região nomeada" @@ -18990,17 +18986,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 @@ -21946,12 +21942,6 @@ msgstr "rótulos:" #~ msgid "Deactive overwrite" #~ msgstr "Desativar susbstituição" -#~ msgid "Systematic contiguos" -#~ msgstr "Contiguo sistemático" - -#~ msgid "Systematic non contiguos" -#~ msgstr "Não contiguo sistemático" - #, python-format #~ msgid "Unable to load icon theme. Reason: %s. Quiting wxGUI..." #~ msgstr "" diff --git a/locale/po/grasswxpy_ro.po b/locale/po/grasswxpy_ro.po index 4eb12668f10..0d1c17d2d7f 100644 --- a/locale/po/grasswxpy_ro.po +++ b/locale/po/grasswxpy_ro.po @@ -1118,12 +1118,12 @@ msgstr "Setările de vizualizare 3D salvate ca fișier <%s>." msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Acest modul necesită modulul NumPy, care nu a putut fi importat. Probabil nu " "este instalat (nu este parte a distribuției standard de Python). Vezi site-" -"ul Numeric Python (http://numpy.scipy.org) pentru informații despre " +"ul Numeric Python (https://numpy.org) pentru informații despre " "descărcarea codului sursă." 
#: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 @@ -7799,7 +7799,7 @@ msgstr "Dizolvă limitele" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Se dizolvă limitele dintre arealele adiacente un număr comun de categorii " @@ -13292,7 +13292,7 @@ msgid "Zoom to saved region extents" msgstr "Zoom pentru a salva extinderea regiunii" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14673,7 +14673,7 @@ msgstr "Eroare:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Nu s-a putut executa comanda: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14749,7 +14749,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15407,10 +15407,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18497,17 +18493,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_ru.po b/locale/po/grasswxpy_ru.po index 4d6f7d54fc6..5d3705c7621 100644 --- a/locale/po/grasswxpy_ru.po +++ b/locale/po/grasswxpy_ru.po @@ -1118,14 +1118,14 @@ msgstr "Параметры трёхмерного вида сохранены в msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "Для работы данного модуля необходимо наличие модуля NumPy, который не " "удалось импортировать. Вероятно, он не установлен (не является частью " "стандартного дистрибутива Python). Для получения информации о загрузке " -"исходного кода или двоичных файлы посетите сайт «Числовой Python» (http://" -"numpy.scipy.org)." +"исходного кода или двоичных файлы посетите сайт «Числовой Python» (https://" +"numpy.org)." 
#: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 #: ../gui/wxpython/iclass/g.gui.iclass.py:88 @@ -8225,7 +8225,7 @@ msgstr "Удаление границ" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" "Удаляет границы между смежными полигонами с одинаковым номером категории или " @@ -13975,7 +13975,7 @@ msgid "Zoom to saved region extents" msgstr "Задать масштаб до размера сохранённого фрагмента" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "Задать расчётный фрагмент от именованного фрагмента" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -15414,7 +15414,7 @@ msgstr "Ошибка: " #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "Не удалось выполнить команду: «%s»" #: ../gui/wxpython/core/utils.py:50 @@ -15492,7 +15492,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "Неизвестный тег %s" #: ../gui/wxpython/core/debug.py:46 @@ -16173,10 +16173,6 @@ msgstr "Задайте охват расчётного фрагмента из msgid "Set computational region extent interactively" msgstr "Задайте охват расчётного фрагмента в интерактивном режиме" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "Задайте охват расчётного фрагмента из именованного фрагмента" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "Сохраните охват расчётного фрагмента из именованного фрагмента" @@ -19344,19 +19340,19 @@ msgid "Statistics is not support for DBF tables." msgstr "Статистика для таблиц DBF не поддерживается." #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "Не удалось вычислить статистику." #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" "Не удалось вычислить статистику. Недопустимое количество линий %d (должно " "быть %d)." #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "Не удалось рассчитать стандартное отклонение." #: ../gui/wxpython/dbmgr/base.py:4106 @@ -22052,12 +22048,6 @@ msgstr "подписи:" #~ msgid "Georectifier manual" #~ msgstr "Руководство для инструмента геопривязки" -#~ msgid "Systematic contiguos" -#~ msgstr "Систематическая непрерывная выборка" - -#~ msgid "Systematic non contiguos" -#~ msgstr "Систематическая несвязная выборка" - #, python-brace-format #~ msgid "" #~ "The following error occured when deleting mapset <{path}>:\n" diff --git a/locale/po/grasswxpy_si.po b/locale/po/grasswxpy_si.po index af376434553..7d7c44acc16 100644 --- a/locale/po/grasswxpy_si.po +++ b/locale/po/grasswxpy_si.po @@ -1094,7 +1094,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). 
See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7570,7 +7570,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12911,7 +12911,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14246,7 +14246,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14316,7 +14316,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14936,10 +14936,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17921,17 +17917,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_sl.po b/locale/po/grasswxpy_sl.po index ecd79bf6108..0fd856d5061 100644 --- a/locale/po/grasswxpy_sl.po +++ b/locale/po/grasswxpy_sl.po @@ -1092,7 +1092,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7568,7 +7568,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -12909,7 +12909,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14244,7 +14244,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14314,7 +14314,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14934,10 +14934,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17919,17 +17915,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_ta.po b/locale/po/grasswxpy_ta.po index d9a3d921f5a..78526979368 100644 --- a/locale/po/grasswxpy_ta.po +++ b/locale/po/grasswxpy_ta.po @@ -1095,7 +1095,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7580,7 +7580,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -12925,7 +12925,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14260,7 +14260,7 @@ msgstr "பிழை:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "கட்டளையை செயல்படுத்த முடியவில்லை: '%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14330,7 +14330,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14950,10 +14950,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17936,17 +17932,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_th.po b/locale/po/grasswxpy_th.po index 359511f80b9..f3b63365d9b 100644 --- a/locale/po/grasswxpy_th.po +++ b/locale/po/grasswxpy_th.po @@ -1094,7 +1094,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7598,7 +7598,7 @@ msgstr "ละลาย ขอบเขต" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "Dissolves เขต ระหว่าง พื้นที่ที่อยู่ติดกันและมี ข้อมูลเหมือนกัน" @@ -12962,7 +12962,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14298,7 +14298,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14368,7 +14368,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14990,10 +14990,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17984,17 +17980,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_tr.po b/locale/po/grasswxpy_tr.po index cef0605d3d9..34c69f06ff0 100644 --- a/locale/po/grasswxpy_tr.po +++ b/locale/po/grasswxpy_tr.po @@ -1096,7 +1096,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7662,7 +7662,7 @@ msgstr "Sınırları çöz" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 #, fuzzy msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" "Ortak kategori numarası veya özniteliği paylaşan komşu alanlar arasındaki " @@ -13045,7 +13045,7 @@ msgid "Zoom to saved region extents" msgstr "Kaydedilmiş bölge boyutlarına göre yakınlaştır" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14399,7 +14399,7 @@ msgstr "Hata:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "'%s' komutu çalıştırılamıyor" #: ../gui/wxpython/core/utils.py:50 @@ -14469,7 +14469,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15095,10 +15095,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18104,17 +18100,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_uk.po b/locale/po/grasswxpy_uk.po index a225fe82eb0..db3b10f93f3 100644 --- a/locale/po/grasswxpy_uk.po +++ b/locale/po/grasswxpy_uk.po @@ -1095,7 +1095,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7571,7 +7571,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." 
msgstr "" @@ -12912,7 +12912,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14247,7 +14247,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14317,7 +14317,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14937,10 +14937,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17922,17 +17918,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_vi.po b/locale/po/grasswxpy_vi.po index c5621a9bf4e..1e15d066449 100644 --- a/locale/po/grasswxpy_vi.po +++ b/locale/po/grasswxpy_vi.po @@ -1095,7 +1095,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7581,7 +7581,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12923,7 +12923,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14258,7 +14258,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14328,7 +14328,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14948,10 +14948,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17934,17 +17930,17 @@ msgid "Statistics is not support for DBF tables." 
msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/po/grasswxpy_zh.po b/locale/po/grasswxpy_zh.po index 25fba6850cc..330b876a438 100644 --- a/locale/po/grasswxpy_zh.po +++ b/locale/po/grasswxpy_zh.po @@ -1089,11 +1089,11 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" "该模块需要使用的NumPy模块无法导入。可能没有安装该模块(非Python标准模块)。参" -"见Numeric Python网站(http://numpy.scipy.org)查看下载源码或安装文件信息。" +"见Numeric Python网站(https://numpy.org)查看下载源码或安装文件信息。" #: ../gui/wxpython/nviz/wxnviz.py:361 ../gui/wxpython/nviz/wxnviz.py:372 #: ../gui/wxpython/iclass/g.gui.iclass.py:88 @@ -7684,7 +7684,7 @@ msgstr "边界融合" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -13066,7 +13066,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14420,7 +14420,7 @@ msgstr "错误:" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "无法执行命令:'%s'" #: ../gui/wxpython/core/utils.py:50 @@ -14492,7 +14492,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -15117,10 +15117,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -18147,17 +18143,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." 
msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 @@ -20720,12 +20716,6 @@ msgstr "标签:" #~ msgid "Automatically hightlight selected features in map display" #~ msgstr "地图显示窗口中自动高亮所选择要素" -#~ msgid "Systematic contiguos" -#~ msgstr "系统连续" - -#~ msgid "Systematic non contiguos" -#~ msgstr "系统不连续" - #~ msgid "" #~ "Unable to get current geographic extent. Force quiting wxGUI. Please " #~ "manually run g.region to fix the problem." diff --git a/locale/po/grasswxpy_zh_CN.po b/locale/po/grasswxpy_zh_CN.po index 64212537ad1..f03f638b9eb 100644 --- a/locale/po/grasswxpy_zh_CN.po +++ b/locale/po/grasswxpy_zh_CN.po @@ -1095,7 +1095,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7571,7 +7571,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12912,7 +12912,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14247,7 +14247,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14317,7 +14317,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -14937,10 +14937,6 @@ msgstr "" msgid "Set computational region extent interactively" msgstr "" -#: ../gui/wxpython/mapdisp/frame.py:1531 -msgid "Set computational region from named region" -msgstr "" - #: ../gui/wxpython/mapdisp/frame.py:1534 msgid "Save computational region to named region" msgstr "" @@ -17922,17 +17918,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." 
msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/locale/templates/grassmods.pot b/locale/templates/grassmods.pot index 3476f2faa63..2648cf1de3b 100644 --- a/locale/templates/grassmods.pot +++ b/locale/templates/grassmods.pot @@ -251,7 +251,7 @@ msgid "" "database: %s" msgstr "" -#: ../doc/raster/r.example/main.c:83 ../raster/r.external/main.c:57 +#: ../doc/examples/raster/r.example/main.c:83 ../raster/r.external/main.c:57 #: ../raster/r.quantile/main.c:297 ../raster/r.terraflow/main.cpp:457 #: ../raster/r.info/main.c:71 ../raster/r.patch/main.c:68 #: ../raster/r.support.stats/main.c:35 ../raster/r.what.color/main.c:88 @@ -543,19 +543,19 @@ msgstr "" msgid "raster" msgstr "" -#: ../doc/raster/r.example/main.c:84 ../doc/vector/v.example/main.c:50 +#: ../doc/examples/raster/r.example/main.c:84 ../doc/examples/vector/v.example/main.c:50 msgid "keyword2" msgstr "" -#: ../doc/raster/r.example/main.c:85 ../doc/vector/v.example/main.c:51 +#: ../doc/examples/raster/r.example/main.c:85 ../doc/examples/vector/v.example/main.c:51 msgid "keyword3" msgstr "" -#: ../doc/raster/r.example/main.c:86 +#: ../doc/examples/raster/r.example/main.c:86 msgid "My first raster module" msgstr "" -#: ../doc/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 +#: ../doc/examples/raster/r.example/main.c:105 ../raster/r.terraflow/main.cpp:207 #: ../raster/r.terraflow/main.cpp:354 ../raster/r.terraflow/main.cpp:390 #: ../raster/r.info/main.c:111 ../raster/r.cost/main.c:364 #: ../raster/r.cost/main.c:391 ../raster/r.cost/main.c:711 @@ -609,7 +609,7 @@ msgstr "" msgid "Raster map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 +#: ../doc/examples/vector/v.example/main.c:49 ../raster/r.contour/main.c:90 #: ../raster/r.random/main.c:55 ../misc/m.nviz.script/main.c:71 #: ../misc/m.nviz.image/main.c:52 #: ../locale/scriptstrings/v.clip_to_translate.c:2 @@ -794,11 +794,11 @@ msgstr "" msgid "vector" msgstr "" -#: ../doc/vector/v.example/main.c:52 +#: ../doc/examples/vector/v.example/main.c:52 msgid "My first vector module" msgstr "" -#: ../doc/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 +#: ../doc/examples/vector/v.example/main.c:76 ../raster/r.cost/main.c:357 #: ../raster/r.walk/main.c:460 ../raster/r.carve/main.c:153 #: ../misc/m.nviz.image/vector.c:82 ../general/g.region/main.c:505 #: ../vector/v.lidar.edgedetection/main.c:181 ../vector/v.profile/main.c:371 @@ -820,12 +820,12 @@ msgstr "" msgid "Vector map <%s> not found" msgstr "" -#: ../doc/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 +#: ../doc/examples/vector/v.example/main.c:82 ../vector/v.profile/main.c:378 #: ../vector/v.cluster/main.c:137 msgid "Unable to set predetermined vector open level" msgstr "" -#: ../doc/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 +#: ../doc/examples/vector/v.example/main.c:88 ../raster/r.cost/main.c:599 #: ../raster/r.cost/main.c:666 ../raster/r.drain/main.c:277 #: ../raster/r.walk/main.c:760 ../raster/r.walk/main.c:827 #: ../raster/r.region/main.c:172 ../raster/r.sim/simlib/observation_points.c:40 @@ -897,7 +897,7 @@ msgstr "" msgid "Unable to open vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 +#: ../doc/examples/vector/v.example/main.c:101 ../raster/r.to.vect/main.c:159 #: ../raster/r.drain/main.c:204 ../raster/r.resamp.bspline/main.c:484 #: ../raster/r.sim/simlib/output.c:49 ../raster/r.sim/simlib/output.c:56 #: ../raster/r.flow/flow_io.c:181 ../raster/r.contour/main.c:155 @@ -952,7 
+952,7 @@ msgstr "" msgid "Unable to create vector map <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 +#: ../doc/examples/vector/v.example/main.c:110 ../vector/v.generalize/misc.c:167 #: ../vector/v.to.rast/support.c:101 ../vector/v.to.rast/support.c:271 #: ../vector/v.to.rast/support.c:499 ../vector/v.distance/main.c:563 #: ../vector/v.buffer/main.c:422 ../vector/v.univar/main.c:236 @@ -968,7 +968,7 @@ msgstr "" msgid "Database connection not defined for layer %d" msgstr "" -#: ../doc/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 +#: ../doc/examples/vector/v.example/main.c:130 ../raster/r.stream.extract/close.c:176 #: ../db/db.execute/main.c:68 ../db/db.createdb/main.c:38 #: ../db/db.columns/main.c:47 ../db/db.dropdb/main.c:39 #: ../db/db.databases/main.c:47 ../db/db.describe/main.c:51 @@ -982,7 +982,7 @@ msgstr "" msgid "Unable to start driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 +#: ../doc/examples/vector/v.example/main.c:138 ../raster/r.to.vect/main.c:189 #: ../raster/r.contour/main.c:170 ../raster/r.volume/main.c:248 #: ../raster/r.random/random.c:67 ../misc/m.nviz.image/vector.c:301 #: ../raster3d/r3.flow/main.c:46 ../ps/ps.map/catval.c:53 @@ -1033,7 +1033,7 @@ msgstr "" msgid "Unable to open database <%s> by driver <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:143 ../db/db.columns/main.c:58 +#: ../doc/examples/vector/v.example/main.c:143 ../db/db.columns/main.c:58 #: ../db/db.describe/main.c:62 ../vector/v.db.connect/main.c:229 #: ../vector/v.out.vtk/writeVTK.c:651 ../vector/v.out.postgis/table.c:41 #: ../vector/v.reclass/main.c:189 ../vector/v.in.ascii/main.c:446 @@ -1048,17 +1048,17 @@ msgstr "" msgid "Unable to describe table <%s>" msgstr "" -#: ../doc/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 +#: ../doc/examples/vector/v.example/main.c:176 ../vector/v.profile/main.c:708 #, c-format msgid "Unable to get attribute data for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 +#: ../doc/examples/vector/v.example/main.c:185 ../vector/v.profile/main.c:717 #, c-format msgid "Error while retrieving database record for cat %d" msgstr "" -#: ../doc/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 +#: ../doc/examples/vector/v.example/main.c:220 ../vector/v.fill.holes/main.c:196 #, c-format msgid "Unable to copy attribute table to vector map <%s>" msgstr "" @@ -32646,7 +32646,7 @@ msgstr "" #: ../locale/scriptstrings/v.what.spoly_to_translate.c:1 msgid "" -"Queries vector map with overlaping \"spaghetti\" polygons (e.g. Landsat " +"Queries vector map with overlapping \"spaghetti\" polygons (e.g. Landsat " "footprints) at given location. Polygons must have not intersected boundaries." msgstr "" @@ -37639,7 +37639,7 @@ msgstr "" #: ../locale/scriptstrings/v.dissolve_to_translate.c:1 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -47629,7 +47629,7 @@ msgstr "" #: ../locale/scriptstrings/v.scatterplot_to_translate.c:85 msgid "" -"Colum with categories. If selected, a separate ellipse will be drawn for " +"Column with categories. 
If selected, a separate ellipse will be drawn for " "each group/category" msgstr "" diff --git a/locale/templates/grasswxpy.pot b/locale/templates/grasswxpy.pot index ed4066f6747..899d4518078 100644 --- a/locale/templates/grasswxpy.pot +++ b/locale/templates/grasswxpy.pot @@ -1093,7 +1093,7 @@ msgstr "" msgid "" "This module requires the NumPy module, which could not be imported. It " "probably is not installed (it's not part of the standard Python " -"distribution). See the Numeric Python site (http://numpy.scipy.org) for " +"distribution). See the Numeric Python site (https://numpy.org) for " "information on downloading source or binaries." msgstr "" @@ -7569,7 +7569,7 @@ msgstr "" #: ../gui/wxpython/menustrings.py:572 ../gui/wxpython/menustrings.py:1537 msgid "" -"Dissolves adjacent or overlaping features sharing a common category number " +"Dissolves adjacent or overlapping features sharing a common category number " "or attribute." msgstr "" @@ -12910,7 +12910,7 @@ msgid "Zoom to saved region extents" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2103 -msgid "Set compulational region from named region" +msgid "Set computational region from named region" msgstr "" #: ../gui/wxpython/mapwin/buffered.py:2114 @@ -14245,7 +14245,7 @@ msgstr "" #: ../gui/wxpython/core/gcmd.py:504 #, python-format -msgid "Unable to exectute command: '%s'" +msgid "Unable to execute command: '%s'" msgstr "" #: ../gui/wxpython/core/utils.py:50 @@ -14315,7 +14315,7 @@ msgstr "" #: ../gui/wxpython/core/menutree.py:155 #, python-format -msgid "Unknow tag %s" +msgid "Unknown tag %s" msgstr "" #: ../gui/wxpython/core/debug.py:46 @@ -17920,17 +17920,17 @@ msgid "Statistics is not support for DBF tables." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4062 -msgid "Unable to calculte statistics." +msgid "Unable to calculate statistics." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4071 #, python-format msgid "" -"Unable to calculte statistics. Invalid number of lines %d (should be %d)." +"Unable to calculate statistics. Invalid number of lines %d (should be %d)." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4098 -msgid "Unable to calculte standard deviation." +msgid "Unable to calculate standard deviation." msgstr "" #: ../gui/wxpython/dbmgr/base.py:4106 diff --git a/macosx/ReadMe.md b/macosx/ReadMe.md index 085c09c691b..2e9e05e4620 100644 --- a/macosx/ReadMe.md +++ b/macosx/ReadMe.md @@ -228,7 +228,7 @@ build)*: `--x-includes=/usr/X11R6/include --x-libraries=/usr/X11R6/lib` -To install the new Python GUI (see [REQUIREMENTS.html](../REQUIREMENTS.html) +To install the new Python GUI (see [REQUIREMENTS.md](../REQUIREMENTS.md) and [gui/wxpython/README](../gui/wxpython/README), wxpython installer available at [wxpython.org](https://wxpython.org/)), add this to configure (fill in the correct version at x.x.x.x for the wxpython you have installed): @@ -374,7 +374,7 @@ sudo ln -sf /Library/Frameworks/Tk.framework/Versions/8.5/libtkstub8.5.a \ ### Universal GPSBabel A universal GPSBabel CLI executable is now included in the OSX binary -from [www.gpsbabel.org](http://www.gpsbabel.org/). This does not need to +from [www.gpsbabel.org](https://www.gpsbabel.org/). This does not need to be 64bit. ### Universal NetPBM @@ -700,4 +700,4 @@ This program is free software under the GNU General Public License (>=v2). 
- + diff --git a/macosx/pkg/resources/ReadMe.rtf b/macosx/pkg/resources/ReadMe.rtf index 38cd111f3b1..c18857bf849 100644 --- a/macosx/pkg/resources/ReadMe.rtf +++ b/macosx/pkg/resources/ReadMe.rtf @@ -253,6 +253,6 @@ This program is free software under the GNU General Public License (>=v2).} - William Kyngesburye} \par \pard\plain \s0\nowidctlpar{\*\hyphen2\hyphlead2\hyphtrail2\hyphmax0}\cf0\kerning1\dbch\af7\langfe2052\dbch\af8\afs24\alang1081\loch\f3\fs24\lang1033\ql\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640{{\field{\*\fldinst HYPERLINK "mailto:kyngchaos@kyngchaos.com" }{\fldrslt \cf2\ul\ulc0\langfe255\alang255\lang255\cf1\i0\ulnone\ulc0\b0\rtlch \ltrch\loch\fs28\loch\f5 kyngchaos@kyngchaos.com}}} -\par \pard\plain \s0\nowidctlpar{\*\hyphen2\hyphlead2\hyphtrail2\hyphmax0}\cf0\kerning1\dbch\af7\langfe2052\dbch\af8\afs24\alang1081\loch\f3\fs24\lang1033\ql\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640{{\field{\*\fldinst HYPERLINK "http://www.kyngchaos.com" }{\fldrslt \cf2\ul\ulc0\langfe255\alang255\lang255\cf1\i0\ulnone\ulc0\b0\rtlch \ltrch\loch\fs28\loch\f5 -http://www.kyngchaos.com/}}} +\par \pard\plain \s0\nowidctlpar{\*\hyphen2\hyphlead2\hyphtrail2\hyphmax0}\cf0\kerning1\dbch\af7\langfe2052\dbch\af8\afs24\alang1081\loch\f3\fs24\lang1033\ql\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640{{\field{\*\fldinst HYPERLINK "https://www.kyngchaos.com" }{\fldrslt \cf2\ul\ulc0\langfe255\alang255\lang255\cf1\i0\ulnone\ulc0\b0\rtlch \ltrch\loch\fs28\loch\f5 +https://www.kyngchaos.com/}}} \par } diff --git a/man/Makefile b/man/Makefile index 1a189c8d7c8..4ad90b29a74 100644 --- a/man/Makefile +++ b/man/Makefile @@ -12,7 +12,11 @@ DSTFILES := \ $(HTMLDIR)/grass_icon.png \ $(HTMLDIR)/jquery.fixedheadertable.min.js \ $(HTMLDIR)/parser_standard_options.css \ - $(HTMLDIR)/parser_standard_options.js + $(HTMLDIR)/parser_standard_options.js \ + $(MDDIR)/mkdocs.yml \ + $(MDDIR)/source/grass_logo.png \ + $(MDDIR)/source/grassdocs.css \ + $(MDDIR)/overrides/partials/footer.html categories = \ d:display \ @@ -34,7 +38,11 @@ IDXSRC = full_index index topics keywords graphical_index manual_gallery class_g INDICES := $(patsubst %,$(HTMLDIR)/%.html,$(IDXSRC)) +IDXSRC_MD = full_index index topics keywords graphical_index manual_gallery parser_standard_options $(IDXCATS) +INDICES_MD := $(patsubst %,$(MDDIR)/source/%.md,$(IDXSRC_MD)) + ALL_HTML := $(wildcard $(HTMLDIR)/*.*.html) +ALL_MD := $(wildcard $(MDDIR)/source/*.*.html) ifneq (@(type sphinx-build2 > /dev/null),) SPHINXBUILD = sphinx-build2 @@ -44,10 +52,12 @@ SPHINXBUILD = sphinx-build endif default: $(DSTFILES) - @echo "Generating HTML manual pages index (help system)..." + @echo "Generating manual pages index (help system)..." 
$(MAKE) $(INDICES) $(call build,check) $(MAKE) manpages + $(MAKE) $(INDICES_MD) +# $(MAKE) build-mkdocs # This must be a separate target so that evaluation of $(MANPAGES) # is delayed until the indices have been generated @@ -60,32 +70,37 @@ manpages: define build GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ + MDDIR="${MDDIR}" \ VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ $(PYTHON) ./build_$(1).py $(2) endef define build_topics GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ + MDDIR="${MDDIR}" \ VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ - $(PYTHON) ./build_topics.py $(HTMLDIR) + $(PYTHON) ./build_topics.py endef define build_keywords GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ + MDDIR="${MDDIR}" \ VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ - $(PYTHON) ./build_keywords.py $(HTMLDIR) + $(PYTHON) ./build_keywords.py endef define build_graphical_index GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ + MDDIR="${MDDIR}" \ VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ - $(PYTHON) ./build_graphical_index.py $(HTMLDIR) + $(PYTHON) ./build_graphical_index.py endef define build_manual_gallery GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ + MDDIR="${MDDIR}" \ VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ - $(PYTHON) ./build_manual_gallery.py $(HTMLDIR) + $(PYTHON) ./build_manual_gallery.py endef define build_pso @@ -95,8 +110,18 @@ GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ -f "grass" -o "$(HTMLDIR)/parser_standard_options.html" -p 'id="opts_table" class="scroolTable"' endef +define build_pso_md +GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ + MDDIR="${MDDIR}" \ + VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ + $(PYTHON) ./parser_standard_options.py -t "$(GRASS_HOME)/lib/gis/parser_standard_options.c" \ + -f "grass" -o "$(MDDIR)/source/parser_standard_options.md" +endef + $(HTMLDIR)/topics.html: $(ALL_HTML) +$(MDDIR)/source/topics.md: $(ALL_MD) + define build_class_graphical GISBASE="$(RUN_GISBASE)" ARCH="$(ARCH)" ARCH_DISTDIR="$(ARCH_DISTDIR)" \ VERSION_NUMBER=$(GRASS_VERSION_NUMBER) VERSION_DATE=$(GRASS_VERSION_DATE) \ @@ -107,30 +132,56 @@ $(HTMLDIR)/topics.html: $(ALL_HTML) build_topics.py $(call build_topics) touch $@ +$(MDDIR)/source/topics.md: $(ALL_MD) build_topics.py + $(call build_topics) + touch $@ + $(HTMLDIR)/full_index.html: $(ALL_HTML) build_full_index.py build_html.py $(call build,full_index) touch $@ +$(MDDIR)/source/full_index.md: $(ALL_MD) build_full_index.py build_html.py + $(call build,full_index) + touch $@ + $(HTMLDIR)/index.html: build_index.py build_html.py $(call build,index) touch $@ +$(MDDIR)/source/index.md: build_index.py build_md.py + $(call build,index) + touch $@ + $(HTMLDIR)/keywords.html: $(ALL_HTML) $(call build_keywords) touch $@ +$(MDDIR)/source/keywords.md: $(ALL_MD) + $(call build_keywords) + touch $@ $(HTMLDIR)/graphical_index.html: $(ALL_HTML) $(call build_graphical_index) touch $@ +$(MDDIR)/source/graphical_index.md: $(ALL_MD) + $(call build_graphical_index) + touch $@ + $(HTMLDIR)/manual_gallery.html: $(ALL_HTML) $(call build_manual_gallery) +$(MDDIR)/source/manual_gallery.md: $(ALL_MD) + $(call build_manual_gallery) + $(HTMLDIR)/parser_standard_options.html: $(ALL_HTML) $(call 
build_pso) touch $@ +$(MDDIR)/source/parser_standard_options.md: $(ALL_MD) + $(call build_pso_md) + touch $@ + # TODO: this should be done in the same way as category_rule $(HTMLDIR)/class_graphical.html: $(ALL_HTML) $(call build_class_graphical) @@ -144,12 +195,24 @@ endef $(foreach cat,$(categories),$(eval $(call category_rule,$(firstword $(subst :, ,$(cat))),$(lastword $(subst :, ,$(cat)))))) +define category_rule_md +$$(MDDIR)/source/$(2).md: $$(wildcard $$(MDDIR)/source/$(1).*.md) build_class.py build_md.py + $$(call build,class,$(1) $(2)) + touch $$@ +endef + +$(foreach cat,$(categories),$(eval $(call category_rule_md,$(firstword $(subst :, ,$(cat))),$(lastword $(subst :, ,$(cat)))))) + + $(HTMLDIR)/grassdocs.css: grassdocs.css $(INSTALL_DATA) $< $@ $(HTMLDIR)/grass_logo.png: grass_logo.png $(INSTALL_DATA) $< $@ +$(MDDIR)/source/grass_logo.png: grass_logo.png + $(INSTALL_DATA) $< $@ + $(HTMLDIR)/hamburger_menu.svg: hamburger_menu.svg $(INSTALL_DATA) $< $@ @@ -167,3 +230,20 @@ $(HTMLDIR)/parser_standard_options.js: parser_standard_options.js $(HTMLDIR)/parser_standard_options.css: parser_standard_options.css $(INSTALL_DATA) $< $@ + +$(MDDIR)/mkdocs.yml: mkdocs/mkdocs.yml + $(INSTALL_DATA) $< $@ + +$(MDDIR)/source/grassdocs.css: mkdocs/grassdocs.css + $(INSTALL_DATA) $< $@ + +$(MDDIR)/overrides/partials/footer.html: mkdocs/overrides/partials/footer.html | $(MDDIR)/overrides/partials + $(INSTALL_DATA) $< $@ + +$(MDDIR)/overrides/partials: + $(MKDIR) $@ + +build-mkdocs: + @cd $(MDDIR) ; SITE_NAME="GRASS GIS $(GRASS_VERSION_NUMBER) Reference Manual" \ + COPYRIGHT="© 2003-$(GRASS_VERSION_DATE) GRASS Development Team, GRASS GIS $(GRASS_VERSION_NUMBER) Reference Manual" \ + mkdocs build diff --git a/man/build.py b/man/build.py new file mode 100644 index 00000000000..9cc47446a2a --- /dev/null +++ b/man/build.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python3 + +# utilities for generating HTML indices +# (C) 2003-2025 Markus Neteler and the GRASS Development Team +# Authors: +# Markus Neteler +# Glynn Clements +# Luca Delucchi + +import os +import string +from datetime import datetime +from pathlib import Path + +# TODO: better fix this in include/Make/Html.make, see bug RT #5361 + +# exclude following list of modules from help index: + +exclude_mods = [ + "i.find", + "r.watershed.ram", + "r.watershed.seg", + "v.topo.check", + "helptext.html", +] + +# these modules don't use G_parser() + +desc_override = { + "g.parser": "Provides automated parser, GUI, and help support for GRASS scipts.", + "r.li.daemon": "Support module for r.li landscape index calculations.", +} + +# File template pieces follow + +message_tmpl = string.Template( + r"""Generated HTML docs in ${man_dir}/index.html +---------------------------------------------------------------------- +Following modules are missing the 'modulename.html' file in src code: +""" +) + +############################################################################ + + +def check_for_desc_override(basename): + return desc_override.get(basename) + + +def read_file(name): + return Path(name).read_text() + + +def write_file(name, contents): + Path(name).write_text(contents) + + +def try_mkdir(path): + try: + os.mkdir(path) + except OSError: + pass + + +def replace_file(name): + temp = name + ".tmp" + if ( + os.path.exists(name) + and os.path.exists(temp) + and read_file(name) == read_file(temp) + ): + os.remove(temp) + else: + try: + os.remove(name) + except OSError: + pass + os.rename(temp, name) + + +def copy_file(src, dst): + write_file(dst, read_file(src)) 
+ + +def get_files(man_dir, cls=None, ignore_gui=True, extension="html"): + for cmd in sorted(os.listdir(man_dir)): + if ( + cmd.endswith(f".{extension}") + and (cls in (None, "*") or cmd.startswith(cls + ".")) + and (cls != "*" or len(cmd.split(".")) >= 3) + and cmd not in [f"full_index.{extension}", f"index.{extension}"] + and cmd not in exclude_mods + and ((ignore_gui and not cmd.startswith("wxGUI.")) or not ignore_gui) + ): + yield cmd + + +def write_header(f, title, ismain=False, body_width="99%", template="html"): + if template == "html": + from build_html import header1_tmpl, macosx_tmpl, header2_tmpl + else: + from build_md import header1_tmpl, macosx_tmpl, header2_tmpl + f.write(header1_tmpl.substitute(title=title)) + if ismain and macosx: + f.write( + macosx_tmpl.substitute(grass_version=grass_version, grass_mmver=grass_mmver) + ) + f.write(header2_tmpl.substitute(grass_version=grass_version, body_width=body_width)) + + +def write_cmd_overview(f, template="html"): + if template == "html": + from build_html import overview_tmpl + else: + from build_md import overview_tmpl + f.write( + overview_tmpl.substitute( + grass_version_major=grass_version_major, + grass_version_minor=grass_version_minor, + ) + ) + + +def write_footer(f, index_url, year=None, template="html"): + if template == "html": + from build_html import footer_tmpl + else: + from build_md import footer_tmpl + cur_year = default_year if year is None else year + f.write( + footer_tmpl.substitute( + grass_version=grass_version, index_url=index_url, year=cur_year + ) + ) + + +def to_title(name): + """Convert name of command class/family to form suitable for title""" + if name == "PostScript": + return name + return name.capitalize() + + +############################################################################ + +arch_dist_dir = os.environ["ARCH_DISTDIR"] +gisbase = os.environ["GISBASE"] +grass_version = os.getenv("VERSION_NUMBER", "unknown") +grass_version_major = grass_version.split(".")[0] +grass_version_minor = grass_version.split(".")[1] +grass_mmver = ".".join(grass_version.split(".")[0:2]) +macosx = "darwin" in os.environ["ARCH"].lower() +default_year = os.getenv("VERSION_DATE") +if not default_year: + default_year = str(datetime.now().year) + +############################################################################ diff --git a/man/build_check.py b/man/build_check.py index 9d9675fb7ac..1dc578c56af 100644 --- a/man/build_check.py +++ b/man/build_check.py @@ -9,13 +9,14 @@ import sys import os -from build_html import html_dir, message_tmpl, html_files, read_file +from build import message_tmpl, get_files, read_file +from build_html import man_dir -os.chdir(html_dir) +os.chdir(man_dir) -sys.stdout.write(message_tmpl.substitute(html_dir=html_dir)) +sys.stdout.write(message_tmpl.substitute(man_dir=man_dir)) -for cmd in html_files("*"): +for cmd in get_files(man_dir, "*"): if "DESCRIPTION" not in read_file(cmd): sys.stdout.write("%s\n" % cmd[:-5]) diff --git a/man/build_class.py b/man/build_class.py index 564fb5c20e6..7dc53c121bf 100644 --- a/man/build_class.py +++ b/man/build_class.py @@ -9,67 +9,84 @@ import sys import os -from build_html import ( - html_dir, - write_html_header, - grass_version, - modclass_intro_tmpl, - modclass_tmpl, - to_title, - html_files, - check_for_desc_override, - get_desc, - desc2_tmpl, - write_html_footer, - replace_file, -) +no_intro_page_classes = ["display", "general", "miscellaneous", "postscript"] -no_intro_page_classes = ["display", "general", "miscellaneous", "postscript"] +def 
build_class(ext): + if ext == "html": + from build_html import ( + modclass_tmpl, + get_desc, + desc2_tmpl, + modclass_intro_tmpl, + man_dir, + ) + else: + from build_md import ( + modclass_tmpl, + get_desc, + desc2_tmpl, + modclass_intro_tmpl, + man_dir, + ) -os.chdir(html_dir) + os.chdir(man_dir) -# write separate module pages: + filename = modclass + f".{ext}" + with open(filename + ".tmp", "w") as f: + write_header( + f, + "{} modules - GRASS GIS {} Reference Manual".format( + modclass.capitalize(), grass_version + ), + template=ext, + ) + modclass_lower = modclass.lower() + modclass_visible = modclass + if modclass_lower not in no_intro_page_classes: + if modclass_visible == "raster3d": + # convert keyword to nice form + modclass_visible = "3D raster" + f.write( + modclass_intro_tmpl.substitute( + modclass=modclass_visible, modclass_lower=modclass_lower + ) + ) + f.write(modclass_tmpl.substitute(modclass=to_title(modclass_visible))) -# for all module groups: -cls = sys.argv[1] -modclass = sys.argv[2] -year = None -if len(sys.argv) > 3: - year = sys.argv[3] + # for all modules: + for cmd in get_files(man_dir, cls, extension=ext): + basename = os.path.splitext(cmd)[0] + desc = check_for_desc_override(basename) + if desc is None: + desc = get_desc(cmd) + f.write(desc2_tmpl.substitute(cmd=cmd, basename=basename, desc=desc)) + if ext == "html": + f.write("
      sqlite  Data storage in SQLite database files (default DB backend)  http://sqlite.org/
              https://sqlite.org/
      dbf     Data storage in DBF files  http://shapelib.maptools.org/dbf_api.html
      pg      Data storage in PostgreSQL RDBMS  http://postgresql.org/
              https://postgresql.org/
      mysql   Data storage in MySQL RDBMS  http://mysql.org/
              https://www.mysql.com/
      odbc    Data storage via UnixODBC (PostgreSQL, Oracle, etc.)  https://www.unixodbc.org/
      \n") -filename = modclass + ".html" + write_footer(f, f"index.{ext}", year, template=ext) -f = open(filename + ".tmp", "w") + replace_file(filename) -write_html_header( - f, - "%s modules - GRASS GIS %s Reference Manual" - % (modclass.capitalize(), grass_version), -) -modclass_lower = modclass.lower() -modclass_visible = modclass -if modclass_lower not in no_intro_page_classes: - if modclass_visible == "raster3d": - # covert keyword to nice form - modclass_visible = "3D raster" - f.write( - modclass_intro_tmpl.substitute( - modclass=modclass_visible, modclass_lower=modclass_lower - ) - ) -f.write(modclass_tmpl.substitute(modclass=to_title(modclass_visible))) -# for all modules: -for cmd in html_files(cls): - basename = os.path.splitext(cmd)[0] - desc = check_for_desc_override(basename) - if desc is None: - desc = get_desc(cmd) - f.write(desc2_tmpl.substitute(cmd=cmd, basename=basename, desc=desc)) -f.write("
  • \n") +if __name__ == "__main__": + # for all module groups: + cls = sys.argv[1] + modclass = sys.argv[2] + year = None + if len(sys.argv) > 3: + year = sys.argv[3] + + from build import ( + grass_version, + to_title, + check_for_desc_override, + replace_file, + get_files, + write_header, + write_footer, + ) -write_html_footer(f, "index.html", year) + build_class("html") -f.close() -replace_file(filename) + build_class("md") diff --git a/man/build_class_graphical.py b/man/build_class_graphical.py index 83338f1942c..0eed97beb46 100644 --- a/man/build_class_graphical.py +++ b/man/build_class_graphical.py @@ -17,20 +17,23 @@ import os import fnmatch -# from build_html import * -from build_html import ( +from build import ( default_year, - header1_tmpl, grass_version, - modclass_intro_tmpl, to_title, - html_files, + get_files, check_for_desc_override, - get_desc, - write_html_footer, + write_footer, replace_file, ) +from build_html import ( + header1_tmpl, + modclass_intro_tmpl, + get_desc, + man_dir, +) + header_graphical_index_tmpl = """\ @@ -128,65 +131,62 @@ def generate_page_for_category( short_family, module_family, imgs, year, skip_no_image=False ): filename = module_family + "_graphical.html" - - output = open(filename + ".tmp", "w") - - output.write( - header1_tmpl.substitute( - title="GRASS GIS %s Reference Manual: Graphical index" % grass_version - ) - ) - output.write(header_graphical_index_tmpl) - - if module_family.lower() not in {"general", "postscript"}: - if module_family == "raster3d": - # covert keyword to nice form - module_family = "3D raster" + with open(filename + ".tmp", "w") as output: output.write( - modclass_intro_tmpl.substitute( - modclass=module_family, modclass_lower=module_family.lower() + header1_tmpl.substitute( + title="GRASS GIS %s Reference Manual: Graphical index" % grass_version ) ) - if module_family == "wxGUI": - output.write("

wxGUI components:") - elif module_family == "guimodules": - output.write("g.gui.* modules:") - else: - output.write("{0} modules:".format(to_title(module_family))) - output.write('
      ') - - # for all modules: - for cmd in html_files(short_family, ignore_gui=False): - basename = os.path.splitext(cmd)[0] - desc = check_for_desc_override(basename) - if desc is None: - desc = get_desc(cmd) - img = get_module_image(basename, imgs) - img_class = "linkimg" - if skip_no_image and not img: - continue - if not img: - img = "grass_logo.png" - img_class = "default-img" - if basename.startswith("wxGUI"): - basename = basename.replace(".", " ") - output.write( - "
    • " - '' - '' - '{name} ' - '{desc}' - "" - "
    • ".format( - html=cmd, img=img, name=basename, desc=desc, img_class=img_class + output.write(header_graphical_index_tmpl) + + if module_family.lower() not in {"general", "postscript"}: + if module_family == "raster3d": + # covert keyword to nice form + module_family = "3D raster" + output.write( + modclass_intro_tmpl.substitute( + modclass=module_family, modclass_lower=module_family.lower() + ) + ) + if module_family == "wxGUI": + output.write("

wxGUI components:") + elif module_family == "guimodules": + output.write("g.gui.* modules:") + else: + output.write("{0} modules:".format(to_title(module_family))) + output.write('
        ') + + # for all modules: + for cmd in get_files(man_dir, short_family, ignore_gui=False): + basename = os.path.splitext(cmd)[0] + desc = check_for_desc_override(basename) + if desc is None: + desc = get_desc(cmd) + img = get_module_image(basename, imgs) + img_class = "linkimg" + if skip_no_image and not img: + continue + if not img: + img = "grass_logo.png" + img_class = "default-img" + if basename.startswith("wxGUI"): + basename = basename.replace(".", " ") + output.write( + "
      • " + '' + '' + '{name} ' + '{desc}' + "" + "
      • ".format( + html=cmd, img=img, name=basename, desc=desc, img_class=img_class + ) ) - ) - output.write("
      ") + output.write("
    ") - write_html_footer(output, "index.html", year) + write_footer(output, "index.html", year, template="html") - output.close() replace_file(filename) diff --git a/man/build_class_rest.py b/man/build_class_rest.py index 05aec125d04..97d49a5cdb8 100644 --- a/man/build_class_rest.py +++ b/man/build_class_rest.py @@ -32,27 +32,26 @@ modclass = sys.argv[2] filename = modclass + ".txt" - -f = open(filename + ".tmp", "wb") - -write_rest_header(f, "GRASS GIS %s Reference Manual: %s" % (grass_version, modclass)) -if modclass.lower() not in {"general", "miscellaneous", "postscript"}: - f.write( - modclass_intro_tmpl.substitute( - modclass=modclass, modclass_lower=modclass.lower() - ) +with open(filename + ".tmp", "wb") as f: + write_rest_header( + f, "GRASS GIS %s Reference Manual: %s" % (grass_version, modclass) ) -f.write(modclass_tmpl.substitute(modclass=modclass)) + if modclass.lower() not in {"general", "miscellaneous", "postscript"}: + f.write( + modclass_intro_tmpl.substitute( + modclass=modclass, modclass_lower=modclass.lower() + ) + ) + f.write(modclass_tmpl.substitute(modclass=modclass)) -# for all modules: -for cmd in rest_files(cls): - basename = os.path.splitext(cmd)[0] - desc = check_for_desc_override(basename) - if desc is None: - desc = get_desc(cmd) - f.write(desc2_tmpl.substitute(basename=basename, desc=desc)) + # for all modules: + for cmd in rest_files(cls): + basename = os.path.splitext(cmd)[0] + desc = check_for_desc_override(basename) + if desc is None: + desc = get_desc(cmd) + f.write(desc2_tmpl.substitute(basename=basename, desc=desc)) -write_rest_footer(f, "index.txt") + write_rest_footer(f, "index.txt") -f.close() replace_file(filename) diff --git a/man/build_full_index.py b/man/build_full_index.py index 7d2ce03e342..a47630e3e31 100644 --- a/man/build_full_index.py +++ b/man/build_full_index.py @@ -11,77 +11,98 @@ from operator import itemgetter -from build_html import ( - html_dir, - grass_version, - html_files, - write_html_header, - write_html_footer, - check_for_desc_override, - get_desc, - replace_file, - to_title, - full_index_header, - toc, - cmd2_tmpl, - desc1_tmpl, -) - -year = None -if len(sys.argv) > 1: - year = sys.argv[1] - -os.chdir(html_dir) - -# TODO: create some master function/dict somewhere -class_labels = { - "d": "display", - "db": "database", - "g": "general", - "i": "imagery", - "m": "miscellaneous", - "ps": "PostScript", - "r": "raster", - "r3": "3D raster", - "t": "temporal", - "v": "vector", -} - -classes = [] -for cmd in html_files("*"): - prefix = cmd.split(".")[0] - if prefix not in [item[0] for item in classes]: - classes.append((prefix, class_labels.get(prefix, prefix))) -classes.sort(key=itemgetter(0)) - -# begin full index: -filename = "full_index.html" -f = open(filename + ".tmp", "w") - -write_html_header( - f, "GRASS GIS %s Reference Manual: Full index" % grass_version, body_width="80%" -) - -# generate main index of all modules: -f.write(full_index_header) - -f.write(toc) - -# for all module groups: -for cls, cls_label in classes: - f.write(cmd2_tmpl.substitute(cmd_label=to_title(cls_label), cmd=cls)) - # for all modules: - for cmd in html_files(cls): - basename = os.path.splitext(cmd)[0] - desc = check_for_desc_override(basename) - if desc is None: - desc = get_desc(cmd) - f.write(desc1_tmpl.substitute(cmd=cmd, basename=basename, desc=desc)) - f.write("\n") - -write_html_footer(f, "index.html", year) - -f.close() -replace_file(filename) - -# done full index + +def build_full_index(ext): + if ext == "html": + from build_html import 
( + man_dir, + full_index_header, + cmd2_tmpl, + desc1_tmpl, + get_desc, + toc, + ) + else: + from build_md import ( + man_dir, + full_index_header, + cmd2_tmpl, + desc1_tmpl, + get_desc, + ) + + os.chdir(man_dir) + + # TODO: create some master function/dict somewhere + class_labels = { + "d": "display", + "db": "database", + "g": "general", + "i": "imagery", + "m": "miscellaneous", + "ps": "PostScript", + "r": "raster", + "r3": "3D raster", + "t": "temporal", + "v": "vector", + } + + classes = [] + for cmd in get_files(man_dir, "*", extension=ext): + prefix = cmd.split(".")[0] + if prefix not in [item[0] for item in classes]: + classes.append((prefix, class_labels.get(prefix, prefix))) + classes.sort(key=itemgetter(0)) + + # begin full index: + filename = f"full_index.{ext}" + with open(filename + ".tmp", "w") as f: + write_header( + f, + "GRASS GIS {} Reference Manual - Full index".format(grass_version), + body_width="80%", + template=ext, + ) + + # generate main index of all modules: + f.write(full_index_header) + + if ext == "html": + f.write(toc) + + # for all module groups: + for cls, cls_label in classes: + f.write(cmd2_tmpl.substitute(cmd_label=to_title(cls_label), cmd=cls)) + # for all modules: + for cmd in get_files(man_dir, cls, extension=ext): + basename = os.path.splitext(cmd)[0] + desc = check_for_desc_override(basename) + if desc is None: + desc = get_desc(cmd) + f.write(desc1_tmpl.substitute(cmd=cmd, basename=basename, desc=desc)) + if ext == "html": + f.write("\n") + + write_footer(f, f"index.{ext}", year, template=ext) + + replace_file(filename) + + +if __name__ == "__main__": + year = None + if len(sys.argv) > 1: + year = sys.argv[1] + + from build import ( + get_files, + write_footer, + write_header, + to_title, + grass_version, + check_for_desc_override, + replace_file, + ) + + build_full_index("html") + + build_full_index("md") diff --git a/man/build_full_index_rest.py b/man/build_full_index_rest.py index 26183505393..c877d914cc0 100644 --- a/man/build_full_index_rest.py +++ b/man/build_full_index_rest.py @@ -34,34 +34,32 @@ # begin full index: filename = "full_index.txt" -f = open(filename + ".tmp", "wb") +with open(filename + ".tmp", "wb") as f: + write_rest_header(f, "GRASS GIS %s Reference Manual: Full index" % grass_version) -write_rest_header(f, "GRASS GIS %s Reference Manual: Full index" % grass_version) + # generate main index of all modules: + f.write(full_index_header) + # " -# generate main index of all modules: -f.write(full_index_header) -# " + # for cls in classes: + # f.write(cmd1_tmpl.substitute(cmd = cls)) + # if cls != classes[-1]: + # f.write(" | ") -# for cls in classes: -# f.write(cmd1_tmpl.substitute(cmd = cls)) -# if cls != classes[-1]: -# f.write(" | ") + f.write(sections) -f.write(sections) + # for all module groups: + for cls in classes: + f.write(cmd2_tmpl.substitute(cmd=cls)) + # for all modules: + for cmd in rest_files(cls): + basename = os.path.splitext(cmd)[0] + desc = check_for_desc_override(basename) + if desc is None: + desc = get_desc(cmd) + f.write(desc1_tmpl.substitute(basename=basename, desc=desc)) + f.write("\n") -# for all module groups: -for cls in classes: - f.write(cmd2_tmpl.substitute(cmd=cls)) - # for all modules: - for cmd in rest_files(cls): - basename = os.path.splitext(cmd)[0] - desc = check_for_desc_override(basename) - if desc is None: - desc = get_desc(cmd) - f.write(desc1_tmpl.substitute(basename=basename, desc=desc)) - f.write("\n") + write_rest_footer(f, "index.txt") -write_rest_footer(f, "index.txt") - -f.close() 
replace_file(filename) diff --git a/man/build_graphical_index.py b/man/build_graphical_index.py index 04e981c021e..bc7dd5299d9 100755 --- a/man/build_graphical_index.py +++ b/man/build_graphical_index.py @@ -5,7 +5,7 @@ # MODULE: build_graphical_index # AUTHOR(S): Vaclav Petras # PURPOSE: Build graphical index -# COPYRIGHT: (C) 2015-2024 by Vaclav Petras and the GRASS Development Team +# COPYRIGHT: (C) 2015-2025 by Vaclav Petras and the GRASS Development Team # # This program is free software under the GNU General Public # License (>=v2). Read the file COPYING that comes with GRASS @@ -14,73 +14,11 @@ ############################################################################# import os -import sys -from build_html import write_html_footer, grass_version, header1_tmpl - - -output_name = "graphical_index.html" +output_name = "graphical_index" year = os.getenv("VERSION_DATE") -# other similar strings are in a different file -# TODO: all HTML manual building needs refactoring (perhaps grass.tools?) -header_graphical_index_tmpl = """\ - - - - -
- -GRASS logo - -Graphical index of GRASS GIS modules

    -""" - def std_img_name(name): return "gi_{0}.jpg".format(name) @@ -114,30 +52,57 @@ def std_img_name(name): ] -def main(): - html_dir = sys.argv[1] +def main(ext): + if ext == "html": + from build_html import ( + header1_tmpl, + header_graphical_index_tmpl, + man_dir, + ) + else: + from build_md import ( + header1_tmpl, + header_graphical_index_tmpl, + man_dir, + ) - with open(os.path.join(html_dir, output_name), "w") as output: + with open(os.path.join(man_dir, output_name + f".{ext}"), "w") as output: output.write( header1_tmpl.substitute( - title="GRASS GIS %s Reference " - "Manual: Graphical index" % grass_version + title=f"GRASS GIS {grass_version} Reference Manual - Graphical index" ) ) output.write(header_graphical_index_tmpl) - output.write('
      \n') + if ext == "html": + output.write('
        \n') for html_file, image, label in index_items: - output.write( - "
      • " - '' - '' - '{name}' - "" - "
      • \n".format(html=html_file, img=image, name=label) - ) - output.write("
      ") - write_html_footer(output, "index.html", year) + if ext == "html": + output.write( + "
    • " + '' + '' + '{name}' + "" + "
    • \n".format(html=html_file, img=image, name=label) + ) + else: + output.write( + "- [![{name}]({img})]({link})".format( + link=html_file, img=image, name=label + ) + ) + + if ext == "html": + output.write("
    ") + write_footer(output, f"index.{ext}", year, template=ext) if __name__ == "__main__": - main() + from build import ( + write_footer, + grass_version, + ) + + main("html") + + main("md") diff --git a/man/build_html.py b/man/build_html.py index a8cc4c057a5..8116c8fccc5 100644 --- a/man/build_html.py +++ b/man/build_html.py @@ -1,34 +1,6 @@ -#!/usr/bin/env python3 - -# utilities for generating HTML indices -# (C) 2003-2024 Markus Neteler and the GRASS Development Team -# Authors: -# Markus Neteler -# Glynn Clements -# Luca Delucchi - import os import string -from datetime import datetime - -# TODO: better fix this in include/Make/Html.make, see bug RT #5361 - -# exclude following list of modules from help index: - -exclude_mods = [ - "i.find", - "r.watershed.ram", - "r.watershed.seg", - "v.topo.check", - "helptext.html", -] - -# these modules don't use G_parser() - -desc_override = { - "g.parser": "Provides automated parser, GUI, and help support for GRASS scipts.", - "r.li.daemon": "Support module for r.li landscape index calculations.", -} +from pathlib import Path # File template pieces follow @@ -108,7 +80,8 @@

     Graphical User Interface

    @@ -310,15 +283,6 @@ """ # " - -message_tmpl = string.Template( - r"""Generated HTML docs in ${html_dir}/index.html ----------------------------------------------------------------------- -Following modules are missing the 'modulename.html' file in src code: -""" -) -# " - moduletopics_tmpl = string.Template( r"""
  • ${name}
  • @@ -385,136 +349,90 @@ """ # " -############################################################################ - - -def check_for_desc_override(basename): - return desc_override.get(basename) - - -def read_file(name): - f = open(name) - s = f.read() - f.close() - return s - - -def write_file(name, contents): - f = open(name, "w") - f.write(contents) - f.close() - - -def try_mkdir(path): - try: - os.mkdir(path) - except OSError: - pass - - -def replace_file(name): - temp = name + ".tmp" - if ( - os.path.exists(name) - and os.path.exists(temp) - and read_file(name) == read_file(temp) - ): - os.remove(temp) - else: - try: - os.remove(name) - except OSError: - pass - os.rename(temp, name) - - -def copy_file(src, dst): - write_file(dst, read_file(src)) - +# TODO: all HTML manual building needs refactoring (perhaps grass.tools?) +header_graphical_index_tmpl = """\ + + + + +
    -def write_html_footer(f, index_url, year=None): - cur_year = default_year if year is None else year - f.write( - footer_tmpl.substitute( - grass_version=grass_version, index_url=index_url, year=cur_year - ) - ) +GRASS logo +
+Graphical index of GRASS GIS modules

    +""" def get_desc(cmd): - f = open(cmd) - while True: - line = f.readline() - if not line: - return "" - if "NAME" in line: - break - - while True: - line = f.readline() - if not line: - return "" - if "SYNOPSIS" in line: - break - if "" in line: - sp = line.split("-", 1) - if len(sp) > 1: - return sp[1].strip() - return None + with Path(cmd).open() as f: + while True: + line = f.readline() + if not line: + return "" + if "NAME" in line: + break + + while True: + line = f.readline() + if not line: + return "" + if "SYNOPSIS" in line: + break + if "" in line: + sp = line.split("-", 1) + if len(sp) > 1: + return sp[1].strip() + return None return "" -def to_title(name): - """Convert name of command class/family to form suitable for title""" - if name == "PostScript": - return name - return name.capitalize() - - ############################################################################ -arch_dist_dir = os.environ["ARCH_DISTDIR"] -html_dir = os.path.join(arch_dist_dir, "docs", "html") -gisbase = os.environ["GISBASE"] -grass_version = os.getenv("VERSION_NUMBER", "unknown") -grass_version_major = grass_version.split(".")[0] -grass_version_minor = grass_version.split(".")[1] -grass_mmver = ".".join(grass_version.split(".")[0:2]) -macosx = "darwin" in os.environ["ARCH"].lower() -default_year = os.getenv("VERSION_DATE") -if not default_year: - default_year = str(datetime.now().year) +man_dir = os.path.join(os.environ["ARCH_DISTDIR"], "docs", "html") ############################################################################ diff --git a/man/build_index.py b/man/build_index.py index 12de7f01162..ef4dbc0dcc1 100644 --- a/man/build_index.py +++ b/man/build_index.py @@ -9,26 +9,42 @@ import sys import os -from build_html import ( - html_dir, - grass_version, - write_html_header, - write_html_cmd_overview, - write_html_footer, +from build import ( + write_header, + write_cmd_overview, + write_footer, replace_file, + grass_version, ) -os.chdir(html_dir) - -filename = "index.html" -f = open(filename + ".tmp", "w") - year = None if len(sys.argv) > 1: year = sys.argv[1] -write_html_header(f, "GRASS GIS %s Reference Manual" % grass_version, True) -write_html_cmd_overview(f) -write_html_footer(f, "index.html", year) -f.close() -replace_file(filename) + +def build_index(ext): + if ext == "html": + from build_html import ( + man_dir, + ) + else: + from build_md import ( + man_dir, + ) + + filename = f"index.{ext}" + os.chdir(man_dir) + with open(filename + ".tmp", "w") as f: + write_header( + f, f"GRASS GIS {grass_version} Reference Manual", True, template=ext + ) + write_cmd_overview(f) + write_footer(f, f"index.{ext}", year, template=ext) + replace_file(filename) + + +if __name__ == "__main__": + + build_index("html") + + build_index("md") diff --git a/man/build_index_rest.py b/man/build_index_rest.py index b919042e4af..0986838b19f 100644 --- a/man/build_index_rest.py +++ b/man/build_index_rest.py @@ -21,11 +21,9 @@ os.chdir(rest_dir) filename = "index.txt" -f = open(filename + ".tmp", "w") +with open(filename + ".tmp", "w") as f: + write_rest_header(f, "GRASS GIS %s Reference Manual" % grass_version, True) + write_rest_cmd_overview(f) + write_rest_footer(f, "index.txt") -write_rest_header(f, "GRASS GIS %s Reference Manual" % grass_version, True) -write_rest_cmd_overview(f) -write_rest_footer(f, "index.txt") - -f.close() replace_file(filename) diff --git a/man/build_keywords.py b/man/build_keywords.py index b0dfe95e0e3..546eed62f37 100644 --- a/man/build_keywords.py +++ b/man/build_keywords.py @@ 
-21,13 +21,6 @@ import os import sys import glob -from build_html import ( - grass_version, - header1_tmpl, - headerkeywords_tmpl, - write_html_footer, -) - blacklist = [ "Display", @@ -42,34 +35,25 @@ "Vector", ] -path = sys.argv[1] addons_path = None -if len(sys.argv) >= 3: - addons_path = sys.argv[2] +if len(sys.argv) >= 2: + addons_path = sys.argv[1] year = os.getenv("VERSION_DATE") -keywords = {} - -htmlfiles = glob.glob(os.path.join(path, "*.html")) -if addons_path: - addons_man_files = glob.glob(os.path.join(addons_path, "*.html")) - htmlfiles.extend(addons_man_files) - -char_list = {} - -def get_module_man_html_file_path(module): +def get_module_man_file_path(man_dir, module, addons_man_files): """Get module manual HTML file path :param str module: module manual HTML file name e.g. v.surf.rst.html + :param addons_man_files: list of HTML manual files :return str module_path: core/addon module manual HTML file path """ if addons_path and module in ",".join(addons_man_files): module_path = os.path.join(addons_path, module) module_path = module_path.replace( - os.path.commonpath([path, module_path]), + os.path.commonpath([man_dir, module_path]), ".", ) else: @@ -77,97 +61,139 @@ def get_module_man_html_file_path(module): return module_path -for html_file in htmlfiles: - fname = os.path.basename(html_file) - with open(html_file) as f: - lines = f.readlines() - # TODO maybe move to Python re (regex) - # remove empty lines - lines = [x for x in lines if x != "\n"] - try: - index_keys = lines.index("

KEYWORDS\n") + 1 - index_desc = lines.index("NAME
    \n") + 1 - except Exception: - continue - try: - keys = lines[index_keys].split(",") - except Exception: - continue - for key in keys: - key = key.strip() - try: - key = key.split(">")[1].split("<")[0] - except Exception: - pass - if not key: - sys.exit("Empty keyword from file %s line: %s" % (fname, lines[index_keys])) - if key not in keywords.keys(): - keywords[key] = [] - keywords[key].append(fname) - elif fname not in keywords[key]: - keywords[key].append(fname) - -for black in blacklist: - try: - del keywords[black] - except Exception: +def build_keywords(ext): + if ext == "html": + from build_html import header1_tmpl, headerkeywords_tmpl, man_dir + else: + from build_md import ( + header1_tmpl, + headerkeywords_tmpl, + man_dir, + ) + + keywords = {} + + files = glob.glob(os.path.join(man_dir, f"*.{ext}")) + # TODO: add markdown support + if addons_path: + addons_man_files = glob.glob(os.path.join(addons_path, f"*.{ext}")) + files.extend(addons_man_files) + else: + addons_man_files = [] + + char_list = {} + + for in_file in files: + fname = os.path.basename(in_file) + with open(in_file) as f: + lines = f.readlines() + + if ext == "html": + # TODO maybe move to Python re (regex) + try: + index_keys = lines.index("

    KEYWORDS

    \n") + 1 + except Exception: + continue + try: + keys = [] + for k in lines[index_keys].split(","): + keys.append(k.strip().split(">")[1].split("<")[0]) + except Exception: + continue + else: + keys = [] + for line in lines: + if "keywords:" in line: + keys = [x.strip() for x in line.split(":", 1)[1].strip().split(",")] + break + + for key in keys: + if key not in keywords.keys(): + keywords[key] = [] + keywords[key].append(fname) + elif fname not in keywords[key]: + keywords[key].append(fname) + + for black in blacklist: try: - del keywords[black.lower()] + del keywords[black] except Exception: - continue - -for key in sorted(keywords.keys()): - # this list it is useful to create the TOC using only the first - # character for keyword - firstchar = key[0].lower() - if firstchar not in char_list.keys(): - char_list[str(firstchar)] = key - elif firstchar in char_list.keys(): - if key.lower() < char_list[str(firstchar)].lower(): - char_list[str(firstchar.lower())] = key - -keywordsfile = open(os.path.join(path, "keywords.html"), "w") -keywordsfile.write( - header1_tmpl.substitute( - title="GRASS GIS %s Reference Manual: Keywords index" % grass_version + try: + del keywords[black.lower()] + except Exception: + continue + + for key in sorted(keywords.keys()): + # this list it is useful to create the TOC using only the first + # character for keyword + firstchar = key[0].lower() + if firstchar not in char_list.keys(): + char_list[str(firstchar)] = key + elif firstchar in char_list.keys(): + if key.lower() < char_list[str(firstchar)].lower(): + char_list[str(firstchar.lower())] = key + + with open(os.path.join(man_dir, f"keywords.{ext}"), "w") as keywordsfile: + keywordsfile.write( + header1_tmpl.substitute( + title=f"GRASS GIS {grass_version} Reference Manual - Keywords index" + ) + ) + keywordsfile.write(headerkeywords_tmpl) + if ext == "html": + keywordsfile.write("
    ") + sortedKeys = sorted(keywords.keys(), key=lambda s: s.lower()) + + for key in sortedKeys: + if ext == "html": + keyword_line = '
    {key}
    '.format( # noqa: E501 + key=key + ) + else: + keyword_line = f"### **{key}**\n" + for value in sorted(keywords[key]): + man_file_path = get_module_man_file_path( + man_dir, value, addons_man_files + ) + if ext == "html": + keyword_line += f' {value.replace(f".{ext}", "")},' # noqa: E501 + else: + keyword_line += f' [{value.rsplit(".", 1)[0]}]({man_file_path}),' + keyword_line = keyword_line.rstrip(",") + if ext == "html": + keyword_line += "
    " + keyword_line += "\n" + keywordsfile.write(keyword_line) + if ext == "html": + keywordsfile.write("
    \n") + if ext == "html": + # create toc + toc = '
\nTable of contents
    ' # noqa: E501 + test_length = 0 + all_keys = len(char_list.keys()) + for k in sorted(char_list.keys()): + test_length += 1 + # toc += '

  • %s
  • ' % (char_list[k], k) + if test_length % 4 == 0 and test_length != all_keys: + toc += '\n%s, ' % (char_list[k], k) + elif test_length % 4 == 0 and test_length == all_keys: + toc += '\n%s' % (char_list[k], k) + elif test_length == all_keys: + toc += '%s' % (char_list[k], k) + else: + toc += '%s, ' % (char_list[k], k) + toc += "

    \n" + keywordsfile.write(toc) + + write_footer(keywordsfile, f"index.{ext}", year, template=ext) + + +if __name__ == "__main__": + from build import ( + grass_version, + write_footer, ) -) -keywordsfile.write(headerkeywords_tmpl) -keywordsfile.write("
    ") -sortedKeys = sorted(keywords.keys(), key=lambda s: s.lower()) - -for key in sortedKeys: - keyword_line = '
    %s
    ' % ( - key, - key, - ) - for value in sorted(keywords[key]): - keyword_line += ( - f' ' - f'{value.replace(".html", "")},' - ) - keyword_line = keyword_line.rstrip(",") - keyword_line += "
    \n" - keywordsfile.write(keyword_line) -keywordsfile.write("
    \n") -# create toc -toc = '
\nTable of contents
    ' -test_length = 0 -all_keys = len(char_list.keys()) -for k in sorted(char_list.keys()): - test_length += 1 - # toc += '

  • %s
  • ' % (char_list[k], k) - if test_length % 4 == 0 and test_length != all_keys: - toc += '\n%s, ' % (char_list[k], k) - elif test_length % 4 == 0 and test_length == all_keys: - toc += '\n%s' % (char_list[k], k) - elif test_length == all_keys: - toc += '%s' % (char_list[k], k) - else: - toc += '%s, ' % (char_list[k], k) -toc += "

    \n" -keywordsfile.write(toc) + build_keywords("html") -write_html_footer(keywordsfile, "index.html", year) -keywordsfile.close() + build_keywords("md") diff --git a/man/build_manual_gallery.py b/man/build_manual_gallery.py index a8b077be8f9..a0f8176a246 100755 --- a/man/build_manual_gallery.py +++ b/man/build_manual_gallery.py @@ -12,17 +12,16 @@ # for details. # ############################################################################# +from __future__ import annotations import os from pathlib import Path -import sys import fnmatch import re +from typing import TYPE_CHECKING -from build_html import write_html_footer, grass_version, header1_tmpl - - -output_name = "manual_gallery.html" +if TYPE_CHECKING: + from collections.abc import Iterable img_extensions = ["png", "jpg", "gif"] img_patterns = ["*." + extension for extension in img_extensions] @@ -94,14 +93,18 @@ """ -def img_in_html(filename, imagename) -> bool: +def img_in_file(filename: str | os.PathLike[str], imagename: str, ext: str) -> bool: # for some reason, calling search just once is much faster # than calling it on every line (time is spent in _compile) - pattern = re.compile("".format(imagename)) + if ext == "html": + pattern = re.compile("".format(imagename)) + else: + # expecting markdown + pattern = re.compile(r"!\[(.*?)\]\({0}\)".format(imagename)) return bool(re.search(pattern, Path(filename).read_text())) -def file_matches(filename, patterns): +def file_matches(filename: str, patterns: Iterable[str]): return any(fnmatch.fnmatch(filename, pattern) for pattern in patterns) @@ -135,51 +138,82 @@ def title_from_names(module_name, img_name): return "{name}".format(name=module_name) -def get_module_name(filename): - return filename.replace(".html", "") +def get_module_name(filename, ext): + return filename.replace(f".{ext}", "") + +def main(ext): + if ext == "html": + from build_html import ( + header1_tmpl, + man_dir, + ) + else: + from build_md import ( + header1_tmpl, + man_dir, + ) -def main(): - html_dir = sys.argv[1] + output_name = f"manual_gallery.{ext}" - html_files = get_files( - html_dir, - ["*.html"], - exclude_patterns=[output_name, "*_graphical.html", "graphical_index.html"], + man_files = get_files( + man_dir, + [f"*.{ext}"], + exclude_patterns=[output_name, f"*_graphical.{ext}", f"graphical_index.{ext}"], ) - img_html_files = {} + img_files = {} - for filename in os.listdir(html_dir): + for filename in os.listdir(man_dir): if filename in img_blacklist: continue if file_matches(filename, img_patterns): - for html_file in html_files: - if img_in_html(os.path.join(html_dir, html_file), filename): - img_html_files[filename] = html_file - # for now suppose one image per html + for man_filename in man_files: + if img_in_file(Path(man_dir, man_filename), filename, ext): + img_files[filename] = man_filename + # for now suppose one image per manual filename - with open(os.path.join(html_dir, output_name), "w") as output: + with open(Path(man_dir, output_name), "w") as output: output.write( header1_tmpl.substitute( title="GRASS GIS %s Reference Manual: Manual gallery" % grass_version ) ) - output.write(header_graphical_index_tmpl) - output.write('
      \n') - for image, html_file in sorted(img_html_files.items()): - name = get_module_name(html_file) + if ext == "html": + output.write(header_graphical_index_tmpl) + output.write('
        \n') + for image, filename in sorted(img_files.items()): + name = get_module_name(filename, ext) title = title_from_names(name, image) - output.write( - "
      • " - '' - '' - '{name}' - "" - "
      • \n".format(html=html_file, img=image, title=title, name=name) - ) - output.write("
      ") - write_html_footer(output, "index.html", year) + if ext == "html": + output.write( + "
    • " + '' + '' + '{name}' + "" + "
    • \n".format(fn=filename, img=image, title=title, name=name) + ) + else: + output.write(f'[![{name}]({image} "{title}")]({filename})\n') + if ext == "html": + output.write("
    ") + write_footer(output, f"index.{ext}", year) + + return img_files if __name__ == "__main__": - main() + from build import ( + write_footer, + grass_version, + ) + + img_files_html = main("html") + + img_files_md = main("md") + + # TODO: img_files_html and img_files_md should be the same + # remove lines when fixed + for k in img_files_html: + if k not in img_files_md: + print(k) diff --git a/man/build_md.py b/man/build_md.py new file mode 100644 index 00000000000..ff3da882ab8 --- /dev/null +++ b/man/build_md.py @@ -0,0 +1,267 @@ +import os +import string + +# File template pieces follow + +header1_tmpl = string.Template( + r"""--- +title: ${title} +author: GRASS Development Team +--- + +""" +) + +macosx_tmpl = string.Template( + r""" +AppleTitle: GRASS GIS ${grass_version} +AppleIcon: GRASS-${grass_mmver}/grass_icon.png +""" +) + +header2_tmpl = string.Template( + r"""# GRASS GIS ${grass_version} Reference Manual + +**Geographic Resources Analysis Support System**, commonly +referred to as [GRASS GIS](https://grass.osgeo.org), is a +[Geographic Information System](https://en.wikipedia.org/wiki/Geographic_information_system) +(GIS) used for geospatial data management and +analysis, image processing, graphics/maps production, spatial +modeling, and visualization. GRASS is currently used in academic and +commercial settings around the world, as well as by many governmental +agencies and environmental consulting companies. + +This reference manual details the use of modules distributed with +Geographic Resources Analysis Support System (GRASS), an open source +([GNU GPLed](https://www.gnu.org/licenses/gpl.html), image +processing and geographic information system (GIS). + +""" +) + +# TODO: avoid HTML tags +overview_tmpl = string.Template( + r""" + + + + + + + + + + + + + + + + + + + + + + + + +

+ Quick Introduction
+ Graphical User Interface
+ Display
+ General
+ Addons
+ Programmer's Manual
+ Raster processing
+ 3D raster processing
+ Image processing
+ Vector processing
+ Database
+ Temporal processing
+ Cartography
+ Miscellaneous & Variables
+ Python
    +""" +) + +# footer_tmpl = string.Template( +# r""" +# ____ +# [Main index](${index_url}) | +# [Topics index](topics.md) | +# [Keywords index](keywords.md) | +# [Graphical index](graphical_index.md) | +# [Full index](full_index.md) + +# © 2003-${year} +# [GRASS Development Team](https://grass.osgeo.org), +# GRASS GIS ${grass_version} Reference Manual +# """ +# ) +# replaced by footer +footer_tmpl = string.Template("") + +cmd2_tmpl = string.Template( + r""" +### ${cmd_label} commands (${cmd}.*) + +| Module | Description | +|--------|-------------| +""" +) + +desc1_tmpl = string.Template( + r"""| [${basename}](${cmd}) | ${desc} | +""" +) + +modclass_intro_tmpl = string.Template( + r"""Go to [${modclass} introduction](${modclass_lower}intro.md) | [topics](topics.md) +""" +) +# " + + +modclass_tmpl = string.Template( + r"""Go [back to help overview](index.md) +### ${modclass} commands +| Module | Description | +|--------|-------------| +""" +) + +desc2_tmpl = string.Template( + r"""| [${basename}](${cmd}) | ${desc} | +""" +) + +full_index_header = r"""Go [back to help overview](index.md) +""" + +moduletopics_tmpl = string.Template( + r""" +- [${name}](topic_${key}.md) +""" +) + +headertopics_tmpl = r"""# Topics +""" + +headerkeywords_tmpl = r"""# Keywords - Index of GRASS GIS modules +""" + +headerkey_tmpl = string.Template( + r"""# Topic: ${keyword} + +| Module | Description | +|--------|-------------| +""" +) + + +headerpso_tmpl = r""" +## Parser standard options +""" + +header_graphical_index_tmpl = """# Graphical index of GRASS GIS modules +""" + +############################################################################ + + +def get_desc(cmd): + desc = "" + with open(cmd) as f: + while True: + line = f.readline() + if not line: + return desc + if "description:" in line: + desc = line.split(":", 1)[1].strip() + break + + return desc + + +############################################################################ + +man_dir = os.path.join(os.environ["ARCH_DISTDIR"], "docs", "mkdocs", "source") + +############################################################################ diff --git a/man/build_rest.py b/man/build_rest.py index a98a099a076..18554ad5216 100644 --- a/man/build_rest.py +++ b/man/build_rest.py @@ -9,10 +9,11 @@ """ # utilities for generating REST indices # utilities for generating HTML indices -# (C) 2003-2024 by Luca Delucchi and the GRASS Development Team +# (C) 2003-2025 by Luca Delucchi and the GRASS Development Team import os import string +from pathlib import Path # TODO: better fix this in include/Make/Rest.make, see bug RT #5361 @@ -78,6 +79,7 @@ Intro vector map processing and network analysis Intro database management Intro temporal data processing + Intro Graphical User Interface Display/Graphical User Interfaces ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -169,7 +171,7 @@ -------------- :doc:`Manual main page ` \| :doc:`Full Index ` - 2003-2024 `GRASS Development Team `_, GRASS GIS ${grass_version} Reference Manual + 2003-2025 `GRASS Development Team `_, GRASS GIS ${grass_version} Reference Manual """ # noqa: E501 ) @@ -271,16 +273,11 @@ def check_for_desc_override(basename): def read_file(name): - f = open(name) - s = f.read() - f.close() - return s + return Path(name).read_text() def write_file(name, contents): - f = open(name, "w") - f.write(contents) - f.close() + Path(name).write_text(contents) def try_mkdir(path): @@ -337,25 +334,25 @@ def write_rest_footer(f, index_url): def get_desc(cmd): - f = open(cmd) - while True: - line = f.readline() - if not line: - 
return "" - if "NAME" in line: - break - - while True: - line = f.readline() - if not line: - return "" - if "SYNOPSIS" in line: - break - if "*" in line: - sp = line.split("-", 1) - if len(sp) > 1: - return sp[1].strip() - return None + with Path(cmd).open() as f: + while True: + line = f.readline() + if not line: + return "" + if "NAME" in line: + break + + while True: + line = f.readline() + if not line: + return "" + if "SYNOPSIS" in line: + break + if "*" in line: + sp = line.split("-", 1) + if len(sp) > 1: + return sp[1].strip() + return None return "" diff --git a/man/build_topics.py b/man/build_topics.py index be5f2962d2c..2ef3e8c57df 100644 --- a/man/build_topics.py +++ b/man/build_topics.py @@ -1,91 +1,149 @@ #!/usr/bin/env python3 # generates topics.html and topic_*.html -# (c) 2012 by the GRASS Development Team, Markus Neteler, Luca Delucchi +# (c) 2012-2025 by the GRASS Development Team import os -import sys import glob -from build_html import ( - grass_version, - header1_tmpl, - headertopics_tmpl, - headerkey_tmpl, - desc1_tmpl, - moduletopics_tmpl, - write_html_footer, -) - -path = sys.argv[1] +from pathlib import Path + year = os.getenv("VERSION_DATE") min_num_modules_for_topic = 3 -keywords = {} - -htmlfiles = glob.glob1(path, "*.html") - -for fname in htmlfiles: - fil = open(os.path.join(path, fname)) - # TODO maybe move to Python re (regex) - lines = fil.readlines() - try: - index_keys = lines.index("

KEYWORDS\n") + 1 - index_desc = lines.index("NAME
    \n") + 1 - except Exception: - continue - try: - key = lines[index_keys].split(",")[1].strip().replace(" ", "_") - key = key.split(">")[1].split("<")[0] - except Exception: - continue - try: - desc = lines[index_desc].split("-", 1)[1].strip() - except Exception: - desc.strip() - if key not in keywords.keys(): - keywords[key] = {} - keywords[key][fname] = desc - elif fname not in keywords[key]: - keywords[key][fname] = desc - -topicsfile = open(os.path.join(path, "topics.html"), "w") -topicsfile.write( - header1_tmpl.substitute( - title="GRASS GIS %s Reference Manual: Topics index" % grass_version - ) -) -topicsfile.write(headertopics_tmpl) - -for key, values in sorted(keywords.items(), key=lambda s: s[0].lower()): - keyfile = open(os.path.join(path, "topic_%s.html" % key), "w") - keyfile.write( - header1_tmpl.substitute( - title="GRASS GIS " - "%s Reference Manual: Topic %s" % (grass_version, key.replace("_", " ")) + +def build_topics(ext): + if ext == "html": + from build_html import ( + header1_tmpl, + headertopics_tmpl, + headerkey_tmpl, + desc1_tmpl, + moduletopics_tmpl, + man_dir, ) - ) - keyfile.write(headerkey_tmpl.substitute(keyword=key.replace("_", " "))) - num_modules = 0 - for mod, desc in sorted(values.items()): - num_modules += 1 - keyfile.write( - desc1_tmpl.substitute(cmd=mod, desc=desc, basename=mod.replace(".html", "")) + else: + from build_md import ( + header1_tmpl, + headertopics_tmpl, + headerkey_tmpl, + desc1_tmpl, + moduletopics_tmpl, + man_dir, ) - if num_modules >= min_num_modules_for_topic: - topicsfile.writelines( - [moduletopics_tmpl.substitute(key=key, name=key.replace("_", " "))] + + keywords = {} + + files = glob.glob1(man_dir, f"*.{ext}") + for fname in files: + with Path(man_dir, fname).open() as fil: + # TODO maybe move to Python re (regex) + lines = fil.readlines() + try: + if ext == "html": + index_keys = lines.index("

KEYWORDS\n") + 1 + index_desc = lines.index("NAME
    \n") + 1 + else: + # expecting markdown + index_keys = lines.index("### KEYWORDS\n") + 3 + index_desc = lines.index("## NAME\n") + 2 + except Exception: + continue + try: + if ext == "html": + key = lines[index_keys].split(",")[1].strip().replace(" ", "_") + key = key.split(">")[1].split("<")[0] + else: + # expecting markdown + key = lines[index_keys].split("]")[0].lstrip("[") + except Exception: + continue + try: + desc = lines[index_desc].split("-", 1)[1].strip() + except Exception: + desc.strip() + + if key not in keywords.keys(): + keywords[key] = {} + keywords[key][fname] = desc + elif fname not in keywords[key]: + keywords[key][fname] = desc + + with Path(man_dir, f"topics.{ext}").open("w") as topicsfile: + topicsfile.write( + header1_tmpl.substitute( + title="GRASS GIS %s Reference Manual - Topics index" % grass_version + ) ) - keyfile.write("\n") - # link to the keywords index - # TODO: the labels in keywords index are with spaces and capitals - # this should be probably changed to lowercase with underscores - keyfile.write( - "
<p><em>
    See also the corresponding keyword" - ' {key}' - " for additional references.".format(key=key.replace("_", " ")) + topicsfile.write(headertopics_tmpl) + + for key, values in sorted(keywords.items(), key=lambda s: s[0].lower()): + with Path(man_dir, f"topic_%s.{ext}" % key.replace(" ", "_")).open( + "w" + ) as keyfile: + if ext == "html": + keyfile.write( + header1_tmpl.substitute( + title="GRASS GIS " + "%s Reference Manual: Topic %s" + % (grass_version, key.replace("_", " ")) + ) + ) + keyfile.write(headerkey_tmpl.substitute(keyword=key.replace("_", " "))) + num_modules = 0 + for mod, desc in sorted(values.items()): + num_modules += 1 + keyfile.write( + desc1_tmpl.substitute( + cmd=mod, desc=desc, basename=mod.replace(f".{ext}", "") + ) + ) + if num_modules >= min_num_modules_for_topic: + topicsfile.writelines( + [ + moduletopics_tmpl.substitute( + key=key, name=key.replace("_", " ") + ) + ] + ) + if ext == "html": + keyfile.write("\n") + else: + keyfile.write("\n") + # link to the keywords index + # TODO: the labels in keywords index are with spaces and capitals + # this should be probably changed to lowercase with underscores + if ext == "html": + keyfile.write( + "
<p><em>
    See also the corresponding keyword" + ' {key}' + " for additional references.".format( + key=key.replace("_", " ") + ) + ) + else: + # expecting markdown + keyfile.write( + "*See also the corresponding keyword" + " [{key}](keywords.md#{key})" + " for additional references.*\n".format( + key=key.replace(" ", "-").replace("_", "-").lower() + ) + ) + + write_footer(keyfile, f"index.{ext}", year, template=ext) + + if ext == "html": + topicsfile.write("\n") + write_footer(topicsfile, f"index.{ext}", year, template=ext) + + +if __name__ == "__main__": + from build import ( + grass_version, + write_footer, ) - write_html_footer(keyfile, "index.html", year) -topicsfile.write("\n") -write_html_footer(topicsfile, "index.html", year) -topicsfile.close() + + build_topics("html") + + build_topics("md") diff --git a/man/mkdocs/grassdocs.css b/man/mkdocs/grassdocs.css new file mode 100644 index 00000000000..e42cc692fd4 --- /dev/null +++ b/man/mkdocs/grassdocs.css @@ -0,0 +1,21 @@ +:root > * { + --md-primary-fg-color: #088B36; + --md-primary-fg-color--light: #088B36; + --md-primary-fg-color--dark: #088B36; + --md-footer-bg-color: #088B36; + --md-footer-bg-color--light: #088B36; + --md-footer-bg-color--dark: #088B36; +} + +.md-header__button.md-logo { + margin-top: 0; + margin-bottom: 0; + padding-top: 0; + padding-bottom: 0; +} + +.md-header__button.md-logo img, +.md-header__button.md-logo svg { + height: 70%; + width: 70%; +} diff --git a/man/mkdocs/mkdocs.yml b/man/mkdocs/mkdocs.yml new file mode 100644 index 00000000000..dcd4deec542 --- /dev/null +++ b/man/mkdocs/mkdocs.yml @@ -0,0 +1,46 @@ +--- +site_name: !ENV SITE_NAME +site_url: https://grass.osgeo.org/grass-stable/manuals/ +docs_dir: source +extra: + homepage: ./index.html +theme: + name: material + custom_dir: overrides + language: en + logo: grass_logo.png + features: + - content.code.copy + - navigation.footer + palette: + primary: custom +copyright: !ENV COPYRIGHT +extra_css: + - grassdocs.css +plugins: + - search + - glightbox +use_directory_urls: false +nav: + - GUI: wxGUI.md + - Startup: grass.md + - Databases: database.md + - Display: display.md + - General: general.md + - Imagery: imagery.md + - Misc: miscellaneous.md + - Postscript: postscript.md + - Raster: raster.md + - Raster 3D: raster3d.md + - SQL: sql.md + - Temporal: temporal.md + - Variables: variables.md + - Vector: vector.md + - Keywords: keywords.md + - Topics: topics.md +markdown_extensions: + - admonition + - pymdownx.details + - pymdownx.superfences + - attr_list + - md_in_html diff --git a/man/mkdocs/overrides/partials/footer.html b/man/mkdocs/overrides/partials/footer.html new file mode 100644 index 00000000000..a713e2e6db8 --- /dev/null +++ b/man/mkdocs/overrides/partials/footer.html @@ -0,0 +1,100 @@ + + + +

    diff --git a/man/mkdocs/requirements.txt b/man/mkdocs/requirements.txt new file mode 100644 index 00000000000..36f78605155 --- /dev/null +++ b/man/mkdocs/requirements.txt @@ -0,0 +1,5 @@ +mkdocs +mkdocs-glightbox +mkdocs-material +pymdown-extensions +pyyaml-env-tag diff --git a/man/parser_standard_options.py b/man/parser_standard_options.py index ad027a1b6b4..5ff79455eef 100644 --- a/man/parser_standard_options.py +++ b/man/parser_standard_options.py @@ -10,13 +10,6 @@ from urllib.request import urlopen -from build_html import ( - header1_tmpl, - grass_version, - headerpso_tmpl, - write_html_footer, -) - def parse_options(lines, startswith="Opt"): def split_in_groups(lines): @@ -132,6 +125,21 @@ def csv(self, delimiter=";", endline="\n"): ) return endline.join(csv) + def markdown(self, endline="\n"): + """Return a Markdown table with the options""" + # write header + md = ["| " + " | ".join(self.columns) + " |"] + md.append("| " + " | ".join(len(x) * "-" for x in self.columns) + " |") + + # write body + for optname, options in self.options: + row = "| {0} ".format(optname) + for col in self.columns: + row += "| {0} ".format(options.get(col, "")) + md.append(row + "|") + + return endline.join(md) + def html(self, endline="\n", indent=" ", toptions="border=1"): """Return a HTML table with the options""" html = ["".format(" " + toptions if toptions else "")] @@ -161,9 +169,10 @@ def _repr_html_(self): if __name__ == "__main__": URL = ( - "https://trac.osgeo.org/grass/browser/grass/" - "trunk/lib/gis/parser_standard_options.c?format=txt" + "https://raw.githubusercontent.com/OSGeo/grass/main/" + "lib/gis/parser_standard_options.c" ) + parser = argparse.ArgumentParser( description="Extract GRASS default options from link." ) @@ -172,7 +181,7 @@ def _repr_html_(self): "--format", default="html", dest="format", - choices=["html", "csv", "grass"], + choices=["html", "csv", "grass", "markdown"], help="Define the output format", ) parser.add_argument( @@ -220,21 +229,45 @@ def _repr_html_(self): options = OptTable(parse_options(cfile.readlines(), startswith=args.startswith)) outform = args.format - if outform in {"csv", "html"}: + if outform in ("csv", "html", "markdown"): print(getattr(options, outform)(), file=args.output) args.output.close() else: year = os.getenv("VERSION_DATE") name = args.output.name args.output.close() - topicsfile = open(name, "w") - topicsfile.write( - header1_tmpl.substitute( - title="GRASS GIS " - "%s Reference Manual: Parser standard options index" % grass_version - ) + + def write_output(ext): + with open(name, "w") as outfile: + outfile.write( + header1_tmpl.substitute( + title=f"GRASS GIS {grass_version} Reference Manual: " + "Parser standard options index" + ) + ) + outfile.write(headerpso_tmpl) + if ext == "html": + outfile.write(options.html(toptions=args.htmlparmas)) + else: + outfile.write(options.markdown()) + write_footer(outfile, f"index.{ext}", year, template=ext) + + from build import ( + grass_version, + write_footer, ) - topicsfile.write(headerpso_tmpl) - topicsfile.write(options.html(toptions=args.htmlparmas)) - write_html_footer(topicsfile, "index.html", year) - topicsfile.close() + + ext = os.path.splitext(name)[1][1:] + + if ext == "html": + from build_html import ( + header1_tmpl, + headerpso_tmpl, + ) + else: + from build_md import ( + header1_tmpl, + headerpso_tmpl, + ) + + write_output(ext) # html or md diff --git a/man/sphinx/conf.py b/man/sphinx/conf.py index dbe8fb9022b..16f67074a80 100644 --- a/man/sphinx/conf.py +++ b/man/sphinx/conf.py @@ 
-38,7 +38,7 @@ # General information about the project. project = "GRASS GIS" -copyright = "2024, GRASS Development Team" +copyright = "2025, GRASS Development Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/misc/m.cogo/m.cogo.html b/misc/m.cogo/m.cogo.html index 28fcc6e4703..4135ed91ecc 100644 --- a/misc/m.cogo/m.cogo.html +++ b/misc/m.cogo/m.cogo.html @@ -80,7 +80,7 @@

<h2>EXAMPLES</h2>

    m.cogo -l in=cogo.dat
    -Where the cogo.dat input file looks like: +Where the cogo.dat input file looks like:
     # Sample COGO input file -- This defines an area.
     # <label> <bearing> <distance>
    @@ -135,7 +135,7 @@ 

<h2>EXAMPLES</h2>

    If necessary, snap the boundary closed with the v.clean module. -Use tool=snap and thresh=0.0001, or some small value. +Use tool=snap and thresh=0.0001, or some small value.

<h2>SEE ALSO</h2>

    diff --git a/misc/m.measure/m.measure.html b/misc/m.measure/m.measure.html index 97c7b44ff63..388c3d81b23 100644 --- a/misc/m.measure/m.measure.html +++ b/misc/m.measure/m.measure.html @@ -19,7 +19,7 @@

<h2>EXAMPLES</h2>

    -Visualization (with d.geodesic) of m.measure distance example
    +Visualization (with d.geodesic) of m.measure distance example
    Visualization (with d.geodesic) of m.measure distance example
    diff --git a/misc/m.nviz.script/m.nviz.script.html b/misc/m.nviz.script/m.nviz.script.html index d5c4997ed86..f7cddaf1d82 100644 --- a/misc/m.nviz.script/m.nviz.script.html +++ b/misc/m.nviz.script/m.nviz.script.html @@ -11,7 +11,7 @@

<h2>DESCRIPTION</h2>

    The script generated by m.nviz.script can be run from the NVIZ command line (nviz script=script_name) or after NVIZ is started by -selecting Scripting->Play Script. +selecting Scripting->Play Script.

<h2>OPTIONS</h2>

    @@ -23,14 +23,14 @@

    Flags:

    -c
    Flay at constant elevation
    With this flag the camera will be set to an elevation given by the - ht= parameter. The default is to fly at ht= - above the topography (i.e. camera height = elevation + ht) + ht= parameter. The default is to fly at ht= + above the topography (i.e. camera height = elevation + ht)
    -k
    Output KeyFrame file
    Generate a KeyFrame file that can be loaded from the NVIZ - Keyframe Animation panel. The KeyFrame file is - automatically assigned the script name with a - .kanimator extension. + Keyframe Animation panel. The KeyFrame file is + automatically assigned the script name with a + .kanimator extension.
    -o
    Render the animation in an off-screen context
    -e diff --git a/misc/m.transform/m.transform.html b/misc/m.transform/m.transform.html index e2f237cc832..b80490b39e7 100644 --- a/misc/m.transform/m.transform.html +++ b/misc/m.transform/m.transform.html @@ -5,7 +5,7 @@

<h2>DESCRIPTION</h2>

    If coordinates are given with the input file option or fed from -stdin, both the input and the output format is "x y" with one +stdin, both the input and the output format is "x y" with one coordinate pair per line. Reverse transform is performed with the -r flag. @@ -21,44 +21,46 @@

<h2>NOTES</h2>

    The transformations are:

    order=1:

    -    e = [E0 E1][1].[1]
    +    e = [E0 E1][1]·[1]
             [E2  0][e] [n]
     
    -    n = [N0 N1][1].[1]
    +    n = [N0 N1][1]·[1]
             [N2  0][e] [n]
     
    order=2:
         e = [E0 E1 E3][1 ] [1 ]
    -        [E2 E4  0][e ].[n ]
    +        [E2 E4  0][e ]·[n ]
             [E5  0  0][e²] [n²]
     
         n = [N0 N1 N3][1 ] [1 ]
    -        [N2 N4  0][e ].[n ]
    +        [N2 N4  0][e ]·[n ]
             [N5  0  0][e²] [n²]
     
    order=3:
         e = [E0 E1 E3 E6][1 ] [1 ]
    -        [E2 E4 E7  0][e ].[n ]
    +        [E2 E4 E7  0][e ]·[n ]
             [E5 E8  0  0][e²] [n²]
             [E9  0  0  0][e³] [n³]
     
         n = [N0 N1 N3 N6][1 ] [1 ]
    -        [N2 N4 N7  0][e ].[n ]
    +        [N2 N4 N7  0][e ]·[n ]
             [N5 N8  0  0][e²] [n²]
             [N9  0  0  0][e³] [n³]
     
    -["." = dot-product, (AE).N = N'EA.] -

    In other words, order=1 and order=2 are equivalent to order=3 with -the higher coefficients equal to zero. +["·" = dot-product, (AE)·N = N'EA] + +

    In other words, order=1 and order=2 are equivalent +to order=3 with the higher coefficients equal to zero.

<h2>SEE ALSO</h2>

    +i.ortho.transform, i.rectify, v.rectify, v.transform diff --git a/mswindows/GRASS-Installer.nsi.tmpl b/mswindows/GRASS-Installer.nsi.tmpl index 2b2722a5083..ab01e133a51 100644 --- a/mswindows/GRASS-Installer.nsi.tmpl +++ b/mswindows/GRASS-Installer.nsi.tmpl @@ -4,7 +4,7 @@ ;Written by Marco Pasetti ;Updated for OSGeo4W by Colin Nielsen, Helmut Kudrnovsky, and Martin Landa ;Last Update: $Id$ -;Mail to: grass-dev@lists.osgeo.org +;Mail to: grass-dev@lists.osgeo.org ;---------------------------------------------------------------------------------------------------------------------------- @@ -102,7 +102,7 @@ ShowUnInstDetails hide ;StrReplace Function ;Replaces all occurrences of a given needle within a haystack with another string ;Written by dandaman32 - + Var STR_REPLACE_VAR_0 Var STR_REPLACE_VAR_1 Var STR_REPLACE_VAR_2 @@ -112,7 +112,7 @@ Var STR_REPLACE_VAR_5 Var STR_REPLACE_VAR_6 Var STR_REPLACE_VAR_7 Var STR_REPLACE_VAR_8 - + Function StrReplace Exch $STR_REPLACE_VAR_2 Exch 1 @@ -140,7 +140,7 @@ Function StrReplace Pop $STR_REPLACE_VAR_1 ; stack as it was before the function was called Exch $STR_REPLACE_VAR_0 FunctionEnd - + !macro _strReplaceConstructor OUT NEEDLE NEEDLE2 HAYSTACK Push "${HAYSTACK}" Push "${NEEDLE}" @@ -148,7 +148,7 @@ FunctionEnd Call StrReplace Pop "${OUT}" !macroend - + !define StrReplace '!insertmacro "_strReplaceConstructor"' ;---------------------------------------------------------------------------------------------------------------------------- @@ -184,35 +184,35 @@ Function .onInit Var /GLOBAL UNINSTALL_STRING Var /GLOBAL INSTALL_PATH - + Var /GLOBAL INSTALLED_VERSION_NUMBER Var /GLOBAL INSTALLED_GIT_REVISION Var /GLOBAL INSTALLED_BINARY_REVISION - + Var /GLOBAL INSTALLED_VERSION - + Var /GLOBAL DISPLAYED_INSTALLED_VERSION - + Var /GLOBAL MESSAGE_0_ Var /GLOBAL MESSAGE_1_ Var /GLOBAL MESSAGE_2_ Var /GLOBAL MESSAGE_3_ - + ReadRegStr $UNINSTALL_STRING HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "UninstallString" ReadRegStr $INSTALL_PATH HKLM "Software\${GRASS_BASE}" "InstallPath" ReadRegStr $INSTALLED_VERSION_NUMBER HKLM "Software\${GRASS_BASE}" "VersionNumber" ReadRegStr $INSTALLED_GIT_REVISION HKLM "Software\${GRASS_BASE}" "GitRevision" - + ${If} $INSTALLED_GIT_REVISION == "" ReadRegStr $INSTALLED_GIT_REVISION HKLM "Software\${GRASS_BASE}" "Revision" - ${EndIf} - + ${EndIf} + ReadRegStr $INSTALLED_BINARY_REVISION HKLM "Software\${GRASS_BASE}" "BinaryRevision" - + StrCpy $MESSAGE_0_ "${CHECK_INSTALL_NAME} is already installed on your system.$\r$\n" StrCpy $MESSAGE_0_ "$MESSAGE_0_$\r$\n" - - !if ${INSTALLER_TYPE} == "Release" + + !if ${INSTALLER_TYPE} == "Release" ${If} $INSTALLED_BINARY_REVISION == "" StrCpy $DISPLAYED_INSTALLED_VERSION "$INSTALLED_VERSION_NUMBER" ${Else} @@ -221,32 +221,32 @@ Function .onInit !else StrCpy $DISPLAYED_INSTALLED_VERSION "$INSTALLED_VERSION_NUMBER-$INSTALLED_GIT_REVISION-$INSTALLED_BINARY_REVISION" !endif - + StrCpy $MESSAGE_0_ "$MESSAGE_0_The installed release is $DISPLAYED_INSTALLED_VERSION$\r$\n" - + StrCpy $MESSAGE_1_ "$MESSAGE_0_$\r$\n" StrCpy $MESSAGE_1_ "$MESSAGE_1_You are going to install a newer release of ${CHECK_INSTALL_NAME}$\r$\n" StrCpy $MESSAGE_1_ "$MESSAGE_1_$\r$\n" StrCpy $MESSAGE_1_ "$MESSAGE_1_Press OK to uninstall GRASS $DISPLAYED_INSTALLED_VERSION" StrCpy $MESSAGE_1_ "$MESSAGE_1_ and install ${DISPLAYED_NAME} or Cancel to quit." 
- + StrCpy $MESSAGE_2_ "$MESSAGE_0_$\r$\n" StrCpy $MESSAGE_2_ "$MESSAGE_2_You are going to install an older release of ${CHECK_INSTALL_NAME}$\r$\n" StrCpy $MESSAGE_2_ "$MESSAGE_2_$\r$\n" StrCpy $MESSAGE_2_ "$MESSAGE_2_Press OK to uninstall GRASS $DISPLAYED_INSTALLED_VERSION" StrCpy $MESSAGE_2_ "$MESSAGE_2_ and install ${DISPLAYED_NAME} or Cancel to quit." - + StrCpy $MESSAGE_3_ "$MESSAGE_0_$\r$\n" StrCpy $MESSAGE_3_ "$MESSAGE_3_This is the latest release available.$\r$\n" StrCpy $MESSAGE_3_ "$MESSAGE_3_$\r$\n" StrCpy $MESSAGE_3_ "$MESSAGE_3_Press OK to reinstall ${DISPLAYED_NAME} or Cancel to quit." - + IntOp $INSTALLED_GIT_REVISION $INSTALLED_GIT_REVISION * 1 IntOp $INSTALLED_BINARY_REVISION $INSTALLED_BINARY_REVISION * 1 IntOp $INSTALLED_VERSION $INSTALLED_GIT_REVISION + $INSTALLED_BINARY_REVISION - + !define /math VERSION ${GIT_REVISION} + ${BINARY_REVISION} - + ${If} $INSTALLED_VERSION_NUMBER == "" ${Else} ${If} $INSTALLED_VERSION < ${VERSION} @@ -275,9 +275,9 @@ Function .onInit quit_reinstall: Abort continue_reinstall: - ${EndIf} + ${EndIf} ${EndIf} - + ${If} $INSTALLED_VERSION_NUMBER == "" ${Else} ${If} $0 = 0 @@ -295,10 +295,10 @@ FunctionEnd Function CheckUpdate - ${If} $ASK_FOR_PATH == "NO" + ${If} $ASK_FOR_PATH == "NO" Abort ${EndIf} - + FunctionEnd ;---------------------------------------------------------------------------------------------------------------------------- @@ -310,26 +310,26 @@ FunctionEnd ;Function CheckInstDir ; Var /GLOBAL INSTDIR_TEST -; Var /GLOBAL INSTDIR_LENGTH +; Var /GLOBAL INSTDIR_LENGTH ; Var /GLOBAL INSTDIR_TEST_LENGTH ; Var /GLOBAL MESSAGE_CHKINST_ -; +; ; StrCpy $MESSAGE_CHKINST_ "WARNING: you are about to install GRASS into a directory that has spaces$\r$\n" ; StrCpy $MESSAGE_CHKINST_ "$MESSAGE_CHKINST_in either its name or the path of directories leading up to it.$\r$\n" ; StrCpy $MESSAGE_CHKINST_ "$MESSAGE_CHKINST_Some functionalities of GRASS might be hampered by this. 
We would highly$\r$\n" ; StrCpy $MESSAGE_CHKINST_ "$MESSAGE_CHKINST_appreciate if you tried and reported any problems, so that we can fix them.$\r$\n" ; StrCpy $MESSAGE_CHKINST_ "$MESSAGE_CHKINST_However, if you want to avoid any such issues, we recommend that you$\r$\n" ; StrCpy $MESSAGE_CHKINST_ "$MESSAGE_CHKINST_choose a simple installation path without spaces, such as: C:\${GRASS_BASE}.$\r$\n" -; +; ; ${StrReplace} "$INSTDIR_TEST" " " "" "$INSTDIR" -; +; ; StrLen $INSTDIR_LENGTH "$INSTDIR" ; StrLen $INSTDIR_TEST_LENGTH "$INSTDIR_TEST" -; -; ${If} $INSTDIR_TEST_LENGTH < $INSTDIR_LENGTH +; +; ${If} $INSTDIR_TEST_LENGTH < $INSTDIR_LENGTH ; MessageBox MB_OK|MB_ICONEXCLAMATION "$MESSAGE_CHKINST_" ; ${EndIf} -; +; ;FunctionEnd ;---------------------------------------------------------------------------------------------------------------------------- @@ -351,39 +351,39 @@ Function ReplaceLineStr Push $R7 ; input string length Push $R8 ; line string length Push $R9 ; global - + StrLen $R7 $R1 - + GetTempFileName $R4 - + FileOpen $R5 $R4 w FileOpen $R3 $R2 r - + ReadLoop: ClearErrors FileRead $R3 $R6 IfErrors Done - + StrLen $R8 $R6 StrCpy $R9 $R6 $R7 -$R8 StrCmp $R9 $R1 0 +3 - + FileWrite $R5 "$R0$\r$\n" Goto ReadLoop - + FileWrite $R5 $R6 Goto ReadLoop - + Done: - + FileClose $R3 FileClose $R5 - + SetDetailsPrint none Delete $R2 Rename $R4 $R2 SetDetailsPrint both - + Pop $R9 Pop $R8 Pop $R7 @@ -491,24 +491,24 @@ Var /GLOBAL DOWNLOAD_MESSAGE_ Section "GRASS" SecGRASS SectionIn RO - + ;Set the INSTALL_DIR variable Var /GLOBAL INSTALL_DIR - - ${If} $ASK_FOR_PATH == "NO" + + ${If} $ASK_FOR_PATH == "NO" StrCpy $INSTALL_DIR "$INSTALL_PATH" ${Else} StrCpy $INSTALL_DIR "$INSTDIR" ${EndIf} - + ;Set to try to overwrite existing files - SetOverwrite try - + SetOverwrite try + ;Set the GIS_DATABASE directory SetShellVarContext current - Var /GLOBAL GIS_DATABASE + Var /GLOBAL GIS_DATABASE StrCpy $GIS_DATABASE "$DOCUMENTS\grassdata" - + ;Create the GIS_DATABASE directory CreateDirectory "$GIS_DATABASE" @@ -516,7 +516,7 @@ Section "GRASS" SecGRASS SetOutPath "$INSTALL_DIR" File .\Installer-Files\GRASS-WebSite.url File .\Installer-Files\WinGRASS-README.url - + ;add GRASS files SetOutPath "$INSTALL_DIR" File /r ${PACKAGE_FOLDER}\*.* @@ -524,12 +524,12 @@ Section "GRASS" SecGRASS ;grant $INSTDIR\etc read write accessible and show if succeeded: error if it failed AccessControl::GrantOnFile "$INSTDIR\etc" "(S-1-5-32-545)" "FullAccess" Pop $R0 - DetailPrint $R0 - ;grant modifying/overwriting fontcap file and show if succeeded: error if it failed + DetailPrint $R0 + ;grant modifying/overwriting fontcap file and show if succeeded: error if it failed AccessControl::GrantOnFile "$INSTDIR\etc\fontcap" "(S-1-5-32-545)" "FullAccess" Pop $R0 - DetailPrint $R0 - + DetailPrint $R0 + ;create run_gmkfontcap.bat ClearErrors FileOpen $0 $INSTALL_DIR\etc\run_gmkfontcap.bat w @@ -567,12 +567,12 @@ Section "GRASS" SecGRASS FileOpen $0 $INSTALL_DIR\etc\run_gmkfontcap.bat.manifest w IfErrors done_create_run_gmkfontcap.bat.manifest FileWrite $0 ' $\r$\n' - FileWrite $0 '$\r$\n' + FileWrite $0 '$\r$\n' FileWrite $0 ' $\r$\n' - FileWrite $0 ' GRASS help script:run_gmkfontcap$\r$\n' + FileWrite $0 ' type="win32"/>$\r$\n' + FileWrite $0 ' GRASS help script:run_gmkfontcap$\r$\n' FileWrite $0 ' $\r$\n' FileWrite $0 ' $\r$\n' FileWrite $0 ' $\r$\n' @@ -586,13 +586,13 @@ Section "GRASS" SecGRASS FileWrite $0 '$\r$\n' FileClose $0 done_create_run_gmkfontcap.bat.manifest: - + ;Run g.mkfontcap outside a grass session during installation to 
catch all fonts ExecWait '"$INSTALL_DIR\etc\run_gmkfontcap.bat"' ;set $INSTDIR\etc back to read accessible - AccessControl::SetOnFile "$INSTDIR\etc" "(S-1-5-32-545)" "GenericRead + GenericExecute" - + AccessControl::SetOnFile "$INSTDIR\etc" "(S-1-5-32-545)" "GenericRead + GenericExecute" + ;Install demolocation into the GIS_DATABASE directory SetOutPath "$GIS_DATABASE\demolocation" File /r ${DEMOLOCATION_PATH}\*.* @@ -605,9 +605,9 @@ Section "GRASS" SecGRASS ;Create the Uninstaller WriteUninstaller "$INSTALL_DIR\Uninstall-GRASS.exe" - + ;Registry Key Entries - + ;HKEY_LOCAL_MACHINE Install entries ;Set the Name, Version and Revision of GRASS + PublisherInfo + InstallPath WriteRegStr HKLM "Software\${GRASS_BASE}" "Name" "${GRASS_BASE}" @@ -617,11 +617,11 @@ Section "GRASS" SecGRASS WriteRegStr HKLM "Software\${GRASS_BASE}" "Publisher" "${PUBLISHER}" WriteRegStr HKLM "Software\${GRASS_BASE}" "WebSite" "${WEB_SITE}" WriteRegStr HKLM "Software\${GRASS_BASE}" "InstallPath" "$INSTALL_DIR" - + ;HKEY_LOCAL_MACHINE Uninstall entries WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "DisplayName" "GRASS GIS @GRASS_VERSION_MAJOR@.@GRASS_VERSION_MINOR@" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "UninstallString" "$INSTALL_DIR\Uninstall-GRASS.exe" - + !if ${INSTALLER_TYPE} == "Release" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}"\ "DisplayVersion" "${VERSION_NUMBER}-${BINARY_REVISION}" @@ -629,31 +629,31 @@ Section "GRASS" SecGRASS WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}"\ "DisplayVersion" "${VERSION_NUMBER}-${GIT_REVISION}-${BINARY_REVISION}" !endif - + WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "DisplayIcon" "$INSTALL_DIR\gui\icons\grass.ico" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "EstimatedSize" 1 WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "HelpLink" "${WIKI_PAGE}" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "URLInfoAbout" "${WEB_SITE}" WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" "Publisher" "${PUBLISHER}" - + ;Create the Desktop Shortcut SetShellVarContext current - + CreateShortCut "$DESKTOP\${GRASS_BASE}.lnk" "$INSTALL_DIR\${GRASS_COMMAND}.bat" "--gui"\ "$INSTALL_DIR\gui\icons\grass.ico" "" SW_SHOWNORMAL "" "Launch GRASS GIS ${VERSION_NUMBER}" - + ;Create the Windows Start Menu Shortcuts SetShellVarContext all - + CreateDirectory "$SMPROGRAMS\${GRASS_BASE}" - + CreateShortCut "$SMPROGRAMS\${GRASS_BASE}\${GRASS_BASE}.lnk" "$INSTALL_DIR\${GRASS_COMMAND}.bat" "--gui"\ "$INSTALL_DIR\gui\icons\grass.ico" "" SW_SHOWNORMAL "" "Launch GRASS GIS ${VERSION_NUMBER}" - + CreateShortCut "$SMPROGRAMS\${GRASS_BASE}\Uninstall ${GRASS_BASE}.lnk" "$INSTALL_DIR\Uninstall-GRASS.exe" ""\ "$INSTALL_DIR\Uninstall-GRASS.exe" "" SW_SHOWNORMAL "" "Uninstall GRASS GIS ${VERSION_NUMBER}" - + ;Create the grass_command.bat ClearErrors FileOpen $0 $INSTALL_DIR\${GRASS_COMMAND}.bat w @@ -680,20 +680,20 @@ Section "GRASS" SecGRASS FileWrite $0 'if %ERRORLEVEL% GEQ 1 pause' FileClose $0 done_create_grass_command.bat: - + ;Set the UNIX_LIKE GRASS Path Var /GLOBAL UNIX_LIKE_DRIVE Var /GLOBAL UNIX_LIKE_GRASS_PATH - + StrCpy $UNIX_LIKE_DRIVE "$INSTALL_DIR" 3 StrCpy $UNIX_LIKE_GRASS_PATH "$INSTALL_DIR" "" 3 - + ;replace "\" with "/" in $UNIX_LIKE_DRIVE ${StrReplace} 
"$UNIX_LIKE_DRIVE" "\" "/" "$UNIX_LIKE_DRIVE" - + ;replace ":" with "" in $UNIX_LIKE_DRIVE ${StrReplace} "$UNIX_LIKE_DRIVE" ":" "" "$UNIX_LIKE_DRIVE" - + ;replace "\" with "/" in $UNIX_LIKE_GRASS_PATH ${StrReplace} "$UNIX_LIKE_GRASS_PATH" "\" "/" "$UNIX_LIKE_GRASS_PATH" @@ -709,28 +709,28 @@ Section "GRASS" SecGRASS ;It first read the $PROFILE variable, to scan the OS version: ;If equal to "drive:\Users\UserName", the OS is Vista, and the $USERNAME variable set to $PROFILE - "drive:\Users\" ;If not, the OS is XP or previous, and the $USERNAME variable set to $PROFILE - "drive:\Documents and Settings\" - + ${If} $USERNAME == "" StrCpy $PROFILE_DRIVE "$PROFILE" 2 StrCpy $PROFILE_ROOT "$PROFILE" 5 -3 - ${If} $USERNAME = "Users" + ${If} $USERNAME = "Users" ${StrReplace} "$USERNAME" "$PROFILE_DRIVE\Users\" "" "$PROFILE" ${Else} ${StrReplace} "$USERNAME" "$PROFILE_DRIVE\Documents and Settings\" "" "$PROFILE" ${EndIf} ${EndIf} - + ;Get the short form of the install path (to allow for paths with spaces) VAR /GLOBAL INST_DIR_SHORT GetFullPathName /SHORT $INST_DIR_SHORT $INSTALL_DIR ;Set the Unix-Like GIS_DATABASE Path ;Var /GLOBAL UNIX_LIKE_GIS_DATABASE_PATH - + ;replace \ with / in $GIS_DATABASE ;${StrReplace} "$UNIX_LIKE_GIS_DATABASE_PATH" "\" "/" "$GIS_DATABASE" - SetShellVarContext current + SetShellVarContext current ${If} ${FileExists} "$APPDATA\GRASS@GRASS_VERSION_MAJOR@\rc" DetailPrint "File $APPDATA\GRASS@GRASS_VERSION_MAJOR@\rc already exists. Skipping." ${Else} @@ -742,7 +742,7 @@ Section "GRASS" SecGRASS FileWrite $0 'GISDBASE: $GIS_DATABASE$\r$\n' FileWrite $0 'LOCATION_NAME: demolocation$\r$\n' FileWrite $0 'MAPSET: PERMANENT$\r$\n' - FileClose $0 + FileClose $0 done_create_grass_rc: ${EndIf} @@ -751,7 +751,7 @@ Section "GRASS" SecGRASS Push 'gisbase = "/c/OSGeo4W/apps/grass/grass-@GRASS_VERSION_MAJOR@.@GRASS_VERSION_MINOR@.@GRASS_VERSION_RELEASE@"' ; string that a line must begin with *WS Sensitive* Push 'gisbase = "$INSTDIR"' ; string to replace whole line with Call ReplaceLineStr - + ;replace config_projshare Push "$INSTDIR\etc\grass@GRASS_VERSION_MAJOR@@GRASS_VERSION_MINOR@.py" ; file to modify Push 'config_projshare = "/c/OSGeo4W/share/proj"' ; string that a line must begin with *WS Sensitive* @@ -759,7 +759,7 @@ Section "GRASS" SecGRASS Call ReplaceLineStr ;replace BU with numeric group name for local users. Users S-1-5-32-545 does not work for Windows Enterprise. 
Try Authenticated Users S-1-5-11 - AccessControl::SetOnFile "$INSTDIR\etc\grass@GRASS_VERSION_MAJOR@@GRASS_VERSION_MINOR@.py" "(S-1-5-11)" "GenericRead + GenericExecute" + AccessControl::SetOnFile "$INSTDIR\etc\grass@GRASS_VERSION_MAJOR@@GRASS_VERSION_MINOR@.py" "(S-1-5-11)" "GenericRead + GenericExecute" SectionEnd ;-------------------------------------------------------------------------- @@ -789,30 +789,30 @@ Function DownloadInstallMSRuntime StrCpy $DOWNLOAD_MESSAGE_ "$DOWNLOAD_MESSAGE_ installation without the $EXTENDED_ARCHIVE_NAME.$\r$\n" MessageBox MB_OKCANCEL "$DOWNLOAD_MESSAGE_" IDOK download IDCANCEL cancel_download - - download: - SetShellVarContext current + + download: + SetShellVarContext current InitPluginsDir NSISdl::download "$HTTP_PATH/$ARCHIVE_NAME" "$TEMP\$ARCHIVE_NAME" Pop $0 StrCmp $0 "success" download_ok download_failed - - download_ok: + + download_ok: InitPluginsDir untgz::extract -d "$TEMP\$ORIGINAL_UNTAR_FOLDER" -zbz2 "$TEMP\$ARCHIVE_NAME" Pop $0 StrCmp $0 "success" untar_ok untar_failed - + download_failed: DetailPrint "$0" ;print error message to log MessageBox MB_OK "Download Failed.$\r$\nGRASS will be installed without the $EXTENDED_ARCHIVE_NAME." Goto end - + cancel_download: MessageBox MB_OK "Download Cancelled.$\r$\nGRASS will be installed without the $EXTENDED_ARCHIVE_NAME." Goto end - + untar_failed: DetailPrint "$0" ;print error message to log @@ -822,7 +822,7 @@ Function DownloadInstallMSRuntime CopyFiles "$TEMP\$ORIGINAL_UNTAR_FOLDER\bin\*.dll" "$INSTALL_DIR\extrabin" DetailPrint "MS runtime files installed." Goto end - + end: FunctionEnd @@ -831,15 +831,15 @@ Section "Important Microsoft Runtime DLLs" SecMSRuntime ;Set the size (in KB) of the archive file StrCpy $ARCHIVE_SIZE_KB 833 - + ;Set the size (in KB) of the unpacked archive file AddSize 13500 - + StrCpy $HTTP_PATH "http://download.osgeo.org/osgeo4w/v2/${PLATFORM}/release/msvcrt2019/" StrCpy $ARCHIVE_NAME "msvcrt2019-14.2-1.tar.bz2" StrCpy $EXTENDED_ARCHIVE_NAME "Microsoft Visual C++ Redistributable Packages" StrCpy $ORIGINAL_UNTAR_FOLDER "install_msruntime" - + Call DownloadInstallMSRuntime SectionEnd @@ -847,7 +847,7 @@ SectionEnd Function DownloadDataSet ; IntOp $ARCHIVE_SIZE_MB $ARCHIVE_SIZE_KB / 1024 - + StrCpy $DOWNLOAD_MESSAGE_ "The installer will download the $EXTENDED_ARCHIVE_NAME sample data set.$\r$\n" StrCpy $DOWNLOAD_MESSAGE_ "$DOWNLOAD_MESSAGE_$\r$\n" ; StrCpy $DOWNLOAD_MESSAGE_ "$DOWNLOAD_MESSAGE_The archive is about $ARCHIVE_SIZE_MB MB and may take" @@ -859,40 +859,40 @@ Function DownloadDataSet StrCpy $DOWNLOAD_MESSAGE_ "$DOWNLOAD_MESSAGE_$\r$\n" StrCpy $DOWNLOAD_MESSAGE_ "$DOWNLOAD_MESSAGE_Press OK to continue or Cancel to skip the download and complete the GRASS" StrCpy $DOWNLOAD_MESSAGE_ "$DOWNLOAD_MESSAGE_ installation without the $EXTENDED_ARCHIVE_NAME data set.$\r$\n" - + MessageBox MB_OKCANCEL "$DOWNLOAD_MESSAGE_" IDOK download IDCANCEL cancel_download - - download: - SetShellVarContext current + + download: + SetShellVarContext current InitPluginsDir NSISdl::download "$HTTP_PATH/$ARCHIVE_NAME" "$TEMP\$ARCHIVE_NAME" Pop $0 StrCmp $0 "success" download_ok download_failed - - download_ok: + + download_ok: InitPluginsDir untgz::extract -d "$GIS_DATABASE" "$TEMP\$ARCHIVE_NAME" Pop $0 StrCmp $0 "success" untar_ok untar_failed - - untar_ok: + + untar_ok: Rename "$GIS_DATABASE\$ORIGINAL_UNTAR_FOLDER" "$GIS_DATABASE\$CUSTOM_UNTAR_FOLDER" Delete "$TEMP\$ARCHIVE_NAME" Goto end - + download_failed: DetailPrint "$0" ;print error message to log MessageBox MB_OK "Download 
Failed.$\r$\nGRASS will be installed without the $EXTENDED_ARCHIVE_NAME sample data set." Goto end - + cancel_download: MessageBox MB_OK "Download Cancelled.$\r$\nGRASS will be installed without the $EXTENDED_ARCHIVE_NAME sample data set." Goto end - + untar_failed: DetailPrint "$0" ;print error message to log - + end: FunctionEnd @@ -901,34 +901,34 @@ Section /O "North Carolina (Wake County) Data Set" SecNorthCarolinaSDB ;Set the size (in KB) of the archive file StrCpy $ARCHIVE_SIZE_KB 144213 - + ;Set the size (in KB) of the unpacked archive file AddSize 254521 - + StrCpy $HTTP_PATH "https://grass.osgeo.org/sampledata/north_carolina/" StrCpy $ARCHIVE_NAME "nc_spm_08_grass7.tar.gz" StrCpy $EXTENDED_ARCHIVE_NAME "North Carolina (Wake County)" StrCpy $ORIGINAL_UNTAR_FOLDER "nc_spm_08_grass7" StrCpy $CUSTOM_UNTAR_FOLDER "North_Carolina" - - Call DownloadDataSet - + + Call DownloadDataSet + SectionEnd Section /O "South Dakota (Spearfish County) Data Set" SecSpearfishSDB ;Set the size (in KB) of the archive file StrCpy $ARCHIVE_SIZE_KB 20803 - + ;Set the size (in KB) of the unpacked archive file AddSize 42171 - + StrCpy $HTTP_PATH "https://grass.osgeo.org/sampledata" StrCpy $ARCHIVE_NAME "spearfish_grass70data-0.3.tar.gz" StrCpy $EXTENDED_ARCHIVE_NAME "South Dakota (Spearfish County)" StrCpy $ORIGINAL_UNTAR_FOLDER "spearfish60_grass7" StrCpy $CUSTOM_UNTAR_FOLDER "Spearfish60_grass7" - + Call DownloadDataSet SectionEnd @@ -940,7 +940,7 @@ Function .onInstSuccess ${If} ${SectionIsSelected} ${SecMSRuntime} Delete "$TEMP\$ARCHIVE_NAME" RMDir /r "$TEMP\$ORIGINAL_UNTAR_FOLDER" - RMDir "$TEMP\$ORIGINAL_UNTAR_FOLDER" + RMDir "$TEMP\$ORIGINAL_UNTAR_FOLDER" ${EndIf} FunctionEnd @@ -951,23 +951,23 @@ FunctionEnd Section "Uninstall" ;remove files & folders RMDir /r "$INSTDIR" - + ;remove the Desktop ShortCut SetShellVarContext current Delete "$DESKTOP\${GRASS_BASE}.lnk" - + ;remove the Programs Start ShortCuts SetShellVarContext all RMDir /r "$SMPROGRAMS\${GRASS_BASE}" - + ;remove the $APPDATA\GRASS@GRASS_VERSION_MAJOR@ folder ;disabled, don't remove user settings ; SetShellVarContext current - ;RMDir /r "$APPDATA\GRASS@GRASS_VERSION_MAJOR@" + ;RMDir /r "$APPDATA\GRASS@GRASS_VERSION_MAJOR@" ;${If} ${FileExists} "$APPDATA\GRASS@GRASS_VERSION_MAJOR@\addons\*.*" ; RMDir /r "$APPDATA\GRASS@GRASS_VERSION_MAJOR@\addons" ;${EndIf} - + ;remove the Registry Entries DeleteRegKey HKLM "Software\${GRASS_BASE}" DeleteRegKey HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\${GRASS_BASE}" diff --git a/mswindows/Makefile b/mswindows/Makefile index 3a33ac70b74..de2a4c6463c 100644 --- a/mswindows/Makefile +++ b/mswindows/Makefile @@ -11,7 +11,7 @@ ifneq ($(MINGW),) default: GRASS-Packager.bat GRASS-Installer.nsi parsubdirs else default: -endif +endif GRASS-Packager.bat: GRASS-Packager.bat.tmpl sed \ diff --git a/mswindows/external/README.license b/mswindows/external/README.license index 2423c610f5f..f9345809a9d 100644 --- a/mswindows/external/README.license +++ b/mswindows/external/README.license @@ -1,21 +1,21 @@ - - -In the following directories you will find code which was not -written by The GRASS Development Team. - -Parts of GRASS are not copyright by The GRASS Development Team. -The original authors hold the copyrights and you have to abide -to their licensing terms where noted. -(Keep in mind that code linking into GRASS can only be distributed -if compatible with the GPL.) - -Specifically the source code below this directory is not necessarily -licensed under the GPL. 
Check the READMEs and source code header -comments carefully. - - - - * Windows batchfiles for use with R / (GNU GPL v2) - http://code.google.com/p/batchfiles/ - https://github.com/ggrothendieck/batchfiles (new code repository) - + + +In the following directories you will find code which was not +written by The GRASS Development Team. + +Parts of GRASS are not copyright by The GRASS Development Team. +The original authors hold the copyrights and you have to abide +to their licensing terms where noted. +(Keep in mind that code linking into GRASS can only be distributed +if compatible with the GPL.) + +Specifically the source code below this directory is not necessarily +licensed under the GPL. Check the READMEs and source code header +comments carefully. + + + + * Windows batchfiles for use with R / (GNU GPL v2) + http://code.google.com/p/batchfiles/ + https://github.com/ggrothendieck/batchfiles (new code repository) + diff --git a/mswindows/external/rbatch/ANNOUNCE b/mswindows/external/rbatch/ANNOUNCE index 98618afec5f..a033c16b2d1 100644 --- a/mswindows/external/rbatch/ANNOUNCE +++ b/mswindows/external/rbatch/ANNOUNCE @@ -1,21 +1,21 @@ -A new version of the Windows batchfiles is available. +A new version of the Windows batchfiles is available. CHANGES -The key change is the new R.bat utility. R.bat has a new interface and +The key change is the new R.bat utility. R.bat has a new interface and extended functionality covering many of the other prior utilities. (These older utilities are no longer needed and have been removed.) Unlike R.bat which requires no configuration the new Rpathset.bat utility is configured by manually changing the Windows batch SET statements in it. The -main advantage is just that it is very simple internally which may be +main advantage is just that it is very simple internally which may be advantageous in some situations involving customization. A new pdf document accompanies the utilities providing more detail. OVERVIEW -These are self contained independent no-install Windows batch, javascript and +These are self contained independent no-install Windows batch, javascript and .hta files. Just place any that you wish to use on your Windows PATH. R.bat @@ -28,7 +28,7 @@ line do this: R gui R.bat locates R, MiKTeX and Rtools using the registry or heuristics and then -runs the subcommand indicated by the first argument. +runs the subcommand indicated by the first argument. In addition to the gui subcommand, the following subcommands are available: cd, cmd, dir, gui, help, ls, path, R, script, show, SetReg, tools, touch. @@ -48,7 +48,7 @@ R show -- show R_ variable values used (R_ROOT, R_HOME, R_VER, R_ARCH, etc.) R path -- temporarily add R, MiKTeX and Rtools to the Windows path R tools -- similar but only add MiKTeX and Rtools to the Windows path -Except for R touch (which updates the date on your R_HOME directory) and +Except for R touch (which updates the date on your R_HOME directory) and R SetReg (which calls RSetReg.exe to update the registry with your R version), R.bat makes no permanent changes to your system. @@ -56,16 +56,16 @@ Rpathset.bat Rpathset.bat temporarily sets the Windows path to R, Rtools and MiKTeX based on SET statements that the user can configure manually. It is an -alternative to R.bat that lacks R.bat's "no configuration" nature but may be +alternative to R.bat that lacks R.bat's "no configuration" nature but may be preferred in some situations due to its internal simplicity. 
-Also Rpathset.bat is more likely to work on systems that have not been -tested given its simplicity. (The utilities were tested on 32 bit Windows +Also Rpathset.bat is more likely to work on systems that have not been +tested given its simplicity. (The utilities were tested on 32 bit Windows Vista and 64 bit Windows 8 systems.) Other -Other commands which continue to be available are copydir.bat, movedir.bat, +Other commands which continue to be available are copydir.bat, movedir.bat, el.js, clip2r.js and find-miktex.hta . These copy and move R libraries, run a command in elevated mode (i.e. as Administrator), copy the clipboard to a running R instance and find MiKTeX. diff --git a/mswindows/external/rbatch/NEWS b/mswindows/external/rbatch/NEWS index ed028a3d445..2b53b78602a 100644 --- a/mswindows/external/rbatch/NEWS +++ b/mswindows/external/rbatch/NEWS @@ -10,7 +10,7 @@ Changes in version 0.7-1 Changes in version 0.7-0 - o R.bat reworked. It now has a with different interface and many prior + o R.bat reworked. It now has a with different interface and many prior batch files have been incorporated into it and removed o new Rpathset.bat @@ -33,7 +33,7 @@ Changes in version 0.6-5 %ProgramFiles%\MySQL\* - This allows one to install and run RMySQL without setting any environment + This allows one to install and run RMySQL without setting any environment variables. (Note that MySQL should be installed from the mysql site. xampp and wamp do not include the header files needed by RMySQL.) @@ -71,7 +71,7 @@ Changes in version 0.5-0 or higher. o new command find-miktex.hta can be run without arguments from the - Windows command line or double clicked from Windows Explorer + Windows command line or double clicked from Windows Explorer to show path to the MiKTeX bin directory. o Rversions.hta now also changes the .RData association and has @@ -90,14 +90,14 @@ Changes in version 0.5-0 Changes in version 0.4-3 - o Sweave.bat and Stangle.bat were not automatically finding MiKTeX. + o Sweave.bat and Stangle.bat were not automatically finding MiKTeX. Fixed. Changes in version 0.4-2 o can optionally work off initialization files in place of registry. - Place rbatchfilesrc.bat in current directory or %userprofile% (so - different directories can work off different versions of R, say) + Place rbatchfilesrc.bat in current directory or %userprofile% (so + different directories can work off different versions of R, say) or same directory as the other batchfiles and it will run it first. Typically rbatchfiles.bat would constain these two lines or similar: set R_HOME=C:\Program Files\R\R-2.7.0 @@ -110,13 +110,13 @@ Changes in version 0.4-1 o it is no longer necessary to set any paths to build R packages provided Rtools 2.7 or later is used. Rcmd.bat and the other scripts automatically find Rtools from the registry (including perl) - and if MikTeX is at %ProgramFiles%\MiKTeX* or %SystemDrive%:\MiKTex - then it will find MiKTeX too. New optional environment variables + and if MikTeX is at %ProgramFiles%\MiKTeX* or %SystemDrive%:\MiKTex + then it will find MiKTeX too. New optional environment variables R_TOOLS and R_MIKTEX are available to force specified paths to be used. o new Rtools.bat command that sets the path for the current cmd instance to the one that R*.bat files use internally. That is, rtools/bin, - rtools/perl/bin, rtools/MinGW/bin and MiKTeX .../miktex/bin are added + rtools/perl/bin, rtools/MinGW/bin and MiKTeX .../miktex/bin are added to the path. 
This is not needed to run or install R programs but only if you want to access the rtools for other purposes. @@ -162,7 +162,7 @@ Changes in version 0.4-0 by Dieter Menne. Changes in version 0.3-2 - + o sweave.bat now uses Rterm.bat rather than Rcmd.bat which makes it usable with a basic R installation (i.e. sh.exe not needed). Previously it required Rcmd.bat but now it requires Rterm.bat instead. @@ -173,7 +173,7 @@ Changes in Version 0.3-1 o new find-miktex.bat which lists the mixktex folders from the registry - o new Rscript.bat which allows one to use the Rscript facility in + o new Rscript.bat which allows one to use the Rscript facility in R 2.5.0 and later without changing pathnames. Just place Rscript.bat in any existing path and it will automatically find the current version of R from the registry and run the Rscript.exe that @@ -182,24 +182,24 @@ Changes in Version 0.3-1 o runR.bat. If you have an R script such as myfile.R then you can create a batch script for it by copying runR.bat to myfile.bat. Then when you issue the command myfile or myfile.bat it will run the R script in - myfile.R . Just place myfile.bat and myfile.R anywhere in your path. + myfile.R . Just place myfile.bat and myfile.R anywhere in your path. This uses Rscript.bat . o #Rscript. If you have an Rscript called myfile.R, say, then if you - copy the script to myfile.bat and place - #Rscript %0 %* + copy the script to myfile.bat and place + #Rscript %0 %* as the first line with the remainder being the R commands then issuing the command myfile or myfile.bat will run the R script. The advantage over the runR.bat method is that there is only one file, myfile.bat. - You don't need myfile.R anymore. The disadvantage is that it will - echo the #Rscript line to stdout. This will be fixed if and when - Rscript ever gets the perl/python/ruby -x flag. (The runR approach will - not echo additional lines but does require two files.) + You don't need myfile.R anymore. The disadvantage is that it will + echo the #Rscript line to stdout. This will be fixed if and when + Rscript ever gets the perl/python/ruby -x flag. (The runR approach will + not echo additional lines but does require two files.) o new Rtidy.bat is a sample Rscript that uses the #Rscript facility based on George Georgalis' UNIX code - o withgs.bat now checks for latest ghostscript version. (Previously + o withgs.bat now checks for latest ghostscript version. (Previously version was hard coded and it only worked for that version.) Changes in Version 0.3-0 @@ -211,13 +211,13 @@ Changes in Version 0.3-0 o new --tex, --pdf, --nobck.pdf switches are available on sweave. Also expanded help when entering sweave without args. - + Changes in Version 0.2-9 o updated README and other documentation files and inline documentation o added sweave.bat - + o new google code home page and svn repository http://code.google.com/p/batchfiles/ @@ -244,7 +244,7 @@ Changes in Version 0.2-6 o Rrefresh.bat has been removed (after having been deprecated in in previous versions of batchfiles). - o tested movedir.bat by using it to upgrade R-2.2.0pat to R-2.2.1. + o tested movedir.bat by using it to upgrade R-2.2.0pat to R-2.2.1. See instructions in README. Changes in Version 0.2-5 @@ -261,27 +261,27 @@ Changes in Version 0.2-3 R to another. (This is a temporary solution until R provides facilities for upgrading the libraries, expected in R 2.3.0 .) See README for usage. 
- + o eliminated all code associated with reading and manipulation of R_ENVIRON, R_PROFILE and R_LIBS simplifying the batch files. Use copydir.bat instead. - o Rversions.hta is a javascript GUI version of Rversions.bat + o Rversions.hta is a javascript GUI version of Rversions.bat Changes in Version 0.2-2 o added jgr.bat which starts up the JGR GUI. - o added Rversions.bat which can list the directories of all R versions + o added Rversions.bat which can list the directories of all R versions available and can set one to become the current R version. - o all batch scripts which used the environment variable name Rrw now + o all batch scripts which used the environment variable name Rrw now use the environment variable name R_HOME instead. - o Rcmd.bat, Rgui.bat, R.bat, jgr.bat files will now read R_ENVIRON, + o Rcmd.bat, Rgui.bat, R.bat, jgr.bat files will now read R_ENVIRON, if present, and set the R_LIBS definition in it, if present (unless R_LIBS is already defined as an environment variable). All R_ENVIRON - file syntax accepted by R is supported including comments (#), + file syntax accepted by R is supported including comments (#), var=value, var=${foo-bar} and recursions, var=${A-${B-C}}. o makepkg.bat internals were simplified due to previous point. @@ -290,7 +290,7 @@ Changes in Version 0.2-2 o updated THANKS. - o updated README. More introductory information. Also instructions + o updated README. More introductory information. Also instructions for Rgui shortcut will disable screen flash on startup. Corrections. Changes in Version 0.2-1 @@ -300,18 +300,18 @@ Changes in Version 0.2-1 Changes in Version 0.2-0 o can now support configurations without *.site files (as well as - configurations with *.site files) thereby reducing the minimum - configuration even further. + configurations with *.site files) thereby reducing the minimum + configuration even further. o Rcmd.bat, Rgui.bat and R.bat now temporarily set R_ENVIRON, - R_PROFILE and R_LIBS as needed so that it is no longer necessary to + R_PROFILE and R_LIBS as needed so that it is no longer necessary to copy the *.site files into the etc directory eliminating all reconfiguration when upgrading to a new version of R (except for refreshing MiKTeX). o new command miktex-refresh.bat is used to refresh MiKTeX after a new version of R is installed. Previously this was done in - Rrefresh.bat which is now deprecated. Rrefresh.bat is no longer + Rrefresh.bat which is now deprecated. Rrefresh.bat is no longer needed (unless you want each R version to have its own *.site files). o new NEWS, WISHLIST and RESOURCES files. diff --git a/mswindows/external/rbatch/R.bat b/mswindows/external/rbatch/R.bat index fd28421d126..8cbb2f65e88 100644 --- a/mswindows/external/rbatch/R.bat +++ b/mswindows/external/rbatch/R.bat @@ -1,6 +1,6 @@ -@Echo OFF +@Echo OFF -:: Software and documentation is (c) 2013 GKX Associates Inc. and +:: Software and documentation is (c) 2013 GKX Associates Inc. and :: licensed under [GPL 2.0](https://www.gnu.org/licenses/gpl-2.0.html). :: Help is at bottom of script or just run script with single argument: help @@ -19,7 +19,7 @@ if not defined R_REGISTRY set R_REGISTRY=1 set CYGWIN=nodosfilewarning -SetLocal EnableExtensions EnableDelayedExpansion +SetLocal EnableExtensions EnableDelayedExpansion :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: :: R_CMD @@ -62,11 +62,11 @@ rem echo R_CMD:%R_CMD% args=[%args%] ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: :: 1. 
If .\Rgui.exe exist use implied R_PATH and skip remaining points. :: 2. If .\{x64,i386}\Rgui.exe or .\bin\{x64,i386}\Rgui.exe exists use implied R_HOME. -:: 3. if R_HOME defined then derive any of R_ROOT and R_VER that +:: 3. if R_HOME defined then derive any of R_ROOT and R_VER that :: are not already defined. :: 4. if R_PATH defined then derive any of R_ROOT, R_HOME, R_VER and R_ARCH that :: are not already defined. -:: 4a. If R_REGISTRY=1 and R found in registry derive any of R_HOME, R_ROOT and +:: 4a. If R_REGISTRY=1 and R found in registry derive any of R_HOME, R_ROOT and :: R_VER that are not already defined. :: 5. If R_ROOT not defined try %ProgramFiles%\R\*, %ProgramFiles(x86)%\R\* :: and then %SystemRoot%\R else error @@ -149,7 +149,7 @@ if defined R_HOME ( ) ) - + :: 5 if defined R_ROOT goto:R_ROOT_end @@ -385,10 +385,10 @@ goto:eof ver | findstr XP >NUL if not errorlevel 1 goto:Rtouch_next if not exist "%ProgramFiles%\R" goto:Rtouch_next -reg query "HKU\S-1-5-19" >NUL 2>&1 && ( goto Rtouch_next ) || ( +reg query "HKU\S-1-5-19" >NUL 2>&1 && ( goto Rtouch_next ) || ( echo Please run this as Administator. goto :eof -) +) :Rtouch_next if not defined R_HOME set R_HOME=%R_ROOT%\%R_VER% @@ -401,7 +401,7 @@ goto:eof ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: -:: set path +:: set path ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: :Rpath endlocal & PATH %PATH%;%R_PATH% @@ -450,15 +450,15 @@ goto:eof :: Extract text from file: :: %1 = input string that starts text :: %2 = input file -:: final = output variable holding text from and including %1 until +:: final = output variable holding text from and including %1 until :: binary data encountered :: -:: Needs: SetLocal EnableExtensions EnableDelayedExpansion +:: Needs: SetLocal EnableExtensions EnableDelayedExpansion :: -:: Example: +:: Example: :: call :extract_string {app} C:\Rtools\unins000.dat :: echo %final% -:: where {app} is the string that starts extraction and +:: where {app} is the string that starts extraction and :: C:\Rtoolsiunins000.dat is the file :: :: Based on code by Frank Westlake, https://github.com/FrankWestlake @@ -467,61 +467,61 @@ goto:eof :extract_string - SetLocal EnableExtensions EnableDelayedExpansion + SetLocal EnableExtensions EnableDelayedExpansion - Set "string=%1" + Set "string=%1" Set "file=%2" - For /F "delims=" %%a in ( - 'findstr /C:"%string%" "%file%"^|MORE' - ) Do ( - Set "$=%%~a" - If /I "!$:~0,5!" EQU "%string%" ( - Set $=!$:;=" "! - For %%b in ("!$!") Do ( - Set "#=%%~b" - If "!#:~0,5!" EQU "%string%" ( + For /F "delims=" %%a in ( + 'findstr /C:"%string%" "%file%"^|MORE' + ) Do ( + Set "$=%%~a" + If /I "!$:~0,5!" EQU "%string%" ( + Set $=!$:;=" "! + For %%b in ("!$!") Do ( + Set "#=%%~b" + If "!#:~0,5!" EQU "%string%" ( CALL :work "!#!" - ) - ) - ) - ) + ) + ) + ) + ) endlocal & set final=%final% - Goto :EOF - :work + Goto :EOF + :work set final=%final%!#!; - Goto :EOF - - :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: - :trimPath: [segment to add] - :: Eliminates redundant path segments from the variable and - :: optionally adds new segmants. - :: Example: CALL :trimPath:PATH - :: Example: CALL :trimPath:PATH "C:\A & B" C:\a\b\c - :: - :: Note that only a colon separates the subroutine name and - :: the name of the variable to be edited. 
+ Goto :EOF + + :::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: + :trimPath: [segment to add] + :: Eliminates redundant path segments from the variable and + :: optionally adds new segmants. + :: Example: CALL :trimPath:PATH + :: Example: CALL :trimPath:PATH "C:\A & B" C:\a\b\c + :: + :: Note that only a colon separates the subroutine name and + :: the name of the variable to be edited. :: - Frank Westlake, https://github.com/FrankWestlake - SetLocal EnableExtensions EnableDelayedExpansion - For /F "tokens=2 delims=:" %%a in ("%0") Do ( - For %%a in (%* !%%a!) Do ( - Set "#=%%~a" - For %%b in (!new!) Do If /I "!#!" EQU "%%~b" Set "#=" - If DEFINED # ( - If DEFINED new (Set "new=!new!;!#!") Else ( Set "new=!#!") - ) - ) - ) - EndLocal & For /F "tokens=2 delims=:" %%a in ("%0") Do Set "%%a=%new%" - Goto :EOF - -:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: + SetLocal EnableExtensions EnableDelayedExpansion + For /F "tokens=2 delims=:" %%a in ("%0") Do ( + For %%a in (%* !%%a!) Do ( + Set "#=%%~a" + For %%b in (!new!) Do If /I "!#!" EQU "%%~b" Set "#=" + If DEFINED # ( + If DEFINED new (Set "new=!new!;!#!") Else ( Set "new=!#!") + ) + ) + ) + EndLocal & For /F "tokens=2 delims=:" %%a in ("%0") Do Set "%%a=%new%" + Goto :EOF + +:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::: :Rhelp -echo (c) 2013 G. Grothendieck +echo (c) 2013 G. Grothendieck echo License: GPL 2.0 ( https://www.gnu.org/licenses/gpl-2.0.html ) -echo Launch script for R and associated functions. +echo Launch script for R and associated functions. echo Usage: R.bat [subcommand] [arguments] echo Subcommands where (0) means takes no arguments; (A) means may need Admin priv echo cd - cd to R_ROOT, typically to C:\Program Files\R (0) @@ -555,35 +555,35 @@ echo start set R_HOME=%ProgramFiles%\R\R-2.14.0 ^& R gui echo echo ==Customization by renaming== echo. -echo If the optional first argument is missing then it uses the value of -echo the environment variable R_CMD or if that is not set it uses the name of -echo the script file as the default first argument. The idea is one could have +echo If the optional first argument is missing then it uses the value of +echo the environment variable R_CMD or if that is not set it uses the name of +echo the script file as the default first argument. The idea is one could have echo multiple versions of the script called R.bat, Rgui.bat, etc. which invoke echo the corresponding functionality without having to specify first argument. echo. echo ==Customization by setting environment variables at top of script== echo. -echo It can be customized by setting any of R_CMD, R_HOME, R_ARCH, -echo R_MIKTEX_PATH, R_TOOLS after the @echo off command at the top of the -echo script. R_CMD will be used as the default first argument (instead of the -echo script name). +echo It can be customized by setting any of R_CMD, R_HOME, R_ARCH, +echo R_MIKTEX_PATH, R_TOOLS after the @echo off command at the top of the +echo script. R_CMD will be used as the default first argument (instead of the +echo script name). echo. echo e.g. use the following after @echo off to force 32-bit echo set R_ARCH=32 echo. -echo e.g. use the following after @echo off to force a particular version of +echo e.g. use the following after @echo off to force a particular version of echo R to be used echo set R_HOME=%ProgramFiles%\R\R-2.14.0 echo. -echo e.g. use the following after @echo off to change the default command to +echo e.g. 
use the following after @echo off to change the default command to echo Rgui even if the script is called myRgui.bat, say: echo set R_CMD=Rgui echo. echo ==Installation== -echo. +echo. echo The script is self contained so just place it anywhere on your Windows echo PATH. (From the Windows cmd line the command PATH shows your current -echo Windows path.) You may optionally make copies of this script with names +echo Windows path.) You may optionally make copies of this script with names echo like R.bat, Rscript.bat, Rcmd.bat so that each has a different default. echo. diff --git a/mswindows/external/rbatch/README.grass b/mswindows/external/rbatch/README.grass index 40bc7a307a7..5566aba14d6 100644 --- a/mswindows/external/rbatch/README.grass +++ b/mswindows/external/rbatch/README.grass @@ -1,11 +1,11 @@ README.grass - written by Helmut Kudrnovsky - alectoria gmx.at -This directory contains windows batch files for installing and maintaining R, for +This directory contains windows batch files for installing and maintaining R, for an improved coupling of GRASS GIS and R (www.r-project.org) in a windows environment. -- -Files integrated from +Files integrated from svn checkout http://batchfiles.googlecode.com/svn/trunk/ diff --git a/mswindows/external/rbatch/README.html b/mswindows/external/rbatch/README.html index 104405115b2..71335b5915a 100644 --- a/mswindows/external/rbatch/README.html +++ b/mswindows/external/rbatch/README.html @@ -2,7 +2,7 @@

    Batchfiles

    -Home Page: batchfiles home page. +Home Page: batchfiles home page.

    Discuss: sqldf dicussion group is being used for discussion of this software too.

    diff --git a/mswindows/external/rbatch/RESOURCES b/mswindows/external/rbatch/RESOURCES index 39471b824e4..d5ac89da8af 100644 --- a/mswindows/external/rbatch/RESOURCES +++ b/mswindows/external/rbatch/RESOURCES @@ -1,7 +1,7 @@ GENERAL RESOURCES ON WINDOWS BATCH FILE PROGRAMMMING ---------------------------------------------------- -The Windows command line commands will bring up help information +The Windows command line commands will bring up help information that is particularly useful: help set @@ -18,7 +18,7 @@ Here are some links on Windows batch file programmming. http://groups-beta.google.com/group/alt.msdos.batch.nt/msg/5a9587e871c27a75 - cmd bugs - http://groups-beta.google.com/group/alt.msdos.batch/msg/7b1d22945c89af75 - cmd help resources + http://groups-beta.google.com/group/alt.msdos.batch/msg/7b1d22945c89af75 - cmd help resources http://msdn.microsoft.com/downloads/list/webdev.asp - Windows script downloads @@ -28,9 +28,9 @@ Here are some links on Windows batch file programmming. http://www.commandline.co.uk - Ritchie Lawrence cmd line utilities - http://www.cybermesa.com/~bstewart/ - Bill Stewarts scripting tools + http://www.cybermesa.com/~bstewart/ - Bill Stewart�s scripting tools - http://www.fpschultze.de - FP Shcultzes batch tricks + http://www.fpschultze.de - FP Shcultze�s batch tricks http://www.microsoft.com/technet/community/columns/scripts - MS TechNet scripting diff --git a/mswindows/external/rbatch/Rpathset.bat b/mswindows/external/rbatch/Rpathset.bat index 590e9a58980..d3762ce66f2 100644 --- a/mswindows/external/rbatch/Rpathset.bat +++ b/mswindows/external/rbatch/Rpathset.bat @@ -1,10 +1,10 @@ -:: Software and documentation is (c) 2013 GKX Associates Inc. and +:: Software and documentation is (c) 2013 GKX Associates Inc. and :: licensed under [GPL 2.0](https://www.gnu.org/licenses/gpl-2.0.html). :: Purpose: setup path to use R, Rtools and other utilities from cmd line. :: :: Makes no permanent system changes. Does not read or write registry. -:: Temporarily prepends to PATH and sets environment variables for current +:: Temporarily prepends to PATH and sets environment variables for current :: Windows cmd line session only. :: :: Use: Run this each time you launch cmd.exe and want to use R or Rtools. @@ -14,13 +14,13 @@ :: Install: Modify set statements appropriately for your installation. :: and then place this batch script anywhre on your existing path. :: (The Windows commandline command PATH shows the current PATH.) -:: +:: :: In many cases no changes are needed at all in this file. :: R_HOME and R_ARCH are the most likely that may need to be changed. :: :: Report bugs to: :: ggrothendieck at gmail.com -:: +:: :: License: GPL 2.0 :: Go into R and issue this command: normalizePath(R.home()) @@ -29,7 +29,7 @@ :: R is available from: http://www.r-project.org set R_HOME=C:\Program Files\R\R-3.1.0 -:: 32 or 64 bit version of R. +:: 32 or 64 bit version of R. :: (If you wish to use both versions of R make two versions of this file.) :: set R_ARCH=i386 set R_ARCH=x64 @@ -38,21 +38,21 @@ set R_ARCH=x64 set R_PATH=%R_HOME%\bin\%R_ARCH% :: directory path where Rtools was installed. Usually best to use default -:: which is the one shown below. Note that different versions of R may +:: which is the one shown below. Note that different versions of R may :: require different versions of Rtools. :: Rtools is available from: http://cran.r-project.org/bin/windows/Rtools/ set R_TOOLS=C:\Rtools -:: If in future Rtools changes the required paths then modify accordingly. 
-:: To check, run the following findstr command which lists the R_TOOLS_PATH +:: If in future Rtools changes the required paths then modify accordingly. +:: To check, run the following findstr command which lists the R_TOOLS_PATH :: (plus some garbage): :: findstr {app} %R_TOOLS%\unins000.dat set R_TOOLS_PATH=%R_TOOLS%\bin;%R_TOOLS%\gcc-4.6.3\bin :: From within R, the R_USER directory path can be viewed like this: :: cat(normalizePath('~')) -:: It contains your personal .Rprofile, if any, and unless set otherwise -:: %R_USER%\R\win-library contains your personal R library of packages +:: It contains your personal .Rprofile, if any, and unless set otherwise +:: %R_USER%\R\win-library contains your personal R library of packages :: (from CRAN and elsewhere). set R_USER=%userprofile%\Documents @@ -74,11 +74,11 @@ set R_MIKTEX_PATH=C:\Program Files (x86)\MiKTeX 2.9\miktex\bin :: This is only needed to run JGR and Deducer. :: R_LIBS is the system library. -:: If you have installed at least one package (at which point R will ask to +:: If you have installed at least one package (at which point R will ask to :: set up a personal library -- which you should allow) then R_LIBS_USER :: is similar to output of .libPaths() with first comnponent being your -:: personal library and second component being library holding packages that -:: come with R. +:: personal library and second component being library holding packages that +:: come with R. :: Be sure NOT to store the packages that you downloaded from CRAN :: in the %R_HOME%\library directory. :: set R_LIBS=%R_USER%\R\win-library\2.15 diff --git a/mswindows/external/rbatch/batchfiles.tex b/mswindows/external/rbatch/batchfiles.tex index acfa8c6e634..87653c10b0d 100644 --- a/mswindows/external/rbatch/batchfiles.tex +++ b/mswindows/external/rbatch/batchfiles.tex @@ -275,7 +275,7 @@ \subsection{Rpathset.bat} An alternative to \begin{verbatim} -R.bat path +R.bat path \end{verbatim} is the \texttt{Rpathset.bat}. Unlike \texttt{R.bat}, @@ -300,7 +300,7 @@ \subsection{movedir.bat and copydir.bat} \begin{verbatim} cd %userprofile%\\Documents\\win-library copydir 2.15\\library 3.0\\library -R.bat gui +R.bat gui ... now enter update.packages() into R... \end{verbatim} diff --git a/mswindows/external/rbatch/copydir.bat b/mswindows/external/rbatch/copydir.bat index 9ab646e6e0c..3e80f2054d3 100644 --- a/mswindows/external/rbatch/copydir.bat +++ b/mswindows/external/rbatch/copydir.bat @@ -1,12 +1,12 @@ @echo off -:: Software and documentation is (c) 2013 GKX Associates Inc. and +:: Software and documentation is (c) 2013 GKX Associates Inc. and :: licensed under [GPL 2.0](https://www.gnu.org/licenses/gpl-2.0.html). setlocal if not "%2"=="" goto:run echo Usage: copydir fromdir todir -echo All files/directories in fromdir that do not also exist in todir are +echo All files/directories in fromdir that do not also exist in todir are echo recursively copied. -echo e.g. +echo e.g. 
echo cd "%userprofile%\Documents\R\win-library" echo copydir 2.14 2.15 echo Now start up R 2.15.x and issue update.packages() diff --git a/mswindows/external/rbatch/find-miktex.hta b/mswindows/external/rbatch/find-miktex.hta index 43a994fa5d9..8b7603723a2 100644 --- a/mswindows/external/rbatch/find-miktex.hta +++ b/mswindows/external/rbatch/find-miktex.hta @@ -5,7 +5,7 @@ .highlight {background:#ff00ff} .text {color:#ff00ff} .both {color:white;background:black} - + find-miktex @@ -25,7 +25,7 @@ while (true) { i++; } catch(e) {break}; } - + diff --git a/mswindows/external/rbatch/make-batchfiles-pdf.bat b/mswindows/external/rbatch/make-batchfiles-pdf.bat index 5e133489ff0..f366db29176 100644 --- a/mswindows/external/rbatch/make-batchfiles-pdf.bat +++ b/mswindows/external/rbatch/make-batchfiles-pdf.bat @@ -1,4 +1,3 @@ - :: make pdf documentation. To run: :: 1. install pandoc from http://code.google.com/p/pandoc/downloads/list :: 2. run this file diff --git a/mswindows/external/rbatch/movedir.bat b/mswindows/external/rbatch/movedir.bat index 57fb0a7a498..53c35ae7487 100644 --- a/mswindows/external/rbatch/movedir.bat +++ b/mswindows/external/rbatch/movedir.bat @@ -1,12 +1,12 @@ @echo off -:: Software and documentation is (c) 2013 GKX Associates Inc. and +:: Software and documentation is (c) 2013 GKX Associates Inc. and :: licensed under [GPL 2.0](https://www.gnu.org/licenses/gpl-2.0.html). setlocal if not "%2"=="" goto:run echo Usage: copydir fromdir todir -echo All files/directories in fromdir that do not also exist in todir are +echo All files/directories in fromdir that do not also exist in todir are echo recursively copied. -echo e.g. +echo e.g. echo cd "%userprofile%\Documents\R\win-library" echo movedir 2.14 2.15 echo Now start up R 2.15.x and issue update.packages() diff --git a/mswindows/generic.manifest b/mswindows/generic.manifest index 0855fe9cba5..bd0ea40eea3 100644 --- a/mswindows/generic.manifest +++ b/mswindows/generic.manifest @@ -1,10 +1,10 @@ - + - GRASS modules + type="win32"/> + GRASS modules diff --git a/mswindows/osgeo4w/build_osgeo4w.sh b/mswindows/osgeo4w/build_osgeo4w.sh index 98ed16aac1e..252b615339f 100755 --- a/mswindows/osgeo4w/build_osgeo4w.sh +++ b/mswindows/osgeo4w/build_osgeo4w.sh @@ -21,6 +21,8 @@ export C_INCLUDE_PATH=".:${OSGEO4W_ROOT_MSYS}/include:${SRC}/dist.${ARCH}/includ export PYTHONHOME=${OSGEO4W_ROOT_MSYS}/apps/Python312 export ARCH=x86_64-w64-mingw32 +CFLAGS="$CFLAGS -pipe" \ +CXXFLAGS="$CXXFLAGS -pipe" \ ./configure \ --bindir=${OSGEO4W_ROOT_MSYS}/bin \ --enable-largefile \ @@ -34,7 +36,7 @@ export ARCH=x86_64-w64-mingw32 --with-cairo \ --with-cairo-includes=${OSGEO4W_ROOT_MSYS}/include \ --with-cairo-ldflags="-L${SRC}/mswindows/osgeo4w/lib -lcairo" \ - --with-cairo-libs=$OSGEO4W_ROOT_MSYS/lib \ + --with-cairo-libs=${OSGEO4W_ROOT_MSYS}/lib \ --with-cxx \ --with-fftw \ --with-freetype \ @@ -44,6 +46,7 @@ export ARCH=x86_64-w64-mingw32 --with-includes=${OSGEO4W_ROOT_MSYS}/include \ --with-lapack \ --with-liblas=${SRC}/mswindows/osgeo4w/liblas-config \ + --with-libpng=${SRC}/mswindows/osgeo4w/libpng-config \ --with-libs="${OSGEO4W_ROOT_MSYS}/lib ${OSGEO4W_ROOT_MSYS}/bin" \ --with-netcdf=${OSGEO4W_ROOT_MSYS}/bin/nc-config \ --with-nls \ diff --git a/mswindows/osgeo4w/env.bat.tmpl b/mswindows/osgeo4w/env.bat.tmpl index 200c86f2687..04065161a78 100644 --- a/mswindows/osgeo4w/env.bat.tmpl +++ b/mswindows/osgeo4w/env.bat.tmpl @@ -8,6 +8,8 @@ REM Uncomment if you want to use Bash instead of Cmd REM Note that msys package must be also installed REM set 
GRASS_SH=%OSGEO4W_ROOT%\apps\msys\bin\sh.exe +set PYTHONPATH=%OSGEO4W_ROOT%\apps\grass\grass@POSTFIX@\etc\python;%PYTHONPATH% +set GRASS_COMPATIBILITY_TEST=0 set GRASS_PYTHON=%OSGEO4W_ROOT%\bin\python3.exe set GRASS_PROJSHARE=%OSGEO4W_ROOT%\share\proj diff --git a/mswindows/osgeo4w/gdal-config b/mswindows/osgeo4w/gdal-config index 1917531b73e..9ad585f47b5 100755 --- a/mswindows/osgeo4w/gdal-config +++ b/mswindows/osgeo4w/gdal-config @@ -25,7 +25,7 @@ if test $# -eq 0; then usage 1 1>&2 fi -case $1 in +case $1 in --libs) echo $CONFIG_LIBS ;; diff --git a/mswindows/osgeo4w/geos-config b/mswindows/osgeo4w/geos-config index 6b52971ee80..523059726ee 100755 --- a/mswindows/osgeo4w/geos-config +++ b/mswindows/osgeo4w/geos-config @@ -32,7 +32,7 @@ case $1 in echo $OSGEO4W_ROOT_MSYS/lib/geos_c.lib ;; --ldflags) - echo + echo ;; --includes) echo $OSGEO4W_ROOT_MSYS/include diff --git a/mswindows/osgeo4w/liblas-config b/mswindows/osgeo4w/liblas-config index 0fbef69b696..05212ae94fc 100755 --- a/mswindows/osgeo4w/liblas-config +++ b/mswindows/osgeo4w/liblas-config @@ -7,57 +7,57 @@ INCLUDES="-I${prefix}/include " LIBS="-L$libdir -llas -llas_c" GDAL_INCLUDE="" -if test -n "$GDAL_INCLUDE" ; then +if test -n "$GDAL_INCLUDE" ; then INCLUDES="$INCLUDES -I$GDAL_INCLUDE" fi GDAL_LIBRARY="" -if test -n "$GDAL_LIBRARY" ; then +if test -n "$GDAL_LIBRARY" ; then LIBS="$LIBS $GDAL_LIBRARY" fi GEOTIFF_INCLUDE="" -if test -n "$GEOTIFF_INCLUDE" ; then +if test -n "$GEOTIFF_INCLUDE" ; then INCLUDES="$INCLUDES -I$GEOTIFF_INCLUDE" fi GEOTIFF_LIBRARY="" -if test -n "$GEOTIFF_LIBRARY" ; then +if test -n "$GEOTIFF_LIBRARY" ; then LIBS="$LIBS $GEOTIFF_LIBRARY" fi ORACLE_INCLUDE="" -if test -n "$ORACLE_INCLUDE" ; then +if test -n "$ORACLE_INCLUDE" ; then INCLUDES="$INCLUDES -I$ORACLE_INCLUDE" fi ORACLE_OCI_LIBRARY="" -if test -n "$ORACLE_OCI_LIBRARY" ; then +if test -n "$ORACLE_OCI_LIBRARY" ; then LIBS="$LIBS $ORACLE_OCI_LIBRARY " fi TIFF_INCLUDE="" -if test -n "$TIFF_INCLUDE" ; then +if test -n "$TIFF_INCLUDE" ; then INCLUDES="$INCLUDES -I$TIFF_INCLUDE" fi TIFF_LIBRARY="" -if test -n "$TIFF_LIBRARY" ; then +if test -n "$TIFF_LIBRARY" ; then LIBS="$LIBS $TIFF_LIBRARY" fi LIBXML2_INCLUDE_DIR="" -if test -n "$LIBXML2_INCLUDE_DIR" ; then +if test -n "$LIBXML2_INCLUDE_DIR" ; then INCLUDES="$INCLUDES -I$LIBXML2_INCLUDE_DIR" fi LIBXML2_LIBRARIES="" -if test -n "$LIBXML2_LIBRARIES" ; then +if test -n "$LIBXML2_LIBRARIES" ; then LIBS="$LIBS $LIBXML2_LIBRARIES" fi LASZIP_INCLUDE_DIR="" -if test -n "$LASZIP_INCLUDE_DIR" ; then +if test -n "$LASZIP_INCLUDE_DIR" ; then INCLUDES="$INCLUDES -I$LASZIP_INCLUDE_DIR" fi LASZIP_LIBRARY="" -if test -n "$LASZIP_LIBRARY" ; then +if test -n "$LASZIP_LIBRARY" ; then LIBS="$LIBS $LASZIP_LIBRARY" fi @@ -80,8 +80,8 @@ if test $# -eq 0; then usage 1 1>&2 fi -case $1 in - --libs) +case $1 in + --libs) echo $OSGEO4W_ROOT_MSYS/lib/liblas_c.lib ;; @@ -94,7 +94,7 @@ case $1 in ;; --defines) - echo + echo ;; --includes) @@ -102,13 +102,13 @@ case $1 in ;; --cflags) - echo + echo ;; --cxxflags) echo -pedantic -ansi -Wall -Wpointer-arith -Wcast-align -Wcast-qual -Wfloat-equal -Wredundant-decls -Wno-long-long -std=c++98 ;; - + *) usage 1 1>&2 ;; diff --git a/mswindows/osgeo4w/libpng-config b/mswindows/osgeo4w/libpng-config index f00a136be1e..e8d8be2f671 100755 --- a/mswindows/osgeo4w/libpng-config +++ b/mswindows/osgeo4w/libpng-config @@ -14,7 +14,7 @@ prefix="${OSGEO4W_ROOT_MSYS}" version="$(sed '/^#define PNG_LIBPNG_VER_STRING/!d; s/^[^"]*"\|"//g' ${prefix}/include/libpng*/png.h)" -dll_version="$(sed 
'/^#define PNG_LIBPNG_VER_DLLNUM/!d; s/^[^0-9]*\|[^0-9]*$//g' ${prefix}/include/libpng*/png.h)" +dll_version="$(sed '/^#define PNG_LIBPNG_VER_SHAREDLIB/!d; s/^[^0-9]*\|[^0-9]*$//g' ${prefix}/include/libpng*/png.h)" exec_prefix="${prefix}" libdir="${prefix}/lib" includedir="${prefix}/include/libpng${dll_version}" diff --git a/mswindows/osgeo4w/mysql_config b/mswindows/osgeo4w/mysql_config index 11204eb9c72..d6b96ed2f13 100755 --- a/mswindows/osgeo4w/mysql_config +++ b/mswindows/osgeo4w/mysql_config @@ -1,15 +1,15 @@ #!/bin/sh # Copyright (c) 2000, 2016, Oracle and/or its affiliates. All rights reserved. -# +# # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; version 2 of the License. -# +# # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. -# +# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA @@ -109,7 +109,7 @@ else port=3306 fi -# Create options +# Create options # We intentionally add a space to the beginning and end of lib strings, simplifies replace later libs=" $ldflags -L$pkglibdir -lmysqlclient -lpthread -lz -lm -ldl " libs="$libs " @@ -131,19 +131,19 @@ for remove in DDBUG_OFF DSAFE_MUTEX DFORCE_INIT_OF_VARS \ do # The first option we might strip will always have a space before it because # we set -I$pkgincludedir as the first option - cflags=`echo "$cflags"|sed -e "s/ -$remove */ /g"` - cxxflags=`echo "$cxxflags"|sed -e "s/ -$remove */ /g"` + cflags=`echo "$cflags"|sed -e "s/ -$remove */ /g"` + cxxflags=`echo "$cxxflags"|sed -e "s/ -$remove */ /g"` done -cflags=`echo "$cflags"|sed -e 's/ *\$//'` -cxxflags=`echo "$cxxflags"|sed -e 's/ *\$//'` +cflags=`echo "$cflags"|sed -e 's/ *\$//'` +cxxflags=`echo "$cxxflags"|sed -e 's/ *\$//'` # Same for --libs(_r) for remove in lmtmalloc static-libcxa i-static static-intel do # We know the strings starts with a space - libs=`echo "$libs"|sed -e "s/ -$remove */ /g"` - libs_r=`echo "$libs_r"|sed -e "s/ -$remove */ /g"` - embedded_libs=`echo "$embedded_libs"|sed -e "s/ -$remove */ /g"` + libs=`echo "$libs"|sed -e "s/ -$remove */ /g"` + libs_r=`echo "$libs_r"|sed -e "s/ -$remove */ /g"` + embedded_libs=`echo "$embedded_libs"|sed -e "s/ -$remove */ /g"` done # Strip trailing and ending space if any, and '+' (FIXME why?) diff --git a/mswindows/osgeo4w/package.sh b/mswindows/osgeo4w/package.sh index c189cf92c93..50d5b9ddc4d 100755 --- a/mswindows/osgeo4w/package.sh +++ b/mswindows/osgeo4w/package.sh @@ -1,8 +1,9 @@ -#!/usr/bin/bash +#!/bin/bash set -e -PWD="$(pwd)" +export ARCH=x86_64-w64-mingw32 +export SRC=$PWD if ! 
[ -d mswindows ]; then echo Start from GRASS toplevel dir @@ -106,35 +107,6 @@ fi exec 3>&1 > >(tee mswindows/osgeo4w/package.log) 2>&1 -DLLS=" - /mingw64/bin/libblas.dll - /mingw64/bin/libbrotlicommon.dll - /mingw64/bin/libbrotlidec.dll - /mingw64/bin/libbz2-1.dll - /mingw64/bin/libcairo-2.dll - /mingw64/bin/libfftw3-3.dll - /mingw64/bin/libfontconfig-1.dll - /mingw64/bin/libfreetype-6.dll - /mingw64/bin/libgcc_s_seh-1.dll - /mingw64/bin/libgfortran-5.dll - /mingw64/bin/libglib-2.0-0.dll - /mingw64/bin/libgomp-1.dll - /mingw64/bin/libgraphite2.dll - /mingw64/bin/libharfbuzz-0.dll - /mingw64/bin/libiconv-2.dll - /mingw64/bin/libintl-8.dll - /mingw64/bin/liblapack.dll - /mingw64/bin/libpcre-1.dll - /mingw64/bin/libpixman-1-0.dll - /mingw64/bin/libpng16-16.dll - /mingw64/bin/libquadmath-0.dll - /mingw64/bin/libstdc++-6.dll - /mingw64/bin/libsystre-0.dll - /mingw64/bin/libtre-5.dll - /mingw64/bin/libwinpthread-1.dll - /mingw64/bin/zlib1.dll -" - if ! [ -f mswindows/osgeo4w/configure-stamp ]; then if [ -e include/Make/Platform.make ] ; then log make distclean @@ -144,53 +116,54 @@ if ! [ -f mswindows/osgeo4w/configure-stamp ]; then log remove old logs rm -f mswindows/osgeo4w/package.log.* - mkdir -p dist.x86_64-w64-mingw32/bin - cp -uv $DLLS dist.x86_64-w64-mingw32/bin - mkdir -p mswindows/osgeo4w/lib - cp -uv $OSGEO4W_ROOT_MSYS/lib/libpq.lib mswindows/osgeo4w/lib/pq.lib cp -uv $OSGEO4W_ROOT_MSYS/lib/sqlite3_i.lib mswindows/osgeo4w/lib/sqlite3.lib + log configure + CFLAGS="$CFLAGS -pipe" \ + CXXFLAGS="$CXXFLAGS -pipe" \ ./configure \ - --bindir=$OSGEO4W_ROOT_MSYS/bin \ + --bindir=${OSGEO4W_ROOT_MSYS}/bin \ --enable-largefile \ --enable-shared \ - --host=x86_64-w64-mingw32 \ - --includedir=$OSGEO4W_ROOT_MSYS/include \ - --libexecdir=$OSGEO4W_ROOT_MSYS/bin \ - --prefix=$OSGEO4W_ROOT_MSYS/apps/grass \ + --host=${ARCH} \ + --includedir=${OSGEO4W_ROOT_MSYS}/include \ + --libexecdir=${OSGEO4W_ROOT_MSYS}/bin \ + --prefix=${OSGEO4W_ROOT_MSYS}/apps/grass \ --with-blas \ --with-bzlib \ --with-cairo \ - --with-cairo-includes=$OSGEO4W_ROOT_MSYS/include \ - --with-cairo-ldflags="-L$PWD/mswindows/osgeo4w/lib -lcairo -lfontconfig" \ + --with-cairo-includes=${OSGEO4W_ROOT_MSYS}/include \ + --with-cairo-ldflags="-L${SRC}/mswindows/osgeo4w/lib -lcairo" \ + --with-cairo-libs=${OSGEO4W_ROOT_MSYS}/lib \ --with-cxx \ --with-fftw \ --with-freetype \ - --with-freetype-includes=/mingw64/include/freetype2 \ - --with-gdal=$PWD/mswindows/osgeo4w/gdal-config \ - --with-geos=$PWD/mswindows/osgeo4w/geos-config \ - --with-includes=$OSGEO4W_ROOT_MSYS/include \ + --with-freetype-includes=${OSGEO4W_ROOT_MSYS}/include/freetype2 \ + --with-gdal=${SRC}/mswindows/osgeo4w/gdal-config \ + --with-geos=${SRC}/mswindows/osgeo4w/geos-config \ + --with-includes=${OSGEO4W_ROOT_MSYS}/include \ --with-lapack \ - --with-lapack-includes=/mingw64/include \ - --with-liblas=$PWD/mswindows/osgeo4w/liblas-config \ - --with-libs="$OSGEO4W_ROOT_MSYS/lib" \ + --with-liblas=${SRC}/mswindows/osgeo4w/liblas-config \ + --with-libpng=${SRC}/mswindows/osgeo4w/libpng-config \ + --with-libs="${OSGEO4W_ROOT_MSYS}/lib ${OSGEO4W_ROOT_MSYS}/bin" \ --with-netcdf=${OSGEO4W_ROOT_MSYS}/bin/nc-config \ --with-nls \ --with-odbc \ --with-opengl=windows \ --with-openmp \ --with-postgres \ - --with-postgres-includes=$OSGEO4W_ROOT_MSYS/include \ - --with-postgres-libs=$PWD/mswindows/osgeo4w/lib \ - --with-proj-includes=$OSGEO4W_ROOT_MSYS/include \ - --with-proj-libs=$OSGEO4W_ROOT_MSYS/lib \ - --with-proj-share=$OSGEO4W_ROOT_MSYS/share/proj \ + 
--with-postgres-includes=${OSGEO4W_ROOT_MSYS}/include \ + --with-postgres-libs=${OSGEO4W_ROOT_MSYS}/lib \ + --with-proj-includes=${OSGEO4W_ROOT_MSYS}/include \ + --with-proj-libs=${OSGEO4W_ROOT_MSYS}/lib \ + --with-proj-share=${OSGEO4W_ROOT_MSYS}/share/proj \ + --with-readline \ --with-regex \ --with-sqlite \ - --with-sqlite-includes=$OSGEO4W_ROOT_MSYS/include \ - --with-sqlite-libs=$PWD/mswindows/osgeo4w/lib \ + --with-sqlite-includes=${OSGEO4W_ROOT_MSYS}/include \ + --with-sqlite-libs=${OSGEO4W_ROOT_MSYS}/lib \ --with-zstd \ --without-pdal \ --without-x @@ -246,9 +219,11 @@ if [ -n "$PACKAGE_PATCH" ]; then unix2dos etc/postinstall/grass${PACKAGE_POSTFIX}.bat unix2dos etc/preremove/grass${PACKAGE_POSTFIX}.bat - # copy dependencies (TODO: to be reduced) - cp -uv $DLLS apps/grass/grass$POSTFIX/bin - cp -uv /mingw64/etc/fonts/fonts.conf apps/grass/grass$POSTFIX/etc + # copy dependencies + cp -uv $(/usr/bin/find apps/grass/grass$POSTFIX -iname "*.dll" -o -iname "*.exe" | PATH=$PWD/apps/grass/grass$POSTFIX/lib:$PWD/bin:/mingw64/bin:/usr/bin /usr/bin/xargs /usr/bin/ldd | /usr/bin/sed -ne 's#^.* => \(/mingw64/bin/.*\) (.*)$#\1#p' | /usr/bin/sort -u) apps/grass/grass$POSTFIX/bin + + # copy R batch files + cp -uv $SRC/mswindows/external/rbatch/* apps/grass/grass$POSTFIX/bin # creating grass package /bin/tar -cjf $PDIR/grass$PACKAGE_POSTFIX-$VERSION-$PACKAGE_PATCH.tar.bz2 \ diff --git a/mswindows/osgeo4w/postinstall.bat b/mswindows/osgeo4w/postinstall.bat index 61d2d98b7d4..10e8f727673 100644 --- a/mswindows/osgeo4w/postinstall.bat +++ b/mswindows/osgeo4w/postinstall.bat @@ -5,8 +5,8 @@ textreplace -std -t "%OSGEO4W_ROOT%\apps\grass\grass@POSTFIX@\etc\fontcap" for /F "tokens=* USEBACKQ" %%F IN (`getspecialfolder Documents`) do set DOCUMENTS=%%F -if not %OSGEO4W_MENU_LINKS%==0 xxmklink "%OSGEO4W_STARTMENU%\GRASS GIS @VERSION@.lnk" "%BATCH%" "--gui" "%DOCUMENTS%" "Launch GRASS GIS @VERSION@" 1 "%ICON%" -if not %OSGEO4W_DESKTOP_LINKS%==0 xxmklink "%OSGEO4W_DESKTOP%\GRASS GIS @VERSION@.lnk" "%BATCH%" "--gui" "%DOCUMENTS%" "Launch GRASS GIS @VERSION@" 1 "%ICON%" +if not %OSGEO4W_MENU_LINKS%==0 xxmklink "%OSGEO4W_STARTMENU%\GRASS GIS @VERSION@.lnk" "%BATCH%" "--gui" "%DOCUMENTS%" "Launch GRASS GIS @VERSION@" 1 "%ICON%" +if not %OSGEO4W_DESKTOP_LINKS%==0 xxmklink "%OSGEO4W_DESKTOP%\GRASS GIS @VERSION@.lnk" "%BATCH%" "--gui" "%DOCUMENTS%" "Launch GRASS GIS @VERSION@" 1 "%ICON%" rem run g.mkfontcap outside a GRASS session during rem an OSGeo4W installation for updating paths to fonts diff --git a/ps/ps.map/ps.map.html b/ps/ps.map/ps.map.html index 23fd71a8116..c4d1c5a00b6 100644 --- a/ps/ps.map/ps.map.html +++ b/ps/ps.map/ps.map.html @@ -14,7 +14,7 @@

    NOTES

    vpoints entry first. Raster maps are always drawn first, and only a single raster map (or 3 if part of a RGB group) may be used. -

    The hash character ('#') may be used at the beginning of a line +

    The hash character ('#') may be used at the beginning of a line to indicate that the line is a comment. Blank lines will also be ignored.

    Be aware that some mapping instructions require the end command @@ -29,7 +29,7 @@

    NOTES

    US-Letter sized paper at 600dpi, with 1" margins and the raster filling the entire page, the usable area on the page will be 6.5" x 9", which at 600 dots/inch is equivalent to a region of 3900 columns x 5400 rows (see -"g.region -p"). Any higher resolution settings will make the +"g.region -p"). Any higher resolution settings will make the output file larger, but with a consumer printer you probably won't be able to resolve any better detail in the hardcopy. @@ -41,14 +41,14 @@

    NOTES

    One point ("pixel") is 1/72 of an inch.

    For users wanting to use special characters (such as accented characters) it -is important to note that ps.map uses ISO-8859-1 encoding. +is important to note that ps.map uses ISO-8859-1 encoding. This means that your instructions file will have to be encoded in this encoding. If you normally work in a different encoding environment (such as -UTF-8), you have to transform your file to the ISO-8859-1 -encoding, for example by using the iconv utility: +UTF-8), you have to transform your file to the ISO-8859-1 +encoding, for example by using the iconv utility:

    -iconv -f UTF-8 -t ISO_8859-1 utf_file > iso_file
    +iconv -f UTF-8 -t ISO_8859-1 utf_file > iso_file
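A minimal end-to-end sketch of that conversion step (hypothetical file names; this assumes the usual input= and output= options of ps.map):

# convert the UTF-8 instructions file to ISO-8859-1, then render the map
iconv -f UTF-8 -t ISO_8859-1 instructions.utf8 > instructions.iso
ps.map input=instructions.iso output=map.ps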
     
    @@ -116,7 +116,7 @@

    Common instructions

    The name of the PostScript font. Fonts present in all PostScript implementations are: - + Times-Roman, Times-Italic, Times-Bold, @@ -129,7 +129,7 @@

    Common instructions

    Courier-Oblique, Courier-Bold, and -Courier-BoldOblique
    . +Courier-BoldOblique.
    The default is Helvetica.
    @@ -145,7 +145,7 @@

    Common instructions

    color name
    The following colors names are accepted by ps.map: - + aqua, black, blue, @@ -162,10 +162,10 @@

    Common instructions

    violet, white, yellow -
    . +.

    For vectors and some plotting commands you can also specify -'none' or 'R:G:B' (e.g '255:0:0'). +'none' or 'R:G:B' (e.g '255:0:0').
    yes|no @@ -188,26 +188,26 @@

    border

    Controls the border which is drawn around the map area.
     USAGE:  border [y|n]
    -	color color
    -	width #
    -	end
    +    color color
    +    width #
    +    end
     
The color may be either a standard GRASS color, an R:G:B triplet, or "none". The width is specified in points, unless followed by an "i" in which case it is measured in inches. The default is a black border box of width 1 point.

    The border can be turned off completely with the -"border n" instruction. In this case +"border n" instruction. In this case the end command should not be given as the main command will be treated as a single line instruction.

    This example would create a grey border 0.1" wide.

     EXAMPLE:
    -	border
    -	color grey
    -	width 0.1i
    -	end
    +    border
    +    color grey
    +    width 0.1i
    +    end
     

    @@ -216,20 +216,20 @@

    colortable

    Prints the color table legend for the raster map layer anywhere on the page.
    -USAGE:	colortable [y|n]
    -	where x y
    -	raster raster map
    -	range minimum maximum
    -	width table width
    -	height table height (FP legend only)
    -	cols table columns
    -	font font name
    -	fontsize font size
    -	color text color
    -	nodata [Y|n]
    -	tickbar [y|N]
    -	discrete [y|n]
    -	end
    +USAGE:    colortable [y|n]
    +    where x y
    +    raster raster map
    +    range minimum maximum
    +    width table width
    +    height table height (FP legend only)
    +    cols table columns
    +    font font name
    +    fontsize font size
    +    color text color
    +    nodata [Y|n]
    +    tickbar [y|N]
    +    discrete [y|n]
    +    end
     
    For a categorical (CELL) map the color table will create a legend displaying @@ -245,7 +245,7 @@

    colortable

    The default text color is black.

    Omitting the colortable instruction would result in no color table. -If the colortable is turned off with a "colortable N" +If the colortable is turned off with a "colortable N" instruction the end command should not be given as the main command will be treated as a single line instruction.

    @@ -298,7 +298,7 @@

    Floating point (FCELL and DCELL) Maps

    information, starting at the left margin, with 4 columns:
     EXAMPLE:
    -	colortable y
    +    colortable y
             cols 4
             width 4
             end
    @@ -310,18 +310,18 @@ 

    comments

    Prints comments anywhere on the page.
    -USAGE:	comments commentfile
    -	where x y
    -	font font name
    -	fontsize font size
    -	color text color
    -	end
    +USAGE:    comments commentfile
    +    where x y
    +    font font name
    +    fontsize font size
    +    color text color
    +    end
     
The default location is immediately below the last item printed, starting at the left margin. The default text color is black.

    If you wish to use parentheses spanning multiple lines you will need to quote them with a backslash to prevent the PostScript interpreter from -getting confused. e.g. '\(' and '\)' +getting confused. e.g. '\(' and '\)'

    This example prints in blue @@ -330,13 +330,13 @@

    comments

    the page, using a 15/72 inch Helvetica Bold font.
     EXAMPLE:
    -	raster vegetation
    -	comments veg.comments
    -	where 1.5 7.25
    -	font Helvetica Bold
    -	fontsize 15
    -	color blue
    -	end
    +    raster vegetation
    +    comments veg.comments
    +    where 1.5 7.25
    +    font Helvetica Bold
    +    fontsize 15
    +    color blue
    +    end
     
    Presumably, the file veg.comments @@ -350,7 +350,7 @@

    copies

    Specifies the number of copies to be printed.
    -USAGE:	copies n
    +USAGE:    copies n
     
    Each page will be printed n times.

    This instruction is identical to the copies command line parameter. @@ -361,13 +361,13 @@

    eps

    Places EPS (Encapsulated PostScript) pictures on the output map.
    -USAGE:	eps east north
    -	eps x% y%
    -	epsfile EPS file
    -	scale #
    -	rotate #
    -	masked [y|n]
    -	end
    +USAGE:    eps east north
    +    eps x% y%
    +    epsfile EPS file
    +    scale #
    +    rotate #
    +    masked [y|n]
    +    end
     
    The EPS picture location is entered in the main instruction line by giving either the map @@ -388,12 +388,12 @@

    eps

    in original file and would not be masked by the current mask.
     EXAMPLE:
    -	eps 456000 7890000
    -	epsfile ./epsf/logo.eps
    -	scale 3
    -	rotate 20
    -	masked n
    -	end
    +    eps 456000 7890000
    +    epsfile ./epsf/logo.eps
    +    scale 3
    +    rotate 20
    +    masked n
    +    end
     
    Of course, multiple EPS pictures may be drawn with multiple eps @@ -405,13 +405,13 @@

    geogrid

    Overlays a geographic grid onto the output map.
    -USAGE:	geogrid spacing unit
    -	color color
    -	numbers # [color]
    -	font font name
    -	fontsize font size
    -	width #
    -	end
    +USAGE:    geogrid spacing unit
    +    color color
    +    numbers # [color]
    +    font font name
    +    fontsize font size
    +    width #
    +    end
     
    The spacing and spacing unit of the geographic grid is given on the main instruction line. The spacing unit is given as one of d for @@ -435,10 +435,10 @@

    geogrid

    lines would be numbered with yellow numbers.
     EXAMPLE:
    -	geogrid 30 m
    -	color blue
    -	numbers 2 yellow
    -	end
    +    geogrid 30 m
    +    color blue
    +    numbers 2 yellow
    +    end
     

    @@ -447,7 +447,7 @@

    greyrast

    Selects a raster map layer for output in shades of grey.
    -USAGE:	greyrast mapname
    +USAGE:    greyrast mapname
     
    For each ps.map @@ -460,14 +460,14 @@

    grid

    Overlays a coordinate grid onto the output map.
    -USAGE:	grid spacing
    -	color color
    -	numbers # [color]
    -	cross cross size
    -	font font name
    -	fontsize font size
    -	width #
    -	end
    +USAGE:    grid spacing
    +    color color
    +    numbers # [color]
    +    cross cross size
    +    font font name
    +    fontsize font size
    +    width #
    +    end
     
    The spacing of the grid is given (in the geographic coordinate system units) on the main instruction line. The subsection instructions @@ -487,10 +487,10 @@

    grid

    lines would be numbered with red numbers.
     EXAMPLE:
    -	grid 10000
    -	color green
    -	numbers 2 red
    -	end
    +    grid 10000
    +    color green
    +    numbers 2 red
    +    end
     

    @@ -499,7 +499,7 @@

    group

    Selects an RGB imagery group for output.
    -USAGE:	group groupname
    +USAGE:    group groupname
     
    This is similar to raster, except that it uses an imagery group instead of a raster map layer. The group must contain three raster map @@ -511,12 +511,12 @@

    header

    Prints the map header above the map.
    -USAGE:	header
    -	file header file
    -	font font name
    -	fontsize font size
    -	color text color
    -	end
    +USAGE:    header
    +    file header file
    +    font font name
    +    fontsize font size
    +    color text color
    +    end
     
    If the file sub-instruction is absent the header will consist of the map's title @@ -527,18 +527,17 @@

    header

    of the text in the text file specified, with some special formatting keys:
      -
    • %% - a literal % -
    • %n - ? newline ? -
    • %_ - horizontal bar -
    • %c - "<raster name> in mapset <mapset name>" -
    • %d - today's date -
    • %l - project name -
    • %L - project's text description -
    • %m - mapset name -
    • %u - user name -
    • %x - mask info -
    • %- - advance to this character column number (see example below) - +
    • %% - a literal %
    • +
    • %n - ? newline ?
    • +
    • %_ - horizontal bar
    • +
    • %c - "<raster name> in mapset <mapset name>"
    • +
    • %d - today's date
    • +
    • %l - project name
    • +
    • %L - project's text description
    • +
    • %m - mapset name
    • +
    • %u - user name
    • +
    • %x - mask info
    • +
    • %- - advance to this character column number (see example below)
    Example header file: @@ -557,15 +556,15 @@

    header

    This example prints (in red) whatever is in the file soils.hdr above -the map, using a 20/72 inch Courier font. +the map, using a 20/72 inch Courier font.

     EXAMPLE:
    -	header
    -	file soils.hdr
    -	font Courier
    -	fontsize 20
    -	color red
    -	end
    +    header
    +    file soils.hdr
    +    font Courier
    +    fontsize 20
    +    color red
    +    end
     

    @@ -577,9 +576,9 @@

    labels

    v.label ).
    -USAGE:	labels  labelfile
    -	font font name
    -	end
    +USAGE:    labels  labelfile
    +    font font name
    +    end
     

    NOTE: ps.map can read new option 'ROTATE:' from labels file, which specifies counter clockwise rotation in degrees. @@ -588,8 +587,8 @@

    labels

    towns on the map.
     EXAMPLE:
    -	labels town.names
    -	end
    +    labels town.names
    +    end
     

    @@ -598,12 +597,12 @@

    line

    Draws lines on the output map.
    -USAGE:	line east north east north
    -	line x% y% x% y%
    -	color color
    -	width #
    -	masked [y|n]
    -	end
    +USAGE:    line east north east north
    +    line x% y% x% y%
    +    color color
    +    width #
    +    masked [y|n]
    +    end
     
    The beginning and ending points of the line are entered on the main instruction. These points can be defined either by map coordinates or @@ -623,11 +622,11 @@

    line

    there is a mask.
     EXAMPLE:
    -	line 10% 80% 30% 70%
    -	color yellow
    -	width 2
    -	masked n
    -	end
    +    line 10% 80% 30% 70%
    +    color yellow
    +    width 2
    +    masked n
    +    end
     
    Of course, multiple lines may be drawn with multiple line @@ -640,14 +639,14 @@

    mapinfo

    Prints the portion of the map legend containing the scale, grid and region information, on or below the map.
    -USAGE:	mapinfo
    -	where x y
    -	font font name
    -	fontsize font size
    -	color text color
    -	background box color|none
    -	border color|none
    -	end
    +USAGE:    mapinfo
    +    where x y
    +    font font name
    +    fontsize font size
    +    color text color
    +    background box color|none
    +    border color|none
    +    end
     
    The default location is immediately below the map, starting at the left edge of the map. @@ -658,16 +657,16 @@

    mapinfo

    This example prints (in brown) the scale, grid and region information immediately below the map and starting 1.5 inches from the left edge -of the page, using a 12/72 inch Courier font. +of the page, using a 12/72 inch Courier font.

     EXAMPLE:
    -	mapinfo
    -	where 1.5 0
    -	font Courier
    -	fontsize 12
    -	color brown
    -	end
    +    mapinfo
    +    where 1.5 0
    +    font Courier
    +    fontsize 12
    +    color brown
    +    end
     

    @@ -676,7 +675,7 @@

    maploc

    Positions the map on the page.
    -USAGE:	maploc  x y [width height]
    +USAGE:    maploc  x y [width height]
     
    The upper left corner of the map will be positioned x inches from the left edge of the page and y inches from the top of the page. @@ -688,7 +687,7 @@

    maploc

    the left edge and 3.5 inches from the top edge of the map.
     EXAMPLE:
    -	maploc 2.0 3.5
    +    maploc 2.0 3.5
     

    @@ -697,7 +696,7 @@

    maskcolor

    Color to be used for mask.
    -USAGE:	maskcolor  color
    +USAGE:    maskcolor  color
     
    @@ -706,10 +705,10 @@

    outline

    Outlines the areas of a raster map layer with a specified color.
    -USAGE:	outline
    -	color  color
    -	width  width of line in points
    -	end
    +USAGE:    outline
    +    color  color
    +    width  width of line in points
    +    end
     
    Distinct areas of the raster map will be separated from each other visually by drawing a border (or outline) in the specified @@ -730,11 +729,11 @@

    outline

    in grey.
     EXAMPLE:
    -	raster soils
    -	outline
    -	color grey
    -	width 2
    -	end
    +    raster soils
    +    outline
    +    color grey
    +    width 2
    +    end
     

    @@ -743,14 +742,14 @@

    paper

    Specifies paper size and margins.
    -USAGE:	paper paper name
    -	height #
    -	width #
    -	left #
    -	right #
    -	bottom #
    -	top #
    -	end
    +USAGE:    paper paper name
    +    height #
    +    width #
    +    left #
    +    right #
    +    bottom #
    +    top #
    +    end
     
    paper may select predefined paper name (a4,a3,a2,a1,a0,us-legal,us-letter,us-tabloid). @@ -761,20 +760,20 @@

    paper

     EXAMPLE:
    -	paper a3
    -	end
    +    paper a3
    +    end
     

     EXAMPLE:
    -	paper
    -	width 10
    -	height 10
    -	left 2
    -	right 2
    -	bottom 2
    -	top 2
    -	end
    +    paper
    +    width 10
    +    height 10
    +    left 2
    +    right 2
    +    bottom 2
    +    top 2
    +    end
     

    @@ -783,16 +782,16 @@

    point

    Places additional points or icons on the output map.
    -USAGE:	point east north
    -	point x% y%
    -	color color
    -	fcolor color
    -	symbol symbol group/name
    -	size #
    -	width #
    -	rotate #
    -	masked [y|n]
    -	end
    +USAGE:    point east north
    +    point x% y%
    +    color color
    +    fcolor color
    +    symbol symbol group/name
    +    size #
    +    width #
    +    rotate #
    +    masked [y|n]
    +    end
     
    The point location is entered in the main instruction line by giving either the map coordinates or by using percentages of the geographic region. @@ -811,13 +810,13 @@

    point

    the size of a 15 points and would not be masked by the current mask.
     EXAMPLE:
    -	point 456000 7890000
    -	fcolor purple
    -	color black
    -	symbol basic/diamond
    -	size 15
    -	masked n
    -	end
    +    point 456000 7890000
    +    fcolor purple
    +    color black
    +    symbol basic/diamond
    +    size 15
    +    masked n
    +    end
     
    Of course, multiple points may be drawn with multiple point @@ -836,12 +835,12 @@

    psfile

    correct directory or specify the full path on the psfile instruction. (Note to /bin/csh users: ~ won't work with this instruction).
    -USAGE:	psfile filename
    +USAGE:    psfile filename
     
    This example copies the file "logo.ps" into the output file.
     EXAMPLE:
    -	psfile logo.ps
    +    psfile logo.ps
     

    @@ -850,7 +849,7 @@

    raster

    Selects a raster map layer for output.
    -USAGE:	raster mapname
    +USAGE:    raster mapname
     
    For each ps.map run, only one raster map layer (or set of layers or imagery group; see below) can be requested. If no @@ -867,7 +866,7 @@

    raster

     EXAMPLE:
    -	raster soils
    +    raster soils
     

    @@ -876,7 +875,7 @@

    read

    Provides ps.map with a previously prepared input stream.
    -USAGE:	read previously prepared UNIX file
    +USAGE:    read previously prepared UNIX file
     
    Mapping instructions can be placed into a file and read into ps.map. @@ -894,7 +893,7 @@

    read

    the vector map layer roads onto the output map.
     EXAMPLE:
    -	read pmap.roads
    +    read pmap.roads
     
    The user may have created this file because this vector map layer is particularly useful for many ps.map @@ -908,13 +907,13 @@

    rectangle

    Draws rectangle on the output map.
    -USAGE:	rectangle east north east north
    -	rectangle x% y% x% y%
    -	color color
    -	fcolor fill color
    -	width #
    -	masked [y|n]
    -	end
    +USAGE:    rectangle east north east north
    +    rectangle x% y% x% y%
    +    color color
    +    fcolor fill color
    +    width #
    +    masked [y|n]
    +    end
     
    The two corners of the rectangle are entered on the main instruction. These points can be defined either by map coordinates or @@ -936,12 +935,12 @@

    rectangle

    The border line would be 1/16" wide and would appear even if there is a mask.
     EXAMPLE:
    -	rectangle 10% 80% 30% 70%
    -	color yellow
    -	fcolor green
    -	width 0.0625i
    -	masked n
    -	end
    +    rectangle 10% 80% 30% 70%
    +    color yellow
    +    fcolor green
    +    width 0.0625i
    +    masked n
    +    end
     

    @@ -951,10 +950,10 @@

    region

    Places the outline of a smaller geographic region on the output.
    -USAGE:	region regionfile
    -	color color
    -	width #
    -	end
    +USAGE:    region regionfile
    +    color color
    +    width #
    +    end
     
    Geographic region settings are created and saved using the g.region module. @@ -971,10 +970,10 @@

    region

    g.region.
     EXAMPLE:
    -	region fire.zones
    -	color white
    -	width 2
    -	end
    +    region fire.zones
    +    color white
    +    width 2
    +    end
     

    @@ -983,7 +982,7 @@

    rgb

    Selects three raster map layers for output as an RGB color image.
    -USAGE:	rgb red green blue
    +USAGE:    rgb red green blue
     
    This is similar to raster, except that it uses three raster map layers instead of a single layer. The three layers @@ -1001,7 +1000,7 @@

    scale

    Selects a scale for the output map.
    -USAGE:	scale scale
    +USAGE:    scale scale
     
    The scale can be selected either as:
    @@ -1022,7 +1021,7 @@

    scale

    units.
     EXAMPLE:
    -	scale 1:25000
    +    scale 1:25000
     

    @@ -1031,16 +1030,16 @@

    scalebar

    Draws a scalebar on the map.
    -USAGE:	scalebar [f|s]
    -	where x y
    -	length overall distance in map units
    -	units [auto|meters|kilometers|feet|miles|nautmiles]
    -	height scale height in inches
    -	segment number of segments
    -	numbers #
    -	fontsize font size
    -	background [Y|n]
    -	end
    +USAGE:    scalebar [f|s]
    +    where x y
    +    length overall distance in map units
    +    units [auto|meters|kilometers|feet|miles|nautmiles]
    +    height scale height in inches
    +    segment number of segments
    +    numbers #
    +    fontsize font size
    +    background [Y|n]
    +    end
     
    Draw one of two types of scale bar. Fancy (f) draws alternating black and white scale boxes. @@ -1067,13 +1066,13 @@

    scalebar

    and is 0.25 inches high.
     EXAMPLE:
    -	scalebar s
    -	where 4 5
    -	length 1000
    -	height 0.25
    -	segment 5
    -	numbers 2
    -	end
    +    scalebar s
    +    where 4 5
    +    length 1000
    +    height 0.25
    +    segment 5
    +    numbers 2
    +    end
     
    @@ -1083,23 +1082,23 @@

    setcolor

    Overrides the color assigned to one or more categories of the raster map layer.
    -USAGE:	setcolor cat(s) color
    +USAGE:    setcolor cat(s) color
     
    This example would set the color for categories 2,5 and 8 of the raster map layer watersheds to white and category 10 to green. (NOTE: no spaces are inserted between the category values.)
     EXAMPLE:
    -	raster watersheds
    -	setcolor 2,5,8 white
    -	setcolor 10 green
    +    raster watersheds
    +    setcolor 2,5,8 white
    +    setcolor 10 green
     
    Of course, setcolor can be requested more than once to override the default color for additional categories. More than one category can be changed for each request by listing all the category values separated by commas (but with no spaces). Also ranges -can be included, for example "1,2,6-10,12". Colors for "null" and the -"default" (i.e. out-of-range) color may also be reassigned. +can be included, for example "1,2,6-10,12". Colors for "null" and the +"default" (i.e. out-of-range) color may also be reassigned.

    @@ -1107,23 +1106,23 @@

    text

    Places text on the map.
    -USAGE:	text  east north text
    -	text  x% y% text
    -	font fontname
    -	color color|none
    -	width #
    -	hcolor color|none
    -	hwidth #
    -	background color|none
    -	border color|none
    -	fontsize font size
    -	size #
    -	ref reference point
    -	rotate degrees CCW
    -	xoffset #
    -	yoffset #
    -	opaque [y|n]
    -	end
    +USAGE:    text  east north text
    +    text  x% y% text
    +    font fontname
    +    color color|none
    +    width #
    +    hcolor color|none
    +    hwidth #
    +    background color|none
    +    border color|none
    +    fontsize font size
    +    size #
    +    ref reference point
    +    rotate degrees CCW
    +    xoffset #
    +    yoffset #
    +    opaque [y|n]
    +    end
     
    The user specifies where the text will be placed by providing map coordinates or percentages of the geographic region. @@ -1134,7 +1133,7 @@

    text

    The user can then specify various text features:

    font: the PostScript font. Common possibilities are listed at the start of this -help page. The default is Helvetica. +help page. The default is Helvetica.

    color (see NAMED COLORS);

    width @@ -1180,18 +1179,18 @@

    text

    vectors on the map would stop at the border of this text.
     EXAMPLE:
    -	text 650000 7365000 SPEARFISH LAND COVER
    -	font romand
    -	color red
    -	width 2
    -	hcolor black
    -	hwidth 1
    -	background white
    -	border red
    -	size 500
    -	ref lower left
    -	opaque y
    -	end
    +    text 650000 7365000 SPEARFISH LAND COVER
    +    font romand
    +    color red
    +    width 2
    +    hcolor black
    +    hwidth 1
    +    background white
    +    border red
    +    size 500
    +    ref lower left
    +    opaque y
    +    end
     

    @@ -1200,21 +1199,21 @@

    vareas

    Selects a vector map layer for output and plots areas.
    -USAGE:	vareas vectormap
    -	layer # (layer number used with cats/where option)
    -	cats list of categories (e.g. 1,3,5-7)
    -	where SQL where statement
    -	masked [y|n]
    -	color color
    -	fcolor color
    -	rgbcolumn column
    -	width #
    -	label label to use in legend
    -	lpos position in legend
    -	pat pattern file
    -	pwidth #
    -	scale #
    -	end
    +USAGE:    vareas vectormap
    +    layer # (layer number used with cats/where option)
    +    cats list of categories (e.g. 1,3,5-7)
    +    where SQL where statement
    +    masked [y|n]
    +    color color
    +    fcolor color
    +    rgbcolumn column
    +    width #
    +    label label to use in legend
    +    lpos position in legend
    +    pat pattern file
    +    pwidth #
    +    scale #
    +    end
     
    The user can specify:

    color - color of the vector lines or area boundaries; @@ -1228,7 +1227,7 @@

    vareas

    for more information on the mask)

    cats - which categories should be plotted (default is all);

    where - select features using a SQL where statement. -For example: vlastnik = 'Cimrman'; +For example: vlastnik = 'Cimrman';

    label - for description in vlegend. Default is: map(mapset);

    lpos - position vector is plotted in legend. If lpos is @@ -1243,7 +1242,7 @@

    vareas

    until overwritten in the pattern file -->. Color of the boundaries remain set by the color instruction. Pattern may be scaled with the scale command. Several standard hatching -patterns are provided in $GISBASE/etc/paint/patterns/. +patterns are provided in $GISBASE/etc/paint/patterns/. Demonstrative images can be found on the GRASS Wiki site. @@ -1266,12 +1265,12 @@

    vareas

     EXAMPLE:
    -	vareas forest
    -	color blue
    -	width 1
    -	masked y
    -	cats 2,5-7
    -	end
    +    vareas forest
    +    color blue
    +    width 1
    +    masked y
    +    cats 2,5-7
    +    end
     

    @@ -1280,26 +1279,26 @@

    vlines

    Selects a vector map layer for output and plots lines.
    -USAGE:	vlines vectormap
    -	type line and/or boundary
    -	layer # (layer number used with cats/where option)
    -	cats list of categories (e.g. 1,3,5-7)
    -	where SQL where statement like: vlastnik = 'Cimrman'
    -	masked [y|n]
    -	color color
    -	rgbcolumn column
    -	width #
    -	cwidth #
    -	hcolor color
    -	hwidth #
    -	offset #
    -	coffset #
    -	ref left|right
    -	style 00001111
    -	linecap style
    -	label label
    -	lpos #
    -	end
    +USAGE:    vlines vectormap
    +    type line and/or boundary
    +    layer # (layer number used with cats/where option)
    +    cats list of categories (e.g. 1,3,5-7)
    +    where SQL where statement like: vlastnik = 'Cimrman'
    +    masked [y|n]
    +    color color
    +    rgbcolumn column
    +    width #
    +    cwidth #
    +    hcolor color
    +    hwidth #
    +    offset #
    +    coffset #
    +    ref left|right
    +    style 00001111
    +    linecap style
    +    label label
    +    lpos #
    +    end
     
    The user can specify:

    type - the default is lines only; @@ -1348,15 +1347,15 @@

    vlines

     EXAMPLE:
    -	vlines streams
    -	color blue
    -	width 2
    -	hcolor white
    -	hwidth 1
    -	masked y
    -	cats 2
    -	label Streams - category 2
    -	end
    +    vlines streams
    +    color blue
    +    width 2
    +    hcolor white
    +    hwidth 1
    +    masked y
    +    cats 2
    +    label Streams - category 2
    +    end
     

    @@ -1365,26 +1364,26 @@

    vpoints

    Selects vector point data to be placed on the output map
    -USAGE:	vpoints vectormap
    -	type point and/or centroid
    -	layer # (layer number used with cats/where/sizecol options)
    -	cats list of categories (e.g. 1,3,5-7)
    -	where SQL where statement like: vlastnik = 'Cimrman'
    -	masked [y|n]
    -	color color
    -	fcolor color
    -	rgbcolumn column
    -	width #
    -	eps epsfile
    -	symbol symbol group/name
    -	size #
    -	sizecolumn attribute column used for symbol sizing
    -	scale scaling factor for sizecolumn values
    -	rotate #
    -	rotatecolumn column
    -	label legend label
    -	lpos position in legend
    -	end
    +USAGE:    vpoints vectormap
    +    type point and/or centroid
    +    layer # (layer number used with cats/where/sizecol options)
    +    cats list of categories (e.g. 1,3,5-7)
    +    where SQL where statement like: vlastnik = 'Cimrman'
    +    masked [y|n]
    +    color color
    +    fcolor color
    +    rgbcolumn column
    +    width #
    +    eps epsfile
    +    symbol symbol group/name
    +    size #
    +    sizecolumn attribute column used for symbol sizing
    +    scale scaling factor for sizecolumn values
    +    rotate #
    +    rotatecolumn column
    +    label legend label
    +    lpos position in legend
    +    end
     
The user may specify the color of the sites (see section on NAMED COLORS); @@ -1403,11 +1402,11 @@

    vpoints

     EXAMPLE:
    -	vpoints windmills
    -	color blue
    -	symbol mills/windmill
    -	size 10
    -	end
    +    vpoints windmills
    +    color blue
    +    symbol mills/windmill
    +    size 10
    +    end
     

    @@ -1417,15 +1416,15 @@

    vlegend

    vector information, on or below the map.
    -USAGE:	vlegend
    -	where x y
    -	font font name
    -	fontsize font size
    -	width width of color symbol
    -	cols number of columns to print
    -	span column separation
    -	border color|none
    -	end
    +USAGE:    vlegend
    +    where x y
    +    font font name
    +    fontsize font size
    +    width width of color symbol
    +    cols number of columns to print
    +    span column separation
    +    border color|none
    +    end
     
    The default location is immediately below the legend containing the scale, grid and region information, starting at the left edge of the map. @@ -1454,11 +1453,11 @@

    vlegend

     EXAMPLE:
    -	vlegend
    -	where 4.5 0
    -	font Courier
    -	fontsize 12
    -	end
    +    vlegend
    +    where 4.5 0
    +    font Courier
    +    fontsize 12
    +    end
     

    @@ -1468,7 +1467,7 @@

    end

    Terminates input and begin painting the map.
    -USAGE:	end
    +USAGE:    end
     

    @@ -1499,7 +1498,7 @@

    EXAMPLES

    - +

Figure: Result of the simple Wake county terrain and roads example

    @@ -1585,7 +1584,7 @@

    More complicated example

    - +

Figure: Result of the more complicated Wake county, NC example

    diff --git a/ps/ps.map/ps_vlegend.c b/ps/ps.map/ps_vlegend.c index dd99720a5fc..839f76cf3c5 100644 --- a/ps/ps.map/ps_vlegend.c +++ b/ps/ps.map/ps_vlegend.c @@ -281,5 +281,9 @@ int PS_vlegend(void) if (PS.min_y > y) PS.min_y = y; + G_free(nvec); + for (i = 0; i < vector.count; i++) + G_free(vec[i]); + G_free(vec); return 0; } diff --git a/ps/ps.map/ps_vlines.c b/ps/ps.map/ps_vlines.c index c5aced7c51b..fb3bc2fa760 100644 --- a/ps/ps.map/ps_vlines.c +++ b/ps/ps.map/ps_vlines.c @@ -206,5 +206,8 @@ int PS_vlines_plot(struct Map_info *P_map, int vec, int type) } fprintf(PS.fp, "\n"); fprintf(PS.fp, "0 setlinejoin\n"); /* reset line join to miter */ + Vect_destroy_line_struct(Points); + Vect_destroy_line_struct(nPoints); + Vect_destroy_cats_struct(Cats); return 0; } diff --git a/ps/ps.map/ps_vpoints.c b/ps/ps.map/ps_vpoints.c index cd320b46f11..8e00a2ad3de 100644 --- a/ps/ps.map/ps_vpoints.c +++ b/ps/ps.map/ps_vpoints.c @@ -269,5 +269,7 @@ int PS_vpoints_plot(struct Map_info *P_map, int vec) fprintf(PS.fp, "\n"); Vect_destroy_cats_struct(Cats); + Vect_destroy_line_struct(Points); + G_free(Symb); return 0; } diff --git a/pyproject.toml b/pyproject.toml index d739653809e..2300282ad27 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,11 @@ [project] name = "grass" requires-python = ">=3.9" +# *GRASS TODO: keep in sync with MIN_PYTHON_VERSION supported* [tool.black] -required-version = '24' line-length = 88 +required-version = '24' target-version = ['py39', 'py310', 'py311', 'py312', 'py313'] # 'extend-exclude' excludes files or directories in addition to the defaults extend-exclude = ''' @@ -14,7 +15,7 @@ extend-exclude = ''' ''' [tool.ruff] -required-version = ">=0.6.0" +required-version = ">=0.8.0" builtins = ["_"] @@ -33,8 +34,13 @@ select = [ "C4", # flake8-comprehensions (C4) "COM", # flake8-commas (COM) "D", # pydocstyle (D) + "D202", # pydocstyle (D) blank-line-after-function + "D209", # pydocstyle (D) new-line-after-last-paragraph + "D211", # pydocstyle (D) blank-line-before-class + "D212", # pydocstyle (D) multi-line-summary-first-line "DTZ", # flake8-datetimez (DTZ) "E", # pycodestyle (E, W) + "EM", # flake8-errmsg (EM) "F", # Pyflakes (F) "FA", # flake8-future-annotations (FA) "FBT", # flake8-boolean-trap (FBT) @@ -101,19 +107,22 @@ ignore = [ "COM812", # missing-trailing-comma "COM818", # trailing-comma-on-bare-tuple "D1", - "D2", - "D300", # triple-single-quotes + "D200", # unnecessary-multiline-docstring + "D202", # blank-line-after-function (selected) + "D203", # incorrect-blank-line-before-class (ignored, use D211) + "D205", # missing-blank-line-after-summary + "D209", # new-line-after-last-paragraph (selected) + "D212", # multi-line-summary-first-line (selected) + "D213", # multi-line-summary-second-line (ignored, use D212) + "D214", # overindented-section "D301", # escape-sequence-in-docstring "D400", # ends-in-period "D401", # non-imperative-mood - "D402", # no-signature "D403", # first-line-capitalized "D404", # docstring-starts-with-this "D405", # capitalize-section-name - "D406", # new-line-after-section-name "D407", # dashed-underline-after-section "D409", # section-underline-matches-section-length - "D411", # no-blank-line-before-section "D412", # blank-lines-between-header-and-content "D413", # blank-line-after-last-section "D415", # ends-in-punctuation @@ -125,11 +134,9 @@ ignore = [ "DTZ006", # call-datetime-fromtimestamp "DTZ007", # call-datetime-strptime-without-zone "DTZ011", # call-date-today - "E402", # module-import-not-at-top-of-file "E501", # 
line-too-long "E721", # type-comparison "E722", # bare-except - "E731", # lambda-assignment "E741", # ambiguous-variable-name "F403", # undefined-local-with-import-star "F405", # undefined-local-with-import-star-usage @@ -147,7 +154,6 @@ ignore = [ "N803", # invalid-argument-name "N806", # non-lowercase-variable-in-function "N812", # lowercase-imported-as-non-lowercase - "N814", # camelcase-imported-as-constant "N815", # mixed-case-variable-in-class-scope "N816", # mixed-case-variable-in-global-scope "N818", # error-suffix-on-exception-name @@ -183,7 +189,6 @@ ignore = [ "PLW1641", # eq-without-hash "PLW2901", # redefined-loop-name "PLW3201", # bad-dunder-method-name - "PT004", # pytest-missing-fixture-name-underscore # deprecated, so doesn't appear with --preview "PTH100", # os-path-abspath "PTH101", # os-chmod "PTH102", # os-mkdir @@ -219,7 +224,6 @@ ignore = [ "S108", # hardcoded-temp-file "S110", # try-except-pass "S112", # try-except-continue - "S113", # request-without-timeout "S202", # tarfile-unsafe-members "S307", # suspicious-eval-usage "S310", # suspicious-url-open-usage @@ -239,12 +243,10 @@ ignore = [ "SIM102", # collapsible-if "SIM105", # suppressible-exception "SIM113", # enumerate-for-loop - "SIM116", # if-else-block-instead-of-dict-lookup "SIM118", # in-dict-keys "SIM223", # expr-and-false "SLF001", # private-member-access "TRY002", # raise-vanilla-class - "TRY003", # raise-vanilla-args "TRY004", # type-check-without-type-error "TRY201", # verbose-raise "TRY300", # try-consider-else @@ -260,79 +262,98 @@ ignore = [ # See https://docs.astral.sh/ruff/settings/#lint_per-file-ignores # "A005", # builtin-module-shadowing # "PLW0108", # unnecessary-lambda -# Ignore `E402` (import violations) in all `__init__.py` files +# "SIM115", # open-file-with-context-handler +# Ignored after reverting: +"gui/**" = ["PLW0108"] # See https://github.com/OSGeo/grass/issues/4124 +# Other ignores: +"**.py" = ["PYI066"] "*/testsuite/**.py" = ["PT009", "PT027"] -"__init__.py" = ["E402"] "display/d.mon/render_cmd.py" = ["SIM115"] -"gui/**" = ["PLW0108"] # See https://github.com/OSGeo/grass/issues/4124 "gui/wxpython/animation/temporal_manager.py" = ["SIM115"] "gui/wxpython/core/*.py" = ["SIM115"] +"gui/wxpython/core/globalvar.py" = ["PTH208"] +"gui/wxpython/core/settings.py" = ["PTH208"] +"gui/wxpython/datacatalog/catalog.py" = ["PTH208"] "gui/wxpython/dbmgr/base.py" = ["SIM115"] -"gui/wxpython/gcp/manager.py" = ["SIM115"] +"gui/wxpython/gcp/manager.py" = ["PTH208", "SIM115"] "gui/wxpython/gmodeler/*.py" = ["SIM115"] "gui/wxpython/gui_core/*.py" = ["SIM115"] -"gui/wxpython/iclass/frame*.py" = ["SIM115"] -"gui/wxpython/iclass/frame.py" = ["FLY002"] +"gui/wxpython/gui_core/dialogs.py" = ["PTH208"] +"gui/wxpython/iclass/frame.py" = ["FLY002", "SIM115"] "gui/wxpython/iclass/statistics.py" = ["A005"] +"gui/wxpython/icons/grass_icons.py" = ["PTH208"] "gui/wxpython/image2target/*.py" = ["SIM115"] -"gui/wxpython/iscatt/plots.py" = ["PLW0108"] +"gui/wxpython/image2target/ii2t_manager.py" = ["PTH208"] "gui/wxpython/lmgr/workspace.py" = ["SIM115"] "gui/wxpython/location_wizard/wizard.py" = ["SIM115"] "gui/wxpython/mapdisp/main.py" = ["SIM115"] "gui/wxpython/modules/colorrules.py" = ["SIM115"] "gui/wxpython/modules/mcalc_builder.py" = ["SIM115"] -"gui/wxpython/photo2image/*.py" = ["SIM115"] -"gui/wxpython/psmap/*.py" = ["SIM115"] +"gui/wxpython/photo2image/ip2i_manager.py" = ["SIM115"] +"gui/wxpython/psmap/dialogs.py" = ["PTH208"] +"gui/wxpython/psmap/frame.py" = ["SIM115"] +"gui/wxpython/psmap/instructions.py" 
= ["SIM115"] "gui/wxpython/psmap/utils.py" = ["PGH004"] "gui/wxpython/rdigit/controller.py" = ["SIM115"] "gui/wxpython/rlisetup/*.py" = ["SIM115"] +"gui/wxpython/rlisetup/frame.py" = ["PTH208"] "gui/wxpython/timeline/frame.py" = ["FLY002"] "gui/wxpython/tools/update_menudata.py" = ["SIM115"] "gui/wxpython/tplot/frame.py" = ["FLY002"] "gui/wxpython/vdigit/mapwindow.py" = ["SIM115"] -"gui/wxpython/vnet/*.py" = ["SIM115"] +"gui/wxpython/vnet/vnet_core.py" = ["SIM115"] +"gui/wxpython/vnet/vnet_data.py" = ["SIM115"] +"gui/wxpython/vnet/widgets.py" = ["SIM115"] "gui/wxpython/web_services/dialogs.py" = ["SIM115"] -"gui/wxpython/wxplot/profile*.py" = ["SIM115"] -"gui/wxpython/wxplot/profile.py" = ["A005"] +"gui/wxpython/wxplot/profile.py" = ["A005", "SIM115"] "imagery/i.atcorr/create_iwave.py" = ["SIM115"] "lib/imagery/testsuite/test_imagery_signature_management.py" = ["SIM115"] "lib/imagery/testsuite/test_imagery_sigsetfile.py" = ["FURB152"] "lib/init/grass.py" = ["SIM115"] +"lib/init/testsuite/test_grass_tmp_mapset.py" = ["PTH208"] "locale/grass_po_stats.py" = ["SIM115"] -"man/build_*.py" = ["SIM115"] -"man/parser_standard_options.py" = ["SIM115"] +"man/build.py" = ["PTH208"] +"man/build_class_graphical.py" = ["PTH208"] +"man/build_manual_gallery.py" = ["PTH208"] +"man/build_rest.py" = ["PTH208"] "python/grass/__init__.py" = ["PYI056"] "python/grass/exp*/tests/grass_script_mapset_session_test.py" = ["SIM117"] "python/grass/exp*/tests/grass_script_tmp_mapset_session_test.py" = ["SIM117"] "python/grass/gunittest/case.py" = ["PT009"] -"python/grass/gunittest/loader.py" = ["PYI024"] +"python/grass/gunittest/loader.py" = ["PTH208", "PYI024"] "python/grass/gunittest/multireport.py" = ["PYI024"] "python/grass/gunittest/testsu*/d*/s*/s*/subsub*/t*/test_segfaut.py" = ["B018"] "python/grass/gunittest/testsuite/test_assertions_rast3d.py" = ["FLY002"] "python/grass/imaging/images2*.py" = ["SIM115"] +"python/grass/imaging/images2ims.py" = ["PTH208"] "python/grass/jupyter/testsuite/interactivemap_test.py" = ["PGH004"] "python/grass/jupyter/testsuite/map_test.py" = ["PGH004"] "python/grass/pydispatch/signal.py" = ["A005"] -"python/grass/pygrass/modules/grid/grid.py" = ["SIM115"] +"python/grass/pygrass/gis/__init__.py" = ["PTH208"] +"python/grass/pygrass/modules/grid/grid.py" = ["PTH208", "SIM115"] "python/grass/pygrass/modules/grid/testsuite/test_*_modules_grid_doctests.py" = ["F401"] "python/grass/pygrass/modules/interface/env.py" = ["SIM115"] "python/grass/pygrass/modules/testsuite/test_pygrass_modules_doctests.py" = ["F401"] +"python/grass/pygrass/raster/category.py" = ["FURB189"] "python/grass/pygrass/raster/segment.py" = ["SIM115"] "python/grass/pygrass/tests/*.py" = ["SIM115"] +"python/grass/pygrass/utils.py" = ["PTH208"] "python/grass/pygrass/vector/geometry.py" = ["PYI024"] "python/grass/pygrass/vector/sql.py" = ["FLY002"] "python/grass/pygrass/vector/testsuite/test_table.py" = ["PLW0108"] "python/grass/script/array.py" = ["A005"] -"python/grass/script/core.py" = ["SIM115"] +"python/grass/script/core.py" = ["PTH208"] "python/grass/script/db.py" = ["SIM115"] "python/grass/script/raster.py" = ["SIM115"] -"python/grass/script/utils.py" = ["SIM115"] +"python/grass/script/utils.py" = ["FURB189", "SIM115"] "python/grass/temporal/aggregation.py" = ["SIM115"] "python/grass/temporal/register.py" = ["SIM115"] "python/grass/temporal/stds_export.py" = ["SIM115"] "python/grass/temporal/stds_import.py" = ["SIM115"] +"python/grass/temporal/temporal_algebra.py" = ["D300"] +"python/grass/temporal/temporal_operator.py" 
= ["D300"] "python/grass/temporal/univar_statistics.py" = ["SIM115"] -"python/grass/utils/download.py" = ["SIM115"] +"python/grass/utils/download.py" = ["PTH208", "SIM115"] "raster/r.*/testsuite/*.py" = ["SIM115"] "raster/r.topidx/*.py" = ["SIM115"] "raster3d/r3.flow/testsuite/r3flow_test.py" = ["FLY002"] @@ -343,8 +364,10 @@ ignore = [ "scripts/db.in.ogr/db.in.ogr.py" = ["SIM115"] "scripts/db.test/db.test.py" = ["SIM115"] "scripts/db.univar/db.univar.py" = ["SIM115"] +"scripts/g.download.project/g.download.project.py" = ["PTH208"] "scripts/g.extension.all/g.extension.all.py" = ["SIM115"] -"scripts/g.extension/g.extension.py" = ["SIM115"] +"scripts/g.extension/g.extension.py" = ["PTH208", "SIM115"] +"scripts/g.extension/testsuite/test_addons_modules.py" = ["PTH208"] "scripts/g.search.modules/g.search.modules.py" = ["SIM115"] "scripts/i.in.spotvgt/i.in.spotvgt.py" = ["SIM115"] "scripts/i.oif/i.oif*.py" = ["SIM115"] @@ -367,18 +390,17 @@ ignore = [ "temporal/t.register/testsuite/test_t_register_raster.py" = ["FLY002"] "temporal/t.register/testsuite/test_t_register_raster_file.py" = ["FLY002"] "temporal/t.remove/t.remove.py" = ["SIM115"] -"temporal/t.unregister/t.unregister.py" = ["SIM115"] "utils/**.py" = ["SIM115"] "utils/generate_release_notes.py" = ["PGH004"] +"utils/thumbnails.py" = ["PTH208"] "vector/v.fill.holes/examples.ipynb" = ["PTH201"] [tool.ruff.lint.flake8-import-conventions.extend-aliases] # Declare a custom aliases, checked with rule ICN001 "grass.script" = "gs" +"grass.temporal" = "tgis" [tool.pytest.ini_options] -minversion = "6.0" -python_files = "*/tests/*_test.py" addopts = """ --ignore-glob='dist.*' --ignore-glob='bin.*' @@ -387,11 +409,13 @@ addopts = """ --doctest-glob='*doctest*.txt' --ignore='raster/r.category/test_rcategory_doctest.txt' """ -timeout = 300 markers = [ "slow: marks tests as slow (deselect with '-m \"not slow\"')", "needs_solo_run: marks tests that must be run without any other tests running in parallel", ] +minversion = "6.0" +python_files = "*/tests/*_test.py */tests/test_*.py" +timeout = 300 [tool.bandit] @@ -402,3 +426,337 @@ exclude_dirs = [ "utils/test_generate_last_commit_file.py", ] skips = ["B324", "B110", "B101", "B112", "B311", "B404", "B603"] + + +[tool.pylint.main] + +# Files or directories to be skipped. They should be base names, not paths. +ignore = ["CVS", ".git", ".ruff_cache", ".pytest_cache"] + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +ignore-paths = [ + "bin[.].*", + "dist[.].*", + "gui/wxpython/menustrings.py", + "python/libgrass_interface_generator", +] + +# Files or directories matching the regular expression patterns are skipped. The +# regex matches against base names, not paths. The default value ignores Emacs +# file locks +ignore-patterns = ["^\\.#", "OBJ[.].*", "Makefile"] + + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs = 0 + +# Control the amount of potential inferred values when inferring a single object. +# This can help the performance when dealing with large functions or complex, +# nested conditions. 
+# *GRASS modified: default 'limit-inference-results = 100'* +limit-inference-results = 10 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins = [ + "pylint.extensions.bad_builtin", + "pylint.extensions.no_self_use", + "pylint.extensions.redefined_variable_type", +] + + +# Minimum Python version to use for version dependent checks. Will default to the +# version used to run pylint. +# *GRASS modified* +# *GRASS TODO: keep in sync with MIN_PYTHON_VERSION supported* +py-version = "3.9" + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots = ["python", "gui/wxpython"] + + +[tool.pylint.design] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +# exclude-too-few-public-methods = + +# List of qualified class names to ignore when counting class parents (see R0901) +# ignored-parents = + +# Maximum number of arguments for function / method. +# *GRASS modified: default 5, (see R0913)* +# *GRASS TODO: Try to reduce this value* +max-args = 15 + +# Maximum number of attributes for a class (see R0902). +# *GRASS modified: default 7* +# *GRASS TODO: Try to reduce this value* +max-attributes = 25 + +# Maximum number of boolean expressions in an if statement (see R0916). +# *GRASS modified: default 5* +# *GRASS TODO: Try to reduce this value* +max-bool-expr = 8 + +# Maximum number of branch for function / method body. +# *GRASS modified: default 12, (see R0912)* +# *GRASS TODO: Try to reduce this value* +max-branches = 44 + +# Maximum number of locals for function / method body. +# *GRASS modified: default 15, (see R0914)* +# *GRASS TODO: Try to reduce this value* +max-locals = 50 + +# # Maximum number of parents for a class (see R0901). +# max-parents = 7 + +# Maximum number of positional arguments for function / method. +# *GRASS modified: default 5, (see R0917)* +# *GRASS TODO: Try to reduce this value* +max-positional-arguments = 7 + +# Maximum number of public methods for a class (see R0904). +# max-public-methods = 20 + +# Maximum number of return / yield for function / method body. +# *GRASS modified: default 6, (see R0911)* +# *GRASS TODO: Try to reduce this value* +max-returns = 9 + +# Maximum number of statements in function / method body. +# *GRASS modified: default 50, (see R0915)* +# *GRASS TODO: Try to reduce this value* +max-statements = 220 + +# Minimum number of public methods for a class (see R0903). +# *GRASS modified: default 2, (see R0915)* +# *GRASS TODO: Try to increase this value* +min-public-methods = 1 + + +[tool.pylint.format] + +# Maximum number of lines in a module. +# *GRASS modified: default 1000, (see C0302)* +# *GRASS TODO: Try to reduce this value* +max-module-lines = 2000 + + +[tool.pylint."messages control"] +# Disable the message, report, category or checker with the given id(s). You can +# either give multiple identifiers separated by comma (,) or put this option +# multiple times (only on the command line, not in the configuration file where +# it should appear only once). You can also use "--disable=all" to disable +# everything first and then re-enable specific checks. For example, if you want +# to run only the similarities checker, you can use "--disable=all +# --enable=similarities". 
If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable = [ + # *GRASS: The 10 following rules are disabled by default* + "bad-inline-option", + "deprecated-pragma", + "file-ignored", + "locally-disabled", + "raw-checker-failed", + "suppressed-message", + "use-implicit-booleaness-not-comparison-to-string", + "use-implicit-booleaness-not-comparison-to-zero", + "use-symbolic-message-instead", + "useless-suppression", + # *GRASS modified: The following rules are added to the disabled list* + # *GRASS TODO: Try to ignore less rules* + "C0103", # Constant name "%s" doesn't conform to UPPER_CASE naming style (invalid-name) + "C0104", # Disallowed name "foo" (disallowed-name) + "C0112", # Empty %s docstring (empty-docstring) + "C0114", # Missing module docstring (missing-module-docstring) + "C0115", # Missing class docstring (missing-class-docstring) + "C0116", # Missing function or method docstring (missing-function-docstring) + "C0200", # Consider using enumerate instead of iterating with range and len (consider-using-enumerate) + "C0201", # Consider iterating the dictionary directly instead of calling .keys() (consider-iterating-dictionary) + "C0204", # Metaclass class method %s should have %s as first argument (bad-mcs-classmethod-argument) + "C0206", # Consider iterating with .items() (consider-using-dict-items) + "C0207", # Use str(self.start).split(' ', maxsplit=1)[0] instead (use-maxsplit-arg) + "C0209", # Formatting a regular string which could be an f-string (consider-using-f-string) + "C0301", # Line too long (line-too-long) + "C0302", # Too many lines in module (%s/%s) (too-many-lines) + "C0325", # Unnecessary parens after %r keyword (superfluous-parens) + "C0411", # Standard library/third-party library import should be placed before local imports (wrong-import-order) + "C0412", # Imports from package %s are not grouped (ungrouped-imports) + "C0413", # Import "%s" should be placed at the top of the module (wrong-import-position) + "C0415", # (import-outside-toplevel) + "C1802", # Do not use `len(SEQUENCE)` without comparison to determine if a sequence is empty (use-implicit-booleaness-not-len) + "C1803", # "%s" can be simplified to "%s", if it is strictly a sequence, as an empty %s is falsey (use-implicit-booleaness-not-comparison) + "C2801", # Unnecessarily calls dunder method %s. %s. 
(unnecessary-dunder-call) + "E0102", # %s already defined line %s (function-redefined) + "E0203", # Access to member %r before its definition line %s (access-member-before-definition) + "E0213", # Method %r should have "self" as first argument (no-self-argument) + "E0401", # Unable to import %r (import-error) + "E0601", # Using variable %r before assignment (used-before-assignment) + "E0602", # Undefined variable %r (undefined-variable) + "E0606", # Possibly using variable %r before assignment (possibly-used-before-assignment) + "E0611", # No name %r in module %r (no-name-in-module) + "E0633", # Attempting to unpack a non-sequence%s (unpacking-non-sequence) + "E0704", # The raise statement is not inside an except clause (misplaced-bare-raise) + "E1003", # Bad first argument %r given to super() (bad-super-call) + "E1101", # Instance of %r has no %r member (no-member) + "E1102", # %s is not callable (not-callable) + "E1111", # Assigning result of a function call, where the function has no return (assignment-from-no-return) + "E1120", # No value for argument %r in constructor call (no-value-for-parameter) + "E1121", # (too-many-function-args) + "E1123", # Unexpected keyword argument %r in %s call (unexpected-keyword-arg) + "E1124", # Argument %r passed by position and keyword in %s call (redundant-keyword-arg) + "E1126", # Sequence index is not an int, slice, or instance with __index__ (invalid-sequence-index) + "E1128", # Assigning result of a function call, where the function returns None (assignment-from-none) + "E1133", # Non-iterable value %s is used in an iterating context (not-an-iterable) + "E1136", # Value '%s' is unsubscriptable (unsubscriptable-object) + "E1137", # %r does not support item assignment (unsupported-assignment-operation) + "E1307", # Argument %r does not match format type %r (bad-string-format-type) + "R0204", # Redefinition of %s type from %s to %s (redefined-variable-type) + "R0401", # Cyclic import (%s) (cyclic-import) + "R0801", # Similar lines in %s files %s (duplicate-code) + "R0901", # (too-many-ancestors) + "R0902", # (too-many-instance-attributes) + "R0903", # (too-few-public-methods) + "R0904", # (too-many-public-methods) + "R0911", # (too-many-return-statements) + "R0912", # (too-many-branches) + "R0913", # (too-many-arguments) + "R0914", # (too-many-locals) + "R0915", # (too-many-statements) + "R0916", # (too-many-boolean-expressions) + "R0917", # (too-many-positional-arguments) + "R1702", # (too-many-nested-blocks) + "R1704", # Redefining argument with the local name %r (redefined-argument-from-local) + "R1705", # Unnecessary "else" after "return", remove the "else" and de-indent the code inside it (no-else-return) + "R1710", # Either all return statements in a function should return an expression, or none of them should. (inconsistent-return-statements) + "R1712", # Consider using tuple unpacking for swapping variables (consider-swap-variables) + "R1713", # Consider using str.join(sequence) for concatenating strings from an iterable (consider-using-join) + "R1714", # Consider merging these comparisons with 'in' by using '%s %sin (%s)'. Use a set instead if elements are hashable. 
(consider-using-in) + "R1715", # Consider using dict.get for getting values from a dict if a key is present or a default if not (consider-using-get) + "R1724", # Unnecessary "%s" after "continue", %s (no-else-continue) + "R1727", # Boolean condition '%s' will always evaluate to '%s' (condition-evals-to-constant) + "R1732", # Consider using 'with' for resource-allocating operations (consider-using-with) + "R1733", # Unnecessary dictionary index lookup, use '%s' instead (unnecessary-dict-index-lookup) + "R1735", # Consider using '{**item}' instead of a call to 'dict'. (use-dict-literal) + "R6301", # Method could be a function (no-self-use) + "RP0401", # Report: Imports checker: External dependencies + "RP0801", # Report: Similarities: Duplication + "W0101", # Unreachable code (unreachable) + "W0102", # Dangerous default value %s as argument (dangerous-default-value) + "W0104", # Statement seems to have no effect (pointless-statement) + "W0106", # Expression "%s" is assigned to nothing (expression-not-assigned) + "W0108", # Lambda may not be necessary (unnecessary-lambda) + "W0123", # Use of eval (eval-used) + "W0125", # Using a conditional statement with a constant value (using-constant-test) + "W0133", # Exception statement has no effect (pointless-exception-statement) + "W0141", # Used builtin function %s (bad-builtin) + "W0201", # Attribute %r defined outside __init__(attribute-defined-outside-init) + "W0212", # Access to a protected member %s of a client class (protected-access) + "W0221", # (arguments-differ) + "W0222", # Signature differs from %s %r method (signature-differs) + "W0223", # Method %r is abstract in class %r but is not overridden in child class %r (abstract-method) + "W0237", # (arguments-renamed) + "W0401", # Wildcard import %s (wildcard-import) + "W0404", # Reimport %r (imported line %s) (reimported) + "W0511", # (fixme) + "W0601", # Global variable %r undefined at the module level (global-variable-undefined) + "W0602", # Using global for %r but no assignment is done (global-variable-not-assigned) + "W0603", # Using the global statement (global-statement) + "W0604", # Using the global statement at the module level (global-at-module-level) + "W0611", # (unused-import) + "W0612", # Unused variable %r (unused-variable) + "W0613", # Unused argument %r (unused-argument) + "W0614", # Unused import(s) %s from wildcard import of %s (unused-wildcard-import) + "W0621", # Redefining name %r from outer scope (line %s) (redefined-outer-name) + "W0621", # Redefining name %r from outer scope (line %s) (redefined-outer-name) + "W0622", # Redefining built-in %r (redefined-builtin) + "W0631", # Using possibly undefined loop variable %r (undefined-loop-variable) + "W0632", # Possible unbalanced tuple unpacking with sequence %s: left side has %d label%s, right side has %d value%s (unbalanced-tuple-unpacking) + "W0640", # Cell variable %s defined in loop (cell-var-from-loop) + "W0641", # Possibly unused variable %r (possibly-unused-variable) + "W0702", # No exception type(s) specified (bare-except) + "W0707", # Consider explicitly re-raising using %s'%s from %s' (raise-missing-from) + "W0716", # Invalid exception operation. 
%s (wrong-exception-operation) + "W0718", # Catching too general exception Exception (broad-exception-caught) + "W0719", # Raising too general exception: %s (broad-exception-raised) + "W1113", # Keyword argument before variable positional arguments list in the definition of %s function (keyword-arg-before-vararg) + "W1114", # Positional arguments appear to be out of order (arguments-out-of-order) + "W1308", # Duplicate string formatting argument %r, consider passing as named argument (duplicate-string-formatting-argument) + "W1503", # Redundant use of %s with constant value %r (redundant-unittest-assert) + "W1508", # %s default type is %s. Expected str or None. (invalid-envvar-default) + "W1510", # 'subprocess.run' used without explicitly defining the value for 'check'. (subprocess-run-check) + "W1514", # Using open without explicitly specifying an encoding (unspecified-encoding) + "W4701", # Iterated list '%s' is being modified inside for loop body, consider iterating through a copy of it instead. (modified-iterating-list) + "W4901", # Deprecated module %r (deprecated-module) + "W4902", # Using deprecated method %s (deprecated-method) + # "RP0402", # Report: Imports checker: Modules dependencies graph + + "similarities", # too long to run +] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where it +# should appear only once). See also the "--disable" option for examples. +enable = ["deprecated-pragma"] + + +[tool.pylint.refactoring] +# Maximum number of nested blocks for function / method body +# *GRASS modified: default 5, (see R1702)* +# *GRASS TODO: Try to reduce this value* +max-nested-blocks = 7 + + +[tool.pylint.reports] +# Set the output format. Available formats are: text, parseable, colorized, json2 +# (improved json format), json (old json format) and msvs (visual studio). You +# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. +output-format = "colorized" + +# Tells whether to display a full report or only the messages. +reports = true + +[tool.pylint.similarities] +# Minimum lines number of a similarity. +# *GRASS modified: default 4, (see R0801)* +# *GRASS TODO: Try to reduce this value* +# *GRASS justification: Matching only larger chunks of code, not the default 4 lines.* +min-similarity-lines = 50 + +[tool.pylint.variables] +# List of additional names supposed to be defined in builtins. Remember that you +# should avoid defining new builtins when possible. 
+# *GRASS modified: default '# additional-builtins ='* +# *GRASS justification: Translation function is (unfortunately) defined as a builtin.* +# *GRASS TODO: Find another pattern for translation that allows to use _ as a discard variable and other* +additional-builtins = ["_"] + + +[tool.pyright] +exclude = ["**/__pycache__", "**/OBJ.*", "bin.*/**", "dist.*/**"] +include = ["python"] + +# Set extraPath to the output of $(grass --config python_path) + +# *GRASS modified* +# *GRASS TODO: keep in sync with MIN_PYTHON_VERSION supported* +pythonVersion = "3.9" + +[tool.pyrefact] +line_length = 88 + +[tool.isort] +profile = "black" +# *GRASS TODO: keep in sync with MIN_PYTHON_VERSION supported* +py_version = 39 diff --git a/python/.pylintrc b/python/.pylintrc deleted file mode 100644 index 0df20a6e3ba..00000000000 --- a/python/.pylintrc +++ /dev/null @@ -1,608 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-allow-list= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -extension-pkg-whitelist= - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -fail-on= - -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 - -# Files or directories to be skipped. They should be base names, not paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the ignore-list. The -# regex matches against paths and can be in Posix or Windows format. -ignore-paths=grass/temporal/.*, - grass/script/.*, - grass/gunittest/.*, - grass/pydispatch/.*, - grass/pygrass/.*, - grass/imaging/.*, - grass/semantic_label/.*, - grass/grassdb/.*, - grass/utils/.*, - grass/exceptions/.*, - grass/app/.*, - - -# Files or directories matching the regex patterns are skipped. The regex -# matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.8 - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. 
-unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - deprecated-pragma, - use-symbolic-message-instead, - duplicate-code - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit,argparse.parse_error - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. 
Available dictionaries: fr_MC (myspell), fr_CA -# (myspell), fr_BE (myspell), fr_LU (myspell), fr_CH (myspell), fr_FR -# (myspell), ar (myspell), es_CR (myspell), de_CH_frami (myspell), es_EC -# (myspell), ar_YE (myspell), en_CA (myspell), ar_BH (myspell), ar_IN -# (myspell), ar_TN (myspell), en_ZA (myspell), de_DE_frami (myspell), ar_SY -# (myspell), ar_IQ (myspell), ar_LB (myspell), ar_KW (myspell), ru_RU -# (myspell), es_BO (myspell), en_GB (myspell), ar_SD (myspell), de_DE -# (myspell), es_CU (myspell), es_PA (myspell), ar_EG (myspell), es_HN -# (myspell), de_CH (myspell), es_NI (myspell), es_AR (myspell), es_ES -# (myspell), ar_SA (myspell), es_VE (myspell), de_AT_frami (myspell), it_IT -# (myspell), ar_OM (myspell), ar_DZ (myspell), it_CH (myspell), es_MX -# (myspell), es_PY (myspell), en_AU (myspell), es_DO (myspell), es_SV -# (myspell), es_PR (myspell), es_GT (myspell), ar_LY (myspell), ar_JO -# (myspell), en_US (myspell), de_AT (myspell), es_PE (myspell), ar_QA -# (myspell), es_CL (myspell), pt_BR (myspell), ar_AE (myspell), pt_PT -# (myspell), es_CO (myspell), es_UY (myspell), ar_MA (myspell), fr (myspell), -# es_US (myspell), en (aspell). -spelling-dict= - -# List of comma separated words that should be considered directives if they -# appear and the beginning of a comment and should not be checked. -spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# class is considered mixin if its name matches the mixin-class-rgx option. -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. 
In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# Regex pattern to define which classes are considered mixins ignore-mixin- -# members is set to 'yes' -mixin-class-rgx=.*[Mm]ixin - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -# Translation function is (unfortunately) defined as a buildin. -additional-builtins=_ - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of names allowed to shadow builtins -allowed-redefined-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -# On top of the defaults, simple unused is also permissible. -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=past.builtins,future.builtins,builtins,io - - -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes - -# Docstrings are removed from the similarity computation -ignore-docstrings=yes - -# Imports are removed from the similarity computation -ignore-imports=no - -# Signatures are removed from the similarity computation -ignore-signatures=no - -# Minimum lines number of a similarity. -# Matching only larger chunks of code, not the default 4 lines. -min-similarity-lines=10 - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[BASIC] - -# Naming style matching correct argument names. 
-argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. -#class-const-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - x, - y, - z, - ex, - Run, - _ - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
-expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). -import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. -# We tend to have function with more arguments than the default 5 -# and that doesn't seem to be the problem of our code. -max-args=12 - -# Maximum number of attributes for a class (see R0902). -# We tend to have classes with more attributes than the default 7. -max-attributes=15 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. 
-# We add 3 more to the default to make it easier to adopt the check quickly. -# (Many branches may or may not be an issue, but in any case this is potential -# place to improve.) -max-branches=15 - -# Maximum number of locals for function / method body. -# Since we allow many more arguments, we leave some space for locals as well, -# although the default 15 should be high enough. -max-locals=17 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=1 - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception diff --git a/python/grass/app/__init__.py b/python/grass/app/__init__.py index df22d8bbb44..b7ff0ce08c0 100644 --- a/python/grass/app/__init__.py +++ b/python/grass/app/__init__.py @@ -1 +1,17 @@ -from .data import * +from .data import ( + get_possible_database_path, + create_database_directory, + create_startup_location_in_grassdb, + ensure_default_data_hierarchy, + MapsetLockingException, + lock_mapset, +) + +__all__ = [ + "MapsetLockingException", + "create_database_directory", + "create_startup_location_in_grassdb", + "ensure_default_data_hierarchy", + "get_possible_database_path", + "lock_mapset", +] diff --git a/python/grass/app/runtime.py b/python/grass/app/runtime.py index 27f35760b8f..1640d11a853 100644 --- a/python/grass/app/runtime.py +++ b/python/grass/app/runtime.py @@ -36,16 +36,18 @@ def get_grass_config_dir(major_version, minor_version, env): config_dir = env.get(env_dirname) if config_dir is None: - raise RuntimeError( + msg = ( f"The {env_dirname} variable is not set, ask your operating" " system support" ) + raise RuntimeError(msg) if not os.path.isdir(config_dir): - raise NotADirectoryError( + msg = ( f"The {env_dirname} variable points to directory which does" " not exist, ask your operating system support" ) + raise NotADirectoryError(msg) if WINDOWS: config_dirname = f"GRASS{major_version}" diff --git a/python/grass/benchmark/app.py b/python/grass/benchmark/app.py index 835fc0d544b..fb8262df17c 100644 --- a/python/grass/benchmark/app.py +++ b/python/grass/benchmark/app.py @@ -39,10 +39,11 @@ class CliUsageError(ValueError): def join_results_cli(args): """Translate CLI parser result to API calls.""" if args.prefixes and len(args.results) != len(args.prefixes): - raise CliUsageError( + msg = ( f"Number of prefixes ({len(args.prefixes)}) needs to be the same" f" as the number of input result files ({len(args.results)})" ) + raise CliUsageError(msg) def select_only(result): return result.label == args.only diff --git a/python/grass/benchmark/plots.py b/python/grass/benchmark/plots.py index 24afcbcfbae..575463e662e 100644 --- a/python/grass/benchmark/plots.py +++ b/python/grass/benchmark/plots.py @@ -72,10 +72,9 @@ def nprocs_plot(results, filename=None, title=None, metric="time"): ylabel = metric.title() plt.plot(x, getattr(result, metric), label=result.label) else: - raise ValueError( - f"Invalid metric '{metric}' in result, it should be:\ + msg = f"Invalid metric '{metric}' in result, it should be:\ 'time', 'speedup' or 'efficiency'" - ) + raise ValueError(msg) plt.legend() # If there is not many x values, show 
ticks for each, but use default # ticks when there is a lot of x values. diff --git a/python/grass/benchmark/results.py b/python/grass/benchmark/results.py index 123e5dd8458..3815a47d7f8 100644 --- a/python/grass/benchmark/results.py +++ b/python/grass/benchmark/results.py @@ -103,9 +103,7 @@ def join_results_from_files( source_filenames, prefixes=None, select=None, prefixes_as_labels=False ): """Join multiple files into one results object.""" - to_merge = [] - for result_file in source_filenames: - to_merge.append(load_results_from_file(result_file)) + to_merge = [load_results_from_file(result_file) for result_file in source_filenames] return join_results( to_merge, prefixes=prefixes, diff --git a/python/grass/benchmark/testsuite/test_benchmark.py b/python/grass/benchmark/testsuite/test_benchmark.py index 4eaf766859a..5d883caa1e9 100644 --- a/python/grass/benchmark/testsuite/test_benchmark.py +++ b/python/grass/benchmark/testsuite/test_benchmark.py @@ -52,14 +52,10 @@ def test_resolutions(self): }, ] resolutions = [300, 200, 100] - results = [] - for benchmark in benchmarks: - results.append( - benchmark_resolutions( - **benchmark, - resolutions=resolutions, - ) - ) + results = [ + benchmark_resolutions(**benchmark, resolutions=resolutions) + for benchmark in benchmarks + ] plot_file = "test_res_plot.png" num_cells_plot(results, filename=plot_file) self.assertTrue(Path(plot_file).is_file()) @@ -76,9 +72,9 @@ def test_single(self): "label": label, } ] - results = [] - for benchmark in benchmarks: - results.append(benchmark_single(**benchmark, repeat=repeat)) + results = [ + benchmark_single(**benchmark, repeat=repeat) for benchmark in benchmarks + ] self.assertEqual(len(results), len(benchmarks)) for result in results: self.assertTrue(hasattr(result, "all_times")) @@ -100,9 +96,10 @@ def test_nprocs(self): "max_nprocs": 4, } ] - results = [] - for benchmark in benchmarks: - results.append(benchmark_nprocs(**benchmark, repeat=repeat, shuffle=True)) + results = [ + benchmark_nprocs(**benchmark, repeat=repeat, shuffle=True) + for benchmark in benchmarks + ] self.assertEqual(len(results), len(benchmarks)) for result in results: self.assertTrue(hasattr(result, "times")) diff --git a/python/grass/benchmark/testsuite/test_benchmark_cli.py b/python/grass/benchmark/testsuite/test_benchmark_cli.py index 8e963245250..a50192c2d1d 100644 --- a/python/grass/benchmark/testsuite/test_benchmark_cli.py +++ b/python/grass/benchmark/testsuite/test_benchmark_cli.py @@ -43,8 +43,7 @@ class TestBenchmarkCLI(TestCase): """Tests that benchmarkin CLI works""" json_filename = "plot_test.json" - png_filenames = [f"plot_test1_{i}.png" for i in range(4)] - png_filenames.append("plot_test2.png") + png_filenames = [*[f"plot_test1_{i}.png" for i in range(4)], "plot_test2.png"] def tearDown(self): """Remove test files""" diff --git a/python/grass/docs/_templates/oholosidebar.html b/python/grass/docs/_templates/oholosidebar.html index 06dd9dc7540..bebce37732f 100644 --- a/python/grass/docs/_templates/oholosidebar.html +++ b/python/grass/docs/_templates/oholosidebar.html @@ -1,9 +1,9 @@


    diff --git a/python/grass/docs/conf.py b/python/grass/docs/conf.py index 2d6a564cac0..cccd2e213ac 100644 --- a/python/grass/docs/conf.py +++ b/python/grass/docs/conf.py @@ -128,7 +128,7 @@ # General information about the project. project = "Python library documentation" -copyright = "2024, GRASS Development Team" +copyright = "2025, GRASS Development Team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/python/grass/docs/src/gunittest_testing.rst b/python/grass/docs/src/gunittest_testing.rst index 62f258f99c8..20df0245e7e 100644 --- a/python/grass/docs/src/gunittest_testing.rst +++ b/python/grass/docs/src/gunittest_testing.rst @@ -387,7 +387,7 @@ in different locations. .. todo:: Add example of assertions of key-value results. -Especially if a module module has a lot of different parameters allowed +Especially if a module has a lot of different parameters allowed in different combinations, you should test the if the wrong ones are really disallowed and proper error messages are provided (in addition, you can test things such as creation and removal of maps in error states). diff --git a/python/grass/experimental/__init__.py b/python/grass/experimental/__init__.py index 1253d09df0a..8ccb44413bb 100644 --- a/python/grass/experimental/__init__.py +++ b/python/grass/experimental/__init__.py @@ -1,4 +1,11 @@ """Experimental code, all can change""" -from .create import * -from .mapset import * +from .create import require_create_ensure_mapset, create_temporary_mapset +from .mapset import MapsetSession, TemporaryMapsetSession + +__all__ = [ + "MapsetSession", + "TemporaryMapsetSession", + "create_temporary_mapset", + "require_create_ensure_mapset", +] diff --git a/python/grass/experimental/create.py b/python/grass/experimental/create.py index afe8f9c7b69..8581691ac42 100644 --- a/python/grass/experimental/create.py +++ b/python/grass/experimental/create.py @@ -42,15 +42,17 @@ def require_create_ensure_mapset( if overwrite: delete_mapset(path.directory, path.location, path.mapset) else: - raise ValueError( + msg = ( f"Mapset '{path.mapset}' already exists, " "use a different name, overwrite, or ensure" ) + raise ValueError(msg) if create or (ensure and not exists): create_mapset(path.directory, path.location, path.mapset) elif not exists or not is_mapset_valid(path): reason = get_mapset_invalid_reason(path.directory, path.location, path.mapset) - raise ValueError(f"Mapset {path.mapset} is not valid: {reason}") + msg = f"Mapset {path.mapset} is not valid: {reason}" + raise ValueError(msg) def create_temporary_mapset(path, location=None) -> MapsetPath: diff --git a/python/grass/experimental/mapset.py b/python/grass/experimental/mapset.py index e6b818f0a2c..a39bb661618 100644 --- a/python/grass/experimental/mapset.py +++ b/python/grass/experimental/mapset.py @@ -1,4 +1,4 @@ -"Session or subsession for mapsets (subprojects)" +"""Session or subsession for mapsets (subprojects)""" import shutil import os @@ -92,7 +92,8 @@ def finish(self): the environment obtained from this object. 
""" if not self.active: - raise ValueError("Attempt to finish an already finished session") + msg = "Attempt to finish an already finished session" + raise ValueError(msg) os.remove(self._session_file) self._active = False @@ -104,9 +105,8 @@ def __enter__(self): :returns: reference to the object (self) """ if not self.active: - raise ValueError( - "Attempt to use inactive (finished) session as a context manager" - ) + msg = "Attempt to use inactive (finished) session as a context manager" + raise ValueError(msg) return self def __exit__(self, type, value, traceback): # pylint: disable=redefined-builtin @@ -211,7 +211,8 @@ def finish(self): the environment obtained from this object. """ if not self.active: - raise ValueError("Attempt to finish an already finished session") + msg = "Attempt to finish an already finished session" + raise ValueError(msg) self._active = False os.remove(self._session_file) shutil.rmtree(self._path.path, ignore_errors=True) @@ -224,9 +225,8 @@ def __enter__(self): :returns: reference to the object (self) """ if not self.active: - raise ValueError( - "Attempt to use inactive (finished) session as a context manager" - ) + msg = "Attempt to use inactive (finished) session as a context manager" + raise ValueError(msg) return self def __exit__(self, type, value, traceback): # pylint: disable=redefined-builtin diff --git a/python/grass/grassdb/checks.py b/python/grass/grassdb/checks.py index 369cdf60e62..39dd1081fdf 100644 --- a/python/grass/grassdb/checks.py +++ b/python/grass/grassdb/checks.py @@ -9,6 +9,8 @@ .. sectionauthor:: Vaclav Petras """ +from __future__ import annotations + import datetime import glob import os @@ -18,9 +20,10 @@ import grass.grassdb.config as cfg import grass.script as gs from grass.script import gisenv +from itertools import starmap -def mapset_exists(path, location=None, mapset=None): +def mapset_exists(path: str | os.PathLike[str], location=None, mapset=None) -> bool: """Returns True whether mapset path exists. Either only *path* is provided or all three parameters need to be provided. @@ -30,27 +33,27 @@ def mapset_exists(path, location=None, mapset=None): :param mapset: name of a Mapset if not part of *path* """ if location and mapset: - path = os.path.join(path, location, mapset) + path = Path(path, location, mapset) elif location or mapset: raise ValueError(_("Provide only path or all three parameters, not two")) - return os.path.exists(path) + return Path(path).exists() -def location_exists(path, location=None): +def location_exists(path: str | os.PathLike[str], location=None) -> bool: """Returns True whether location path exists. :param path: Path to a Location or to a GRASS GIS database directory :param location: name of a Location if not part of *path* """ if location: - path = os.path.join(path, location) - return os.path.exists(path) + path = Path(path, location) + return Path(path).exists() # TODO: distinguish between valid for getting maps and usable as current # https://lists.osgeo.org/pipermail/grass-dev/2016-September/082317.html # interface created according to the current usage -def is_mapset_valid(path, location=None, mapset=None): +def is_mapset_valid(path: str | os.PathLike[str], location=None, mapset=None) -> bool: """Return True if GRASS Mapset is valid Either only *path* is provided or all three parameters need to be provided. @@ -64,13 +67,13 @@ def is_mapset_valid(path, location=None, mapset=None): # WIND doesn't exist (assuming that neither GRASS_REGION nor # WIND_OVERRIDE environmental variables are set). 
if location and mapset: - path = os.path.join(path, location, mapset) + path = Path(path, location, mapset) elif location or mapset: raise ValueError(_("Provide only path or all three parameters, not two")) - return os.access(os.path.join(path, "WIND"), os.R_OK) + return os.access(Path(path, "WIND"), os.R_OK) -def is_location_valid(path, location=None): +def is_location_valid(path: str | os.PathLike[str], location=None) -> bool: """Return True if GRASS Location is valid :param path: Path to a Location or to a GRASS GIS database directory @@ -81,8 +84,8 @@ def is_location_valid(path, location=None): # containing a PERMANENT/DEFAULT_WIND file is probably a GRASS # location, while a directory lacking it probably isn't. if location: - path = os.path.join(path, location) - return os.access(os.path.join(path, "PERMANENT", "DEFAULT_WIND"), os.F_OK) + path = Path(path, location) + return os.access(Path(path, "PERMANENT", "DEFAULT_WIND"), os.F_OK) def is_mapset_current(database, location, mapset) -> bool: @@ -101,7 +104,7 @@ def is_location_current(database, location) -> bool: return bool(database == genv["GISDBASE"] and location == genv["LOCATION_NAME"]) -def is_current_user_mapset_owner(mapset_path): +def is_current_user_mapset_owner(mapset_path: str | os.PathLike[str]) -> bool: """Returns True if mapset owner is the current user. On Windows it always returns True.""" # Note that this does account for libgis built with SKIP_MAPSET_OWN_CHK @@ -118,12 +121,12 @@ def is_current_user_mapset_owner(mapset_path): return mapset_uid == os.getuid() -def is_different_mapset_owner(mapset_path): +def is_different_mapset_owner(mapset_path: str | os.PathLike[str]) -> bool: """Returns True if mapset owner is different from the current user""" return not is_current_user_mapset_owner(mapset_path) -def get_mapset_owner(mapset_path): +def get_mapset_owner(mapset_path: str | os.PathLike[str]) -> str | None: """Returns mapset owner name or None if owner name unknown. On Windows it always returns None.""" if sys.platform == "win32": @@ -164,27 +167,27 @@ def is_first_time_user(): return False -def is_mapset_locked(mapset_path): +def is_mapset_locked(mapset_path: str | os.PathLike[str]) -> bool: """Check if the mapset is locked""" lock_name = ".gislock" - lockfile = os.path.join(mapset_path, lock_name) - return os.path.exists(lockfile) + lockfile = Path(mapset_path, lock_name) + return lockfile.exists() -def get_lockfile_if_present(database, location, mapset): +def get_lockfile_if_present(database, location, mapset) -> str | None: """Return path to lock if present, None otherwise Returns the path as a string or None if nothing was found, so the return value can be used to test if the lock is present. """ lock_name = ".gislock" - lockfile = os.path.join(database, location, mapset, lock_name) - if os.path.isfile(lockfile): - return lockfile + lockfile = Path(database, location, mapset, lock_name) + if lockfile.is_file(): + return str(lockfile) return None -def get_mapset_lock_info(mapset_path): +def get_mapset_lock_info(mapset_path: str | os.PathLike[str]): """Get information about .gislock file. Assumes lock file exists, use is_mapset_locked to find out. 
Returns information as a dictionary with keys @@ -233,13 +236,14 @@ def get_reason_id_mapset_not_usable(mapset_path): return None -def dir_contains_location(path): +def dir_contains_location(path: str | os.PathLike[str]) -> bool: """Return True if directory *path* contains a valid location""" - if not os.path.isdir(path): + p = Path(path) + if not p.is_dir(): return False - for name in os.listdir(path): - if os.path.isdir(os.path.join(path, name)): - if is_location_valid(path, name): + for name in p.iterdir(): + if name.is_dir(): + if is_location_valid(name): return True return False @@ -260,8 +264,8 @@ def get_mapset_invalid_reason(database, location, mapset, none_for_no_reason=Fal # Since we are trying to get the one most likely message, we need all # those return statements here. # pylint: disable=too-many-return-statements - location_path = os.path.join(database, location) - mapset_path = os.path.join(location_path, mapset) + location_path = Path(database, location) + mapset_path = location_path / mapset # first checking the location validity # perhaps a special set of checks with different messages mentioning mapset # will be needed instead of the same set of messages used for location @@ -271,14 +275,14 @@ def get_mapset_invalid_reason(database, location, mapset, none_for_no_reason=Fal if location_msg: return location_msg # if location is valid, check mapset - if mapset not in os.listdir(location_path): + if not mapset_path.exists(): # TODO: remove the grass.py specific wording return _( "Mapset <{mapset}> doesn't exist in GRASS Location <{location}>" ).format(mapset=mapset, location=location) - if not os.path.isdir(mapset_path): + if not mapset_path.is_dir(): return _("<%s> is not a GRASS Mapset because it is not a directory") % mapset - if not os.path.isfile(os.path.join(mapset_path, "WIND")): + if not (mapset_path / "WIND").is_file(): return ( _( "<%s> is not a valid GRASS Mapset" @@ -287,7 +291,7 @@ def get_mapset_invalid_reason(database, location, mapset, none_for_no_reason=Fal % mapset ) # based on the is_mapset_valid() function - if not os.access(os.path.join(mapset_path, "WIND"), os.R_OK): + if not os.access(mapset_path / "WIND", os.R_OK): return ( _( "<%s> is not a valid GRASS Mapset" @@ -303,7 +307,9 @@ def get_mapset_invalid_reason(database, location, mapset, none_for_no_reason=Fal ).format(mapset=mapset, location=location) -def get_location_invalid_reason(database, location, none_for_no_reason=False): +def get_location_invalid_reason( + database, location, none_for_no_reason=False +) -> str | None: """Returns a message describing what is wrong with the Location The goal is to provide the most suitable error message @@ -321,14 +327,14 @@ def get_location_invalid_reason(database, location, none_for_no_reason=False): :param none_for_no_reason: When True, return None when reason is unknown :returns: translated message or None """ - location_path = os.path.join(database, location) - permanent_path = os.path.join(location_path, "PERMANENT") + location_path = Path(database, location) + permanent_path = location_path / "PERMANENT" # directory - if not os.path.exists(location_path): + if not location_path.exists(): return _("Location <%s> doesn't exist") % location_path # permanent mapset - if "PERMANENT" not in os.listdir(location_path): + if not permanent_path.exists(): return ( _( "<%s> is not a valid GRASS Location" @@ -336,7 +342,7 @@ def get_location_invalid_reason(database, location, none_for_no_reason=False): ) % location_path ) - if not os.path.isdir(permanent_path): + if 
not permanent_path.is_dir(): return ( _( "<%s> is not a valid GRASS Location" @@ -345,7 +351,7 @@ def get_location_invalid_reason(database, location, none_for_no_reason=False): % location_path ) # partially based on the is_location_valid() function - if not os.path.isfile(os.path.join(permanent_path, "DEFAULT_WIND")): + if not (permanent_path / "DEFAULT_WIND").is_file(): return ( _( "<%s> is not a valid GRASS Location" @@ -362,14 +368,14 @@ def get_location_invalid_reason(database, location, none_for_no_reason=False): ) -def get_location_invalid_suggestion(database, location): +def get_location_invalid_suggestion(database, location) -> str | None: """Return suggestion what to do when specified location is not valid It gives suggestion when: * A mapset was specified instead of a location. * A GRASS database was specified instead of a location. """ - location_path = os.path.join(database, location) + location_path = Path(database, location) # a common error is to use mapset instead of location, # if that's the case, include that info into the message if is_mapset_valid(location_path): @@ -531,10 +537,7 @@ def get_reasons_locations_not_removable(locations): Returns messages as list if there were any failed checks, otherwise empty list. """ - messages = [] - for grassdb, location in locations: - messages += get_reasons_location_not_removable(grassdb, location) - return messages + return list(starmap(get_reasons_location_not_removable, locations)) def get_reasons_location_not_removable(grassdb, location): @@ -565,9 +568,7 @@ def get_reasons_location_not_removable(grassdb, location): ) # Append to the list of tuples - mapsets = [] - for g_mapset in g_mapsets: - mapsets.append((grassdb, location, g_mapset)) + mapsets = [(grassdb, location, g_mapset) for g_mapset in g_mapsets] # Concentenate both checks messages += get_reasons_mapsets_not_removable(mapsets, check_permanent=False) @@ -596,9 +597,7 @@ def get_reasons_grassdb_not_removable(grassdb): g_locations = get_list_of_locations(grassdb) # Append to the list of tuples - locations = [] - for g_location in g_locations: - locations.append((grassdb, g_location)) + locations = [(grassdb, g_location) for g_location in g_locations] return get_reasons_locations_not_removable(locations) @@ -609,12 +608,11 @@ def get_list_of_locations(dbase): :return: list of locations (sorted) """ - locations = [] - for location in glob.glob(os.path.join(dbase, "*")): - if os.path.join(location, "PERMANENT") in glob.glob( - os.path.join(location, "*") - ): - locations.append(os.path.basename(location)) + locations = [ + os.path.basename(location) + for location in glob.glob(os.path.join(dbase, "*")) + if os.path.join(location, "PERMANENT") in glob.glob(os.path.join(location, "*")) + ] locations.sort(key=lambda x: x.lower()) diff --git a/python/grass/grassdb/history.py b/python/grass/grassdb/history.py index d7c22eb36fc..d168c0aa184 100644 --- a/python/grass/grassdb/history.py +++ b/python/grass/grassdb/history.py @@ -313,7 +313,8 @@ def add_entry(history_path, entry): if get_history_file_extension(history_path) == ".json": _add_entry_to_JSON(history_path, entry) else: - raise ValueError("Adding entries is supported only for JSON format.") + msg = "Adding entries is supported only for JSON format." 
+ raise ValueError(msg) def _update_entry_in_JSON(history_path, command_info, index=None): @@ -360,7 +361,8 @@ def update_entry(history_path, command_info, index=None): if get_history_file_extension(history_path) == ".json": _update_entry_in_JSON(history_path, command_info, index) else: - raise ValueError("Updating entries is supported only for JSON format.") + msg = "Updating entries is supported only for JSON format." + raise ValueError(msg) def copy(history_path, target_path): diff --git a/python/grass/gunittest/case.py b/python/grass/gunittest/case.py index 4e0ee3ab24d..601514c9cc9 100644 --- a/python/grass/gunittest/case.py +++ b/python/grass/gunittest/case.py @@ -48,6 +48,7 @@ class TestCase(unittest.TestCase): Be especially careful and always use keyword argument syntax for *msg* parameter. """ + longMessage = True # to get both standard and custom message maxDiff = None # we can afford long diffs _temp_region = None # to control the temporary region @@ -133,14 +134,17 @@ def del_temp_region(cls): name = os.environ.pop("WIND_OVERRIDE") if name != cls._temp_region: # be strict about usage of region - raise RuntimeError( + msg = ( "Inconsistent use of" " TestCase.use_temp_region, WIND_OVERRIDE" " or temporary region in general\n" "Region to which should be now deleted ({n})" " by TestCase class" "does not correspond to currently set" - " WIND_OVERRIDE ({c})", + " WIND_OVERRIDE ({c})" + ) + raise RuntimeError( + msg, n=cls._temp_region, c=name, ) @@ -263,12 +267,13 @@ def assertModuleKeyValue( else: # we can probably remove this once we have more tests # of keyvalue_equals and diff_keyvalue against each other - raise RuntimeError( + msg = ( "keyvalue_equals() showed difference but" " diff_keyvalue() did not. This can be" " a bug in one of them or in the caller" " (assertModuleKeyValue())" ) + raise RuntimeError(msg) self.fail(self._formatMessage(msg, stdMsg)) def assertRasterFitsUnivar(self, raster, reference, precision=None, msg=None): @@ -1320,10 +1325,10 @@ def runModule(cls, module, expecting_stdout=False, **kwargs): # TODO: standardized error code would be handy here import re - if re.search("Raster map.*not found", errors, flags=re.DOTALL): + if re.search(r"Raster map.*not found", errors, flags=re.DOTALL): errors += "\nSee available raster maps:\n" errors += call_module("g.list", type="raster") - if re.search("Vector map.*not found", errors, flags=re.DOTALL): + if re.search(r"Vector map.*not found", errors, flags=re.DOTALL): errors += "\nSee available vector maps:\n" errors += call_module("g.list", type="vector") # TODO: message format, parameters @@ -1331,7 +1336,7 @@ def runModule(cls, module, expecting_stdout=False, **kwargs): module.name, module.get_python(), module.returncode, errors=errors ) # TODO: use this also in assert and apply when appropriate - if expecting_stdout and not module.outputs.stdout.strip(): + if expecting_stdout and (not module.outputs.stdout.strip()): if module.outputs.stderr: errors = " The errors are:\n" + module.outputs.stderr else: @@ -1441,9 +1446,11 @@ def assertModuleFail(self, module, msg=None, **kwargs): def _module_from_parameters(module, **kwargs): if kwargs: if not isinstance(module, str): - raise ValueError("module can be only string or PyGRASS Module") + msg = "module can be only string or PyGRASS Module" + raise ValueError(msg) if isinstance(module, Module): - raise ValueError("module can be only string if other parameters are given") + msg = "module can be only string if other parameters are given" + raise ValueError(msg) # allow passing 
all parameters in one dictionary called parameters if list(kwargs.keys()) == ["parameters"]: kwargs = kwargs["parameters"] @@ -1454,20 +1461,24 @@ def _module_from_parameters(module, **kwargs): def _check_module_run_parameters(module): # in this case module already run and we would start it again if module.run_: - raise ValueError("Do not run the module manually, set run_=False") + msg = "Do not run the module manually, set run_=False" + raise ValueError(msg) if not module.finish_: - raise ValueError( + msg = ( "This function will always finish module run," " set finish_=None or finish_=True." ) + raise ValueError(msg) # we expect most of the usages with stdout=PIPE # TODO: in any case capture PIPE always? if module.stdout_ is None: module.stdout_ = subprocess.PIPE elif module.stdout_ != subprocess.PIPE: - raise ValueError("stdout_ can be only PIPE or None") + msg = "stdout_ can be only PIPE or None" + raise ValueError(msg) if module.stderr_ is None: module.stderr_ = subprocess.PIPE elif module.stderr_ != subprocess.PIPE: - raise ValueError("stderr_ can be only PIPE or None") + msg = "stderr_ can be only PIPE or None" + raise ValueError(msg) # because we want to capture it diff --git a/python/grass/gunittest/checkers.py b/python/grass/gunittest/checkers.py index c63f6d08ff0..f83fda80216 100644 --- a/python/grass/gunittest/checkers.py +++ b/python/grass/gunittest/checkers.py @@ -9,14 +9,20 @@ :authors: Vaclav Petras, Soeren Gebbert """ -import os -import sys -import re +from __future__ import annotations + import doctest import hashlib +import os +import re +import sys +from typing import TYPE_CHECKING, Any from grass.script.utils import encode +if TYPE_CHECKING: + from collections.abc import Callable, Mapping + try: from grass.script.core import KeyValue except (ImportError, AttributeError): @@ -226,11 +232,10 @@ def text_to_keyvalue( # line contains something but not separator if not skip_invalid: # TODO: here should go _ for translation - raise ValueError( - ("Line <{l}> does not contain separator <{s}>.").format( - l=line, s=sep - ) + msg = ("Line <{l}> does not contain separator <{s}>.").format( + l=line, s=sep ) + raise ValueError(msg) # if we get here we are silently ignoring the line # because it is invalid (does not contain key-value separator) or # because it is empty @@ -255,7 +260,7 @@ def text_to_keyvalue( # TODO: define standard precisions for DCELL, FCELL, CELL, mm, ft, cm, ... # TODO: decide if None is valid, and use some default or no compare # TODO: is None a valid value for precision? 
-def values_equal(value_a, value_b, precision=0.000001): +def values_equal(value_a, value_b, precision: float = 0.000001) -> bool: """ >>> values_equal(1.022, 1.02, precision=0.01) True @@ -267,7 +272,7 @@ def values_equal(value_a, value_b, precision=0.000001): True >>> values_equal("Hello", "hello") False - """ + """ # noqa: D402; Add a summary # each if body needs to handle only not equal state if isinstance(value_a, float) and isinstance(value_b, float): @@ -277,9 +282,8 @@ def values_equal(value_a, value_b, precision=0.000001): # in Python 3 None < 3 raises TypeError precision = float(precision) if precision < 0: - raise ValueError( - "precision needs to be greater than or equal to zero: {precision} < 0" - ) + msg = "precision needs to be greater than or equal to zero: {precision} < 0" + raise ValueError(msg) if abs(value_a - value_b) > precision: return False @@ -318,8 +322,13 @@ def values_equal(value_a, value_b, precision=0.000001): def keyvalue_equals( - dict_a, dict_b, precision, def_equal=values_equal, key_equal=None, a_is_subset=False -): + dict_a: Mapping, + dict_b: Mapping, + precision: float, + def_equal: Callable = values_equal, + key_equal: Mapping[Any, Callable] | None = None, + a_is_subset: bool = False, +) -> bool: """Compare two dictionaries. .. note:: @@ -350,8 +359,8 @@ def keyvalue_equals( :param dict_b: second dictionary :param precision: precision with which the floating point values are compared (passed to equality functions) - :param callable def_equal: function used for comparison by default - :param dict key_equal: dictionary of functions used for comparison + :param def_equal: function used for comparison by default + :param key_equal: dictionary of functions used for comparison of specific keys, `def_equal` is used for the rest, keys in dictionary are keys in `dict_a` and `dict_b` dictionaries, values are the functions used to comapare the given key @@ -360,7 +369,7 @@ def keyvalue_equals( :return: `True` if identical, `False` if different - Use `diff_keyvalue()` to get information about differeces. + Use `diff_keyvalue()` to get information about differences. You can use this function to find out if there is a difference and then use `diff_keyvalue()` to determine all the differences between dictionaries. @@ -369,7 +378,7 @@ def keyvalue_equals( if not a_is_subset and sorted(dict_a.keys()) != sorted(dict_b.keys()): return False - b_keys = dict_b.keys() if a_is_subset else None + b_keys = dict_b.keys() if a_is_subset else set() # iterate over subset or just any if not a_is_subset # check for missing keys in superset @@ -398,7 +407,7 @@ def diff_keyvalue( (['d'], ['a'], [('c', 2, 1)]) You can provide only a subset of values in dict_a, in this case - first item in tuple is an emptu list:: + first item in tuple is an empty list:: >>> diff_keyvalue(a, b, a_is_subset=True, precision=0) ([], ['a'], [('c', 2, 1)]) @@ -533,7 +542,7 @@ def check_text_ellipsis(reference, actual) -> bool: >>> check_text_ellipsis("Result: [569] (...)", "Result: 9 (too high)") False - """ + """ # noqa: D402; Add a summary ref_escaped = re.escape(reference) exp = re.compile(r"\\\.\\\.\\\.") # matching escaped ... ref_regexp = exp.sub(".+", ref_escaped) + "$" @@ -592,7 +601,7 @@ def check_text_ellipsis_doctest(reference, actual): ... optionflags=doctest.ELLIPSIS, ... 
) False - """ + """ # noqa: D402; Add a summary # this can be also global checker = doctest.OutputChecker() return checker.check_output(reference, actual, optionflags=doctest.ELLIPSIS) diff --git a/python/grass/gunittest/invoker.py b/python/grass/gunittest/invoker.py index 29b62830e25..b6e3e5bca99 100644 --- a/python/grass/gunittest/invoker.py +++ b/python/grass/gunittest/invoker.py @@ -286,10 +286,11 @@ def run_in_location(self, gisdbase, location, location_type, results_dir, exclud not to one file as these will simply contain the last executed file. """ if os.path.abspath(results_dir) == os.path.abspath(self.start_dir): - raise RuntimeError( + msg = ( "Results root directory should not be the same" " as discovery start directory" ) + raise RuntimeError(msg) self.reporter = GrassTestFilesMultiReporter( reporters=[ GrassTestFilesTextReporter(stream=sys.stderr), diff --git a/python/grass/gunittest/loader.py b/python/grass/gunittest/loader.py index 50fa368f62a..2e8eaee7f95 100644 --- a/python/grass/gunittest/loader.py +++ b/python/grass/gunittest/loader.py @@ -228,9 +228,7 @@ def discover(self, start_dir, pattern="test*.py", top_level_dir=None): universal_location_value=self.universal_tests_value, import_modules=True, ) - tests = [] - for module in modules: - tests.append(self.loadTestsFromModule(module.module)) + tests = [self.loadTestsFromModule(module.module) for module in modules] return self.suiteClass(tests) diff --git a/python/grass/gunittest/main.py b/python/grass/gunittest/main.py index 741db5e43d2..fe39e7ca23b 100644 --- a/python/grass/gunittest/main.py +++ b/python/grass/gunittest/main.py @@ -147,7 +147,8 @@ def get_config(start_directory, config_file): # Does not check presence of the file config_parser.read(config_file) else: - raise ValueError("Either start_directory or config_file must be set") + msg = "Either start_directory or config_file must be set" + raise ValueError(msg) if "gunittest" not in config_parser: # Create an empty section if file is not available or section is not present. config_parser.read_dict({"gunittest": {}}) diff --git a/python/grass/gunittest/reporters.py b/python/grass/gunittest/reporters.py index 7c93872a656..5b468ac1d91 100644 --- a/python/grass/gunittest/reporters.py +++ b/python/grass/gunittest/reporters.py @@ -760,7 +760,7 @@ def end_file_test( # TODO: replace by better handling of potential lists when parsing # TODO: create link to module if running in grass or in addons # alternatively a link to module test summary - if type(modules) is not list: + if not isinstance(modules, list): modules = [modules] # here we would have also links to coverage, profiling, ... 
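# A minimal sketch (not part of this patch; the helper name is illustrative only)
# of the list-normalization idiom that the isinstance() changes in the hunk above
# and in report_for_dir() further below both rely on: a value that may be either
# a single item or a list is wrapped so downstream code can always iterate it.
def _ensure_list(value):
    """Return *value* unchanged if it is already a list, otherwise wrap it in one."""
    if not isinstance(value, list):
        return [value]
    return value

# e.g. _ensure_list("r.info") == ["r.info"], while _ensure_list(["r.info", "g.region"])
# is returned as-is; using isinstance() (rather than ``type(x) is list``) also accepts
# list subclasses.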
@@ -853,41 +853,34 @@ def finish(self): svn_info = get_svn_info() svn_revision = "" if not svn_info else svn_info["revision"] - summary = {} - summary["files_total"] = self.test_files - summary["files_successes"] = self.files_pass - summary["files_failures"] = self.files_fail - - summary["names"] = self.names - summary["tested_dirs"] = self.tested_dirs - # TODO: we don't have a general mechanism for storing any type in text - summary["files_returncodes"] = [str(item) for item in self.files_returncodes] - - # let's use seconds as a universal time delta format - # (there is no standard way how to store time delta as string) - summary["time"] = self.main_time.total_seconds() - - status = "failed" if self.files_fail else "succeeded" - summary["status"] = status - - summary["total"] = self.total - summary["successes"] = self.successes - summary["failures"] = self.failures - summary["errors"] = self.errors - summary["skipped"] = self.skipped - summary["expected_failures"] = self.expected_failures - summary["unexpected_successes"] = self.unexpected_success - - summary["test_files_authors"] = self.test_files_authors - summary["tested_modules"] = self.modules - summary["svn_revision"] = svn_revision - # ignoring issues with time zones - summary["timestamp"] = self.main_start_time.strftime("%Y-%m-%d %H:%M:%S") - # TODO: add some general metadata here (passed in constructor) - - # add additional information - for key, value in self._info.items(): - summary[key] = value + summary = { + "files_total": self.test_files, + "files_successes": self.files_pass, + "files_failures": self.files_fail, + "names": self.names, + "tested_dirs": self.tested_dirs, + # TODO: we don't have a general mechanism for storing any type in text + "files_returncodes": [str(item) for item in self.files_returncodes], + # let's use seconds as a universal time delta format + # (there is no standard way how to store time delta as string) + "time": self.main_time.total_seconds(), + "status": "failed" if self.files_fail else "succeeded", + "total": self.total, + "successes": self.successes, + "failures": self.failures, + "errors": self.errors, + "skipped": self.skipped, + "expected_failures": self.expected_failures, + "unexpected_successes": self.unexpected_success, + "test_files_authors": self.test_files_authors, + "tested_modules": self.modules, + "svn_revision": svn_revision, + # ignoring issues with time zones + "timestamp": self.main_start_time.strftime("%Y-%m-%d %H:%M:%S"), + # TODO: add some general metadata here (passed in constructor) + # add additional information + **dict(self._info.items()), + } summary_filename = os.path.join(self.result_dir, "test_keyvalue_result.txt") text = keyvalue_to_text(summary, sep="=", vsep="\n", isep=",") @@ -959,9 +952,6 @@ def __init__(self, stream): super().__init__() self._stream = stream - def start(self, results_dir): - super().start(results_dir) - def finish(self): super().finish() @@ -1129,7 +1119,7 @@ def report_for_dir(self, root, directory, test_files): test_file_authors = summary.get("test_file_authors") if not test_file_authors: test_file_authors = [] - if type(test_file_authors) is not list: + if not isinstance(test_file_authors, list): test_file_authors = [test_file_authors] test_files_authors.extend(test_file_authors) diff --git a/python/grass/gunittest/runner.py b/python/grass/gunittest/runner.py index 0bbd0f361b9..e27305cacbc 100644 --- a/python/grass/gunittest/runner.py +++ b/python/grass/gunittest/runner.py @@ -26,8 +26,8 @@ class _WritelnDecorator: def __init__(self, 
stream): self.stream = stream - def __getattr__(self, attr): - if attr in ("stream", "__getstate__"): + def __getattr__(self, attr: str): + if attr in {"stream", "__getstate__"}: raise AttributeError(attr) return getattr(self.stream, attr) @@ -396,7 +396,7 @@ def addUnexpectedSuccess(self, test): raise def printErrors(self): - "Called by TestRunner after test run" + """Called by TestRunner after test run""" super().printErrors() for result in self._results: try: @@ -470,7 +470,7 @@ def __init__( self._result = result def run(self, test): - "Run the given test case or test suite." + """Run the given test case or test suite.""" result = self._result unittest.registerResult(result) result.failfast = self.failfast diff --git a/python/grass/gunittest/testsuite/data/samplecode/submodule_errors/subsubmodule_errors/testsuite/test_error.py b/python/grass/gunittest/testsuite/data/samplecode/submodule_errors/subsubmodule_errors/testsuite/test_error.py index 0be341770a6..d0a6b609479 100644 --- a/python/grass/gunittest/testsuite/data/samplecode/submodule_errors/subsubmodule_errors/testsuite/test_error.py +++ b/python/grass/gunittest/testsuite/data/samplecode/submodule_errors/subsubmodule_errors/testsuite/test_error.py @@ -6,7 +6,8 @@ class TestError(TestCase): # pylint: disable=R0904 def test_something(self): - raise RuntimeError("Error in test function") + msg = "Error in test function" + raise RuntimeError(msg) self.assertTrue(True) @@ -14,7 +15,8 @@ class TestErrorSetUp(TestCase): # pylint: disable=R0904 def setUp(self): - raise RuntimeError("Error in setUp") + msg = "Error in setUp" + raise RuntimeError(msg) def test_something(self): self.assertTrue(True) @@ -24,7 +26,8 @@ class TestErrorTearDown(TestCase): # pylint: disable=R0904 def tearDown(self): - raise RuntimeError("Error in tearDown") + msg = "Error in tearDown" + raise RuntimeError(msg) def test_something(self): self.assertTrue(True) @@ -35,7 +38,8 @@ class TestErrorClassSetUp(TestCase): @classmethod def setUpClass(cls): - raise RuntimeError("Error in setUpClass") + msg = "Error in setUpClass" + raise RuntimeError(msg) def test_something(self): self.assertTrue(True) @@ -46,7 +50,8 @@ class TestErrorClassTearDown(TestCase): @classmethod def tearDownClass(cls): - raise RuntimeError("Error in tearDownClass") + msg = "Error in tearDownClass" + raise RuntimeError(msg) def test_something(self): self.assertTrue(True) diff --git a/python/grass/gunittest/testsuite/data/samplecode/testsuite/test_good_and_bad.py b/python/grass/gunittest/testsuite/data/samplecode/testsuite/test_good_and_bad.py index ef3f36b89bd..d9e207d6359 100644 --- a/python/grass/gunittest/testsuite/data/samplecode/testsuite/test_good_and_bad.py +++ b/python/grass/gunittest/testsuite/data/samplecode/testsuite/test_good_and_bad.py @@ -15,7 +15,8 @@ def test_something_failing(self): self.assertTrue(False, msg="This failed in test_good_and_bad") def test_something_erroring(self): - raise RuntimeError("Some error which was raised") + msg = "Some error which was raised" + raise RuntimeError(msg) self.assertTrue(True, msg="This should not fail in test_good_and_bad") diff --git a/python/grass/imaging/images2avi.py b/python/grass/imaging/images2avi.py index 9776b9c0db2..2dda32e142d 100644 --- a/python/grass/imaging/images2avi.py +++ b/python/grass/imaging/images2avi.py @@ -28,7 +28,7 @@ # # changes of this file GRASS (PNG instead of JPG) by Anna Petrasova 2013 -""" Module images2avi +"""Module images2avi Uses ffmpeg to read and write AVI files. 
Requires PIL @@ -193,7 +193,8 @@ def readAvi(filename, asNumpy=True): print(S.stderr.read()) # Clean up _cleanDir(tempDir) - raise RuntimeError("Could not read avi.") + msg = "Could not read avi." + raise RuntimeError(msg) # Read images images = images2ims.readIms(os.path.join(tempDir, "im*.jpg"), asNumpy) diff --git a/python/grass/imaging/images2gif.py b/python/grass/imaging/images2gif.py index d37ec0c4e32..5c4e9dec3cb 100644 --- a/python/grass/imaging/images2gif.py +++ b/python/grass/imaging/images2gif.py @@ -24,7 +24,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" Module images2gif +"""Module images2gif Provides functionality for reading and writing animated GIF images. Use writeGif to write a series of numpy arrays or PIL images as an @@ -101,8 +101,7 @@ def get_cKDTree(): def checkImages(images): - """checkImages(images) - Check numpy images and correct intensity range etc. + """Check numpy images and correct intensity range etc. The same for all movie formats. :param images: @@ -119,7 +118,7 @@ def checkImages(images): # Check and convert dtype if im.dtype == np.uint8: images2.append(im) # Ok - elif im.dtype in [np.float32, np.float64]: + elif im.dtype in (np.float32, np.float64): im = im.copy() im[im < 0] = 0 im[im > 1] = 1 @@ -133,9 +132,11 @@ def checkImages(images): pass # ok elif im.ndim == 3: if im.shape[2] not in [3, 4]: - raise ValueError("This array can not represent an image.") + msg = "This array can not represent an image." + raise ValueError(msg) else: - raise ValueError("This array can not represent an image.") + msg = "This array can not represent an image." + raise ValueError(msg) else: raise ValueError("Invalid image type: " + str(type(im))) @@ -213,12 +214,11 @@ def getAppExt(self, loops=float("inf")): # (the extension interprets zero loops # to mean an infinite number of loops) # Mmm, does not seem to work - if True: - bb = "\x21\xFF\x0B" # application extension - bb += "NETSCAPE2.0" - bb += "\x03\x01" - bb += intToBin(loops) - bb += "\x00" # end + bb = "\x21\xFF\x0B" # application extension + bb += "NETSCAPE2.0" + bb += "\x03\x01" + bb += intToBin(loops) + bb += "\x00" # end return bb def getGraphicsControlExt(self, duration=0.1, dispose=2): @@ -267,7 +267,8 @@ def handleSubRectangles(self, images, subRectangles): if len(xy) == len(images): xy = list(xy) else: - raise ValueError("len(xy) doesn't match amount of images.") + msg = "len(xy) doesn't match amount of images." + raise ValueError(msg) else: xy = [xy for im in images] xy[0] = (0, 0) @@ -275,9 +276,10 @@ def handleSubRectangles(self, images, subRectangles): else: # Calculate xy using some basic image processing - # Check Numpy + # Check NumPy if np is None: - raise RuntimeError("Need Numpy to use auto-subRectangles.") + msg = "Need NumPy to use auto-subRectangles." 
+ raise RuntimeError(msg) # First make numpy arrays if required for i in range(len(images)): @@ -286,21 +288,18 @@ def handleSubRectangles(self, images, subRectangles): tmp = im.convert() # Make without palette a = np.asarray(tmp) if len(a.shape) == 0: - raise MemoryError( - "Too little memory to convert PIL image to array" - ) + msg = "Too little memory to convert PIL image to array" + raise MemoryError(msg) images[i] = a # Determine the sub rectangles images, xy = self.getSubRectangles(images) # Done - return images, xy + return (images, xy) def getSubRectangles(self, ims): - """getSubRectangles(ims) - - Calculate the minimal rectangles that need updating each frame. + """Calculate the minimal rectangles that need updating each frame. Returns a two-element tuple containing the cropped images and a list of x-y positions. @@ -312,11 +311,12 @@ def getSubRectangles(self, ims): # Check image count if len(ims) < 2: - return ims, [(0, 0) for i in ims] + return (ims, [(0, 0) for i in ims]) - # We need numpy + # We need NumPy if np is None: - raise RuntimeError("Need Numpy to calculate sub-rectangles. ") + msg = "Need NumPy to calculate sub-rectangles." + raise RuntimeError(msg) # Prepare ims2 = [ims[0]] @@ -334,11 +334,11 @@ def getSubRectangles(self, ims): Y = np.argwhere(diff.sum(1)) # Get rect coordinates if X.size and Y.size: - x0, x1 = int(X[0]), int(X[-1] + 1) - y0, y1 = int(Y[0]), int(Y[-1] + 1) + x0, x1 = (int(X[0]), int(X[-1] + 1)) + y0, y1 = (int(Y[0]), int(Y[-1] + 1)) else: # No change ... make it minimal - x0, x1 = 0, 2 - y0, y1 = 0, 2 + x0, x1 = (0, 2) + y0, y1 = (0, 2) # Cut out and store im2 = im[y0:y1, x0:x1] @@ -349,14 +349,13 @@ def getSubRectangles(self, ims): # Done # print('%1.2f seconds to determine subrectangles of %i images' % # (time.time()-t0, len(ims2))) - return ims2, xy + return (ims2, xy) def convertImagesToPIL(self, images, dither, nq=0): - """convertImagesToPIL(images, nq=0) - - Convert images to Paletted PIL images, which can then be + """Convert images to Paletted PIL images, which can then be written to a single animaged GIF. + convertImagesToPIL(images, nq=0) """ # Convert to PIL images @@ -374,7 +373,7 @@ def convertImagesToPIL(self, images, dither, nq=0): images2.append(im) # Convert to paletted PIL images - images, images2 = images2, [] + images, images2 = (images2, []) if nq >= 1: # NeuQuant algorithm for im in images: @@ -397,9 +396,7 @@ def convertImagesToPIL(self, images, dither, nq=0): return images2 def writeGifToFile(self, fp, images, durations, loops, xys, disposes): - """writeGifToFile(fp, images, durations, loops, xys, disposes) - - Given a set of images writes the bytes to the specified stream. + """Given a set of images writes the bytes to the specified stream. 
Requires different handling of palette for PIL and Pillow: based on https://github.com/rec/echomesh/blob/master/ code/python/external/images2gif.py @@ -407,7 +404,7 @@ def writeGifToFile(self, fp, images, durations, loops, xys, disposes): """ # Obtain palette for all images and count each occurrence - palettes, occur = [], [] + palettes, occur = ([], []) for im in images: if not pillow: palette = getheader(im)[1] @@ -442,33 +439,28 @@ def writeGifToFile(self, fp, images, durations, loops, xys, disposes): # Next frame is not the first firstFrame = False - if True: - # Write palette and image data - - # Gather info - data = getdata(im) - imdes, data = data[0], data[1:] - graphext = self.getGraphicsControlExt( - durations[frames], disposes[frames] - ) - # Make image descriptor suitable for using 256 local color palette - lid = self.getImageDescriptor(im, xys[frames]) - - # Write local header - if (palette != globalPalette) or (disposes[frames] != 2): - # Use local color palette - fp.write(graphext) - fp.write(lid) # write suitable image descriptor - fp.write(palette) # write local color table - fp.write("\x08") # LZW minimum size code - else: - # Use global color palette - fp.write(graphext) - fp.write(imdes) # write suitable image descriptor + # Write palette and image data + # Gather info + data = getdata(im) + imdes, data = (data[0], data[1:]) + graphext = self.getGraphicsControlExt(durations[frames], disposes[frames]) + # Make image descriptor suitable for using 256 local color palette + lid = self.getImageDescriptor(im, xys[frames]) + + # Write local header + fp.write(graphext) + if (palette != globalPalette) or (disposes[frames] != 2): + # Use local color palette + fp.write(lid) # write suitable image descriptor + fp.write(palette) # write local color table + fp.write("\x08") # LZW minimum size code + else: + # Use global color palette + fp.write(imdes) # write suitable image descriptor - # Write image data - for d in data: - fp.write(d) + # Write image data + for d in data: + fp.write(d) # Prepare for next round frames += 1 @@ -516,9 +508,7 @@ def writeGifPillow(filename, images, duration=0.1, repeat=True): """ loop = 0 if repeat else 1 - quantized = [] - for im in images: - quantized.append(im.quantize()) + quantized = [im.quantize() for im in images] quantized[0].save( filename, save_all=True, @@ -575,7 +565,8 @@ def writeGifVisvis( # Check PIL if PIL is None: - raise RuntimeError("Need PIL to write animated gif files.") + msg = "Need PIL to write animated gif files." + raise RuntimeError(msg) # Check images images = checkImages(images) @@ -596,7 +587,8 @@ def writeGifVisvis( if len(duration) == len(images): duration = list(duration) else: - raise ValueError("len(duration) doesn't match amount of images.") + msg = "len(duration) doesn't match amount of images." + raise ValueError(msg) else: duration = [duration for im in images] @@ -614,7 +606,8 @@ def writeGifVisvis( dispose = defaultDispose if hasattr(dispose, "__len__"): if len(dispose) != len(images): - raise ValueError("len(xy) doesn't match amount of images.") + msg = "len(xy) doesn't match amount of images." + raise ValueError(msg) else: dispose = [dispose for im in images] @@ -637,11 +630,13 @@ def readGif(filename, asNumpy=True): # Check PIL if PIL is None: - raise RuntimeError("Need PIL to read animated gif files.") + msg = "Need PIL to read animated gif files." 
+ raise RuntimeError(msg) - # Check Numpy + # Check NumPy if np is None: - raise RuntimeError("Need Numpy to read animated gif files.") + msg = "Need NumPy to read animated gif files." + raise RuntimeError(msg) # Check whether it exists if not os.path.isfile(filename): @@ -659,7 +654,8 @@ def readGif(filename, asNumpy=True): tmp = pilIm.convert() # Make without palette a = np.asarray(tmp) if len(a.shape) == 0: - raise MemoryError("Too little memory to convert PIL image to array") + msg = "Too little memory to convert PIL image to array" + raise MemoryError(msg) # Store, and next images.append(a) pilIm.seek(pilIm.tell() + 1) @@ -669,9 +665,7 @@ def readGif(filename, asNumpy=True): # Convert to normal PIL images if needed if not asNumpy: images2 = images - images = [] - for im in images2: - images.append(PIL.Image.fromarray(im)) + images = [PIL.Image.fromarray(im) for im in images2] # Done return images @@ -795,15 +789,18 @@ def setconstants(self, samplefac, colors): self.a_s = {} def __init__(self, image, samplefac=10, colors=256): - # Check Numpy + # Check NumPy if np is None: - raise RuntimeError("Need Numpy for the NeuQuant algorithm.") + msg = "Need NumPy for the NeuQuant algorithm." + raise RuntimeError(msg) # Check image if image.size[0] * image.size[1] < NeuQuant.MAXPRIME: - raise OSError("Image is too small") + msg = "Image is too small" + raise OSError(msg) if image.mode != "RGBA": - raise OSError("Image mode should be RGBA.") + msg = "Image mode should be RGBA." + raise OSError(msg) # Initialize self.setconstants(samplefac, colors) @@ -1078,7 +1075,7 @@ def quantize_with_scipy(self, image): return Image.fromarray(px).convert("RGB").quantize(palette=self.paletteImage()) def quantize_without_scipy(self, image): - """ " This function can be used if no scipy is available. + """This function can be used if no scipy is available. It's 7 times slower though. :param image: diff --git a/python/grass/imaging/images2ims.py b/python/grass/imaging/images2ims.py index d86a2247952..257e2162bb6 100644 --- a/python/grass/imaging/images2ims.py +++ b/python/grass/imaging/images2ims.py @@ -24,7 +24,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" Module images2ims +"""Module images2ims Use PIL to create a series of images. @@ -65,7 +65,7 @@ def checkImages(images): images2.append(im) # Ok elif im.dtype in [np.float32, np.float64]: theMax = im.max() - if theMax > 128 and theMax < 300: + if 128 < theMax < 300: pass # assume 0:255 else: im = im.copy() @@ -81,9 +81,11 @@ def checkImages(images): pass # ok elif im.ndim == 3: if im.shape[2] not in [3, 4]: - raise ValueError("This array can not represent an image.") + msg = "This array can not represent an image." + raise ValueError(msg) else: - raise ValueError("This array can not represent an image.") + msg = "This array can not represent an image." + raise ValueError(msg) else: raise ValueError("Invalid image type: " + str(type(im))) @@ -145,7 +147,8 @@ def writeIms(filename, images): # Check PIL if PIL is None: - raise RuntimeError("Need PIL to write series of image files.") + msg = "Need PIL to write series of image files." + raise RuntimeError(msg) # Check images images = checkImages(images) @@ -174,9 +177,7 @@ def writeIms(filename, images): def readIms(filename, asNumpy=True): - """readIms(filename, asNumpy=True) - - Read images from a series of images in a single directory. Returns a + """Read images from a series of images in a single directory. 
Returns a list of numpy arrays, or, if asNumpy is false, a list if PIL images. :param filename: @@ -185,11 +186,13 @@ def readIms(filename, asNumpy=True): # Check PIL if PIL is None: - raise RuntimeError("Need PIL to read a series of image files.") + msg = "Need PIL to read a series of image files." + raise RuntimeError(msg) - # Check Numpy + # Check NumPy if asNumpy and np is None: - raise RuntimeError("Need Numpy to return numpy arrays.") + msg = "Need NumPy to return numpy arrays." + raise RuntimeError(msg) # Get dirname and filename filename = os.path.abspath(filename) @@ -229,7 +232,8 @@ def readIms(filename, asNumpy=True): # Make numpy array a = np.asarray(im) if len(a.shape) == 0: - raise MemoryError("Too little memory to convert PIL image to array") + msg = "Too little memory to convert PIL image to array" + raise MemoryError(msg) # Add images.append(a) diff --git a/python/grass/imaging/images2swf.py b/python/grass/imaging/images2swf.py index 7d6f5a25f9b..1ea1a4e8413 100644 --- a/python/grass/imaging/images2swf.py +++ b/python/grass/imaging/images2swf.py @@ -24,7 +24,7 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -""" Module images2swf +"""Module images2swf Provides a function (writeSwf) to store a series of PIL images or numpy arrays in an SWF movie, that can be played on a wide range of OS's. @@ -92,8 +92,7 @@ def checkImages(images): - """checkImages(images) - Check numpy images and correct intensity range etc. + """Check numpy images and correct intensity range etc. The same for all movie formats. """ # Init results @@ -110,7 +109,7 @@ def checkImages(images): images2.append(im) # Ok elif im.dtype in [np.float32, np.float64]: theMax = im.max() - if theMax > 128 and theMax < 300: + if 128 < theMax < 300: pass # assume 0:255 else: im = im.copy() @@ -126,9 +125,11 @@ def checkImages(images): pass # ok elif im.ndim == 3: if im.shape[2] not in [3, 4]: - raise ValueError("This array can not represent an image.") + msg = "This array can not represent an image." + raise ValueError(msg) else: - raise ValueError("This array can not represent an image.") + msg = "This array can not represent an image." + raise ValueError(msg) else: raise ValueError("Invalid image type: " + str(type(im))) @@ -177,7 +178,8 @@ def Append(self, bits): if isinstance(bits, int): bits = str(bits) if not isinstance(bits, string_types): - raise ValueError("Append bits as strings or integers!") + msg = "Append bits as strings or integers!" + raise ValueError(msg) # add bits for bit in bits: @@ -240,7 +242,8 @@ def intToBits(i: int, n: int | None = None) -> BitArray: # justify if n is not None: if len(bb) > n: - raise ValueError("intToBits fail: len larger than padlength.") + msg = f"{intToBits.__name__} fail: len larger than padlength." + raise ValueError(msg) bb = str(bb).rjust(n, "0") # done @@ -319,7 +322,8 @@ def signedIntToBits(i: int, n: int | None = None) -> BitArray: bb = "0" + str(bb) # always need the sign bit in front if n is not None: if len(bb) > n: - raise ValueError("signedIntToBits fail: len larger than padlength.") + msg = f"{signedIntToBits.__name__} fail: len larger than padlength." + raise ValueError(msg) bb = bb.rjust(n, "0") # was it negative? (then opposite bits) @@ -361,7 +365,8 @@ def floatsToBits(arr): bits = intToBits(31, 5) # 32 does not fit in 5 bits! for i in arr: if i < 0: - raise ValueError("Dit not implement negative floats!") + msg = "Dit not implement negative floats!" 
+ raise ValueError(msg) i1 = int(i) i2 = i - i1 bits += intToBits(i1, 15) @@ -480,6 +485,7 @@ class SetBackgroundTag(ControlTag): """Set the color in 0-255, or 0-1 (if floats given).""" def __init__(self, *rgb): + super().__init__() self.tagtype = 9 if len(rgb) == 1: rgb = rgb[0] @@ -551,14 +557,16 @@ def __init__(self, im): if im.shape[2] == 4: tmp[:, :, 0] = im[:, :, 3] # swap channel where alpha is in else: - raise ValueError("Invalid shape to be an image.") + msg = "Invalid shape to be an image." + raise ValueError(msg) elif len(im.shape) == 2: tmp = np.ones((im.shape[0], im.shape[1], 4), dtype=np.uint8) * 255 for i in range(3): tmp[:, :, i + 1] = im[:, :] else: - raise ValueError("Invalid shape to be an image.") + msg = "Invalid shape to be an image." + raise ValueError(msg) # we changed the image to uint8 4 channels. # now compress! @@ -772,22 +780,25 @@ def writeSwf(filename, images, duration=0.1, repeat=True): """ - # Check Numpy + # Check NumPy if np is None: - raise RuntimeError("Need Numpy to write an SWF file.") + msg = "Need NumPy to write an SWF file." + raise RuntimeError(msg) - # Check images (make all Numpy) + # Check images (make all NumPy) images2 = [] images = checkImages(images) if not images: - raise ValueError("Image list is empty!") + msg = "Image list is empty!" + raise ValueError(msg) for im in images: if PIL and isinstance(im, PIL.Image.Image): if im.mode == "P": im = im.convert() im = np.asarray(im) if len(im.shape) == 0: - raise MemoryError("Too little memory to convert PIL image to array") + msg = "Too little memory to convert PIL image to array" + raise MemoryError(msg) images2.append(im) # Init @@ -798,7 +809,8 @@ def writeSwf(filename, images, duration=0.1, repeat=True): if len(duration) == len(images2): duration = list(duration) else: - raise ValueError("len(duration) doesn't match amount of images.") + msg = "len(duration) doesn't match amount of images." + raise ValueError(msg) else: duration = [duration for im in images2] @@ -836,9 +848,10 @@ def writeSwf(filename, images, duration=0.1, repeat=True): def _readPixels(bb, i, tagType, L1): """With pf's seed after the recordheader, reads the pixeldata.""" - # Check Numpy + # Check NumPy if np is None: - raise RuntimeError("Need Numpy to read an SWF file.") + msg = "Need NumPy to read an SWF file." + raise RuntimeError(msg) # Get info # charId = bb[i : i + 2] # unused @@ -898,11 +911,13 @@ def readSwf(filename, asNumpy=True): # Check PIL if (not asNumpy) and (PIL is None): - raise RuntimeError("Need PIL to return as PIL images.") + msg = "Need PIL to return as PIL images." + raise RuntimeError(msg) - # Check Numpy + # Check NumPy if np is None: - raise RuntimeError("Need Numpy to read SWF files.") + msg = "Need NumPy to read SWF files." + raise RuntimeError(msg) # Init images images = [] @@ -972,9 +987,7 @@ def readSwf(filename, asNumpy=True): # Convert to normal PIL images if needed if not asNumpy: images2 = images - images = [] - for im in images2: - images.append(PIL.Image.fromarray(im)) + images = [PIL.Image.fromarray(im) for im in images2] # Done return images diff --git a/python/grass/jupyter/__init__.py b/python/grass/jupyter/__init__.py index 21223c2cd47..0db59ca2ea7 100644 --- a/python/grass/jupyter/__init__.py +++ b/python/grass/jupyter/__init__.py @@ -13,7 +13,7 @@ # License (>=v2). Read the file COPYING that comes with GRASS # for details. -"""The *grass.jupyter* is a convenient GRASS GIS interface for Jupyter notebooks. +"""A convenient GRASS GIS interface for Jupyter notebooks. 
Python is a great tool for data science and scientific computing. Jupyter_ is an environment with computational notebooks which makes it even better tool for @@ -72,7 +72,7 @@ .. image:: https://mybinder.org/badge_logo.svg :target: - https://mybinder.org/v2/gh/OSGeo/grass/main?urlpath=lab%2Ftree%2Fdoc%2Fnotebooks%2Fjupyter_example.ipynb + https://mybinder.org/v2/gh/OSGeo/grass/main?urlpath=lab%2Ftree%2Fdoc%2Fexamples%2Fnotebooks%2Fjupyter_example.ipynb There are also internal classes and functions which are not guaranteed to have as stable API, although they are available through their specific submodules. @@ -103,7 +103,7 @@ .. _Jupyter: https://jupyter.org/ .. _wiki: https://grasswiki.osgeo.org/wiki/GRASS_GIS_Jupyter_notebooks -.. _GitHub: https://github.com/OSGeo/grass/blob/main/doc/notebooks/jupyter_example.ipynb +.. _GitHub: https://github.com/OSGeo/grass/blob/main/doc/examples/notebooks/jupyter_example.ipynb """ from .interactivemap import InteractiveMap, Raster, Vector diff --git a/python/grass/jupyter/baseseriesmap.py b/python/grass/jupyter/baseseriesmap.py index 4497b16593d..00ee394d119 100644 --- a/python/grass/jupyter/baseseriesmap.py +++ b/python/grass/jupyter/baseseriesmap.py @@ -239,9 +239,7 @@ def save( if not self._layers_rendered: self.render() - input_files = [] - for index in self._indices: - input_files.append(self._base_filename_dict[index]) + input_files = [self._base_filename_dict[index] for index in self._indices] save_gif( input_files, diff --git a/python/grass/jupyter/map.py b/python/grass/jupyter/map.py index 013ff693470..dc78be31208 100644 --- a/python/grass/jupyter/map.py +++ b/python/grass/jupyter/map.py @@ -163,7 +163,8 @@ def run(self, module, **kwargs): self._region_manager.adjust_rendering_size_from_region() gs.run_command(module, env=self._env, **kwargs) else: - raise ValueError("Module must begin with letter 'd'.") + msg = "Module must begin with letter 'd'." + raise ValueError(msg) def __getattr__(self, name): """Parse attribute to GRASS display module. Attribute should be in diff --git a/python/grass/jupyter/reprojection_renderer.py b/python/grass/jupyter/reprojection_renderer.py index d709b9e123c..db1dfd6b06f 100644 --- a/python/grass/jupyter/reprojection_renderer.py +++ b/python/grass/jupyter/reprojection_renderer.py @@ -12,7 +12,7 @@ # for details. """Reprojects rasters to Pseudo-Mercator and vectors to WGS84. Exports reprojected - rasters and vectors to PNGs and geoJSONs, respectively.""" +rasters and vectors to PNGs and geoJSONs, respectively.""" import os import tempfile diff --git a/python/grass/jupyter/seriesmap.py b/python/grass/jupyter/seriesmap.py index ae6bde911b6..aaed821d1eb 100644 --- a/python/grass/jupyter/seriesmap.py +++ b/python/grass/jupyter/seriesmap.py @@ -158,9 +158,10 @@ def render(self): (i.e. show or save). """ if not self._baseseries_added: - raise RuntimeError( + msg = ( "Cannot render series since none has been added." 
"Use SeriesMap.add_rasters() or SeriesMap.add_vectors()" ) + raise RuntimeError(msg) tasks = [(i,) for i in range(self.baseseries)] self._render(tasks) diff --git a/python/grass/jupyter/setup.py b/python/grass/jupyter/setup.py index 0ed64739039..0d7edec9767 100644 --- a/python/grass/jupyter/setup.py +++ b/python/grass/jupyter/setup.py @@ -87,8 +87,8 @@ def switch_mapset(self, path, location=None, mapset=None): gisenv = gs.gisenv() if ( not location - and not mapset - and len(Path(path).parts) == 1 + and (not mapset) + and (len(Path(path).parts) == 1) and mapset_exists( path=gisenv["GISDBASE"], location=gisenv["LOCATION_NAME"], mapset=path ) diff --git a/python/grass/jupyter/timeseriesmap.py b/python/grass/jupyter/timeseriesmap.py index 2acfa575126..0379e19a4c0 100644 --- a/python/grass/jupyter/timeseriesmap.py +++ b/python/grass/jupyter/timeseriesmap.py @@ -170,7 +170,8 @@ def add_raster_series(self, baseseries, fill_gaps=False): :param bool fill_gaps: fill empty time steps with data from previous step """ if self._baseseries_added and self.baseseries != baseseries: - raise AttributeError("Cannot add more than one space time dataset") + msg = "Cannot add more than one space time dataset" + raise AttributeError(msg) self._element_type = "strds" check_timeseries_exists(baseseries, self._element_type) self.baseseries = baseseries @@ -193,7 +194,8 @@ def add_vector_series(self, baseseries, fill_gaps=False): :param bool fill_gaps: fill empty time steps with data from previous step """ if self._baseseries_added and self.baseseries != baseseries: - raise AttributeError("Cannot add more than one space time dataset") + msg = "Cannot add more than one space time dataset" + raise AttributeError(msg) self._element_type = "stvds" check_timeseries_exists(baseseries, self._element_type) self.baseseries = baseseries @@ -293,11 +295,12 @@ def _render_worker(self, date, layer, filename): def render(self): """Renders image for each time-step in space-time dataset.""" if not self._baseseries_added: - raise RuntimeError( + msg = ( "Cannot render space time dataset since none has been added." " Use TimeSeriesMap.add_raster_series() or " "TimeSeriesMap.add_vector_series() to add dataset" ) + raise RuntimeError(msg) # Create name for empty layers random_name_none = gs.append_random("none", 8) + ".png" diff --git a/python/grass/jupyter/utils.py b/python/grass/jupyter/utils.py index b06553d2d3b..c443d2590f2 100644 --- a/python/grass/jupyter/utils.py +++ b/python/grass/jupyter/utils.py @@ -11,6 +11,7 @@ # for details. """Utility functions warpping existing processes in a suitable way""" +from collections.abc import Mapping import tempfile import json import os @@ -303,7 +304,9 @@ def query_vector(coord, vector_list, distance): return _style_table(final_output) -def estimate_resolution(raster, mapset, location, dbase, env): +def estimate_resolution( + raster: str, mapset: str, location: str, dbase: str, env: Mapping +) -> float: """Estimates resolution of reprojected raster. 
:param str raster: name of raster @@ -326,9 +329,9 @@ def estimate_resolution(raster, mapset, location, dbase, env): ).strip() params = gs.parse_key_val(output, vsep=" ") output = gs.read_command("g.region", flags="ug", env=env, **params) - output = gs.parse_key_val(output, val_type=float) - cell_ns = (output["n"] - output["s"]) / output["rows"] - cell_ew = (output["e"] - output["w"]) / output["cols"] + keyval = gs.parse_key_val(output, val_type=float) + cell_ns = (keyval["n"] - keyval["s"]) / keyval["rows"] + cell_ew = (keyval["e"] - keyval["w"]) / keyval["cols"] return (cell_ew + cell_ns) / 2.0 diff --git a/python/grass/pydispatch/saferef.py b/python/grass/pydispatch/saferef.py index 90769c46b36..bbbd7dd518d 100644 --- a/python/grass/pydispatch/saferef.py +++ b/python/grass/pydispatch/saferef.py @@ -164,12 +164,6 @@ def __nonzero__(self): __bool__ = __nonzero__ - def __cmp__(self, other): - """Compare with another reference""" - if not isinstance(other, self.__class__): - return cmp(self.__class__, type(other)) - return cmp(self.key, other.key) - def __call__(self): """Return a strong reference to the bound method diff --git a/python/grass/pydispatch/signal.py b/python/grass/pydispatch/signal.py index 51808e01ee3..12e73e71882 100644 --- a/python/grass/pydispatch/signal.py +++ b/python/grass/pydispatch/signal.py @@ -266,8 +266,7 @@ def __call__(self, *args, **kwargs): Traceback (most recent call last): TypeError: mywrite() takes exactly 1 argument (0 given) """ - if "signal" in kwargs: - del kwargs["signal"] + kwargs.pop("signal", None) self.emit(*args, **kwargs) diff --git a/python/grass/pygrass/gis/__init__.py b/python/grass/pygrass/gis/__init__.py index 29fecd3d699..e34abc405c4 100644 --- a/python/grass/pygrass/gis/__init__.py +++ b/python/grass/pygrass/gis/__init__.py @@ -113,9 +113,11 @@ def make_mapset(mapset, location=None, gisdbase=None): :type gisdbase: str""" res = libgis.G_make_mapset(gisdbase, location, mapset) if res == -1: - raise GrassError("Cannot create new mapset") + msg = "Cannot create new mapset" + raise GrassError(msg) if res == -2: - raise GrassError("Illegal name") + msg = "Illegal name" + raise GrassError(msg) class Gisdbase: @@ -171,7 +173,8 @@ def __iter__(self): # TODO remove or complete this function def new_location(self): if libgis.G_make_location() != 0: - raise GrassError("Cannot create new location") + msg = "Cannot create new location" + raise GrassError(msg) def locations(self): """Return a list of locations that are available in the gisdbase: :: @@ -269,7 +272,7 @@ def mapsets(self, pattern=None, permissions=True): [...] """ - mapsets = [mapset for mapset in self] # noqa: C416 + mapsets = [mapset for mapset in self] # noqa: C416 # pylint: disable=R1721 if permissions: mapsets = [ mapset @@ -402,7 +405,8 @@ def current(self): def delete(self): """Delete the mapset""" if self.is_current(): - raise GrassError("The mapset is in use.") + msg = "The mapset is in use." 
+ raise GrassError(msg) shutil.rmtree(self.path()) def path(self): @@ -459,7 +463,8 @@ def add(self, mapset): with open(self.spath, "a+") as f: f.write("%s\n" % mapset) else: - raise TypeError("Mapset not found") + msg = "Mapset not found" + raise TypeError(msg) def remove(self, mapset): """Remove mapset to the search path @@ -501,10 +506,11 @@ def reset(self): doctest.testmod() - # Remove the generated vector map, if exist mset = utils.get_mapset_vector(test_vector_name, mapset="") if mset: + # Remove the generated vector map, if exists run_command("g.remove", flags="f", type="vector", name=test_vector_name) mset = utils.get_mapset_raster(test_raster_name, mapset="") if mset: + # Remove the generated raster map, if exists run_command("g.remove", flags="f", type="raster", name=test_raster_name) diff --git a/python/grass/pygrass/gis/region.py b/python/grass/pygrass/gis/region.py index 892a3664b49..b1544c13664 100644 --- a/python/grass/pygrass/gis/region.py +++ b/python/grass/pygrass/gis/region.py @@ -448,7 +448,8 @@ def from_rast(self, raster_name): .. """ if not raster_name: - raise ValueError("Raster name or mapset are invalid") + msg = "Raster name or mapset are invalid" + raise ValueError(msg) mapset = get_mapset_raster(raster_name) @@ -602,7 +603,8 @@ def write(self): """ self.adjust() if libgis.G_put_window(self.byref()) < 0: - raise GrassError("Cannot change region (WIND file).") + msg = "Cannot change region (WIND file)." + raise GrassError(msg) def read_default(self): """ @@ -672,7 +674,7 @@ def set_bbox(self, bbox): doctest.testmod() - """Remove the generated vector map, if exist""" + # Remove the generated vector map, if exists mset = utils.get_mapset_vector(test_vector_name, mapset="") if mset: run_command("g.remove", flags="f", type="vector", name=test_vector_name) diff --git a/python/grass/pygrass/messages/__init__.py b/python/grass/pygrass/messages/__init__.py index b5fe052883c..cf83a7722c4 100644 --- a/python/grass/pygrass/messages/__init__.py +++ b/python/grass/pygrass/messages/__init__.py @@ -339,7 +339,7 @@ def test_fatal_error(self, message: str) -> None: def get_msgr( - _instance=[ + instance=[ None, ], *args, @@ -357,9 +357,9 @@ def get_msgr( >>> msgr0 is msgr2 False """ - if not _instance[0]: - _instance[0] = Messenger(*args, **kwargs) - return _instance[0] + if not instance[0]: + instance[0] = Messenger(*args, **kwargs) + return instance[0] if __name__ == "__main__": diff --git a/python/grass/pygrass/modules/grid/grid.py b/python/grass/pygrass/modules/grid/grid.py index ffae173ca79..5df1cb3aca7 100644 --- a/python/grass/pygrass/modules/grid/grid.py +++ b/python/grass/pygrass/modules/grid/grid.py @@ -319,12 +319,10 @@ def get_cmd(cmdd): >>> get_cmd(slp.get_dict()) # doctest: +ELLIPSIS ['r.slope.aspect', 'elevation=ele', 'format=degrees', ..., '--o'] """ - cmd = [ + return [ cmdd["name"], - ] - cmd.extend(("%s=%s" % (k, v) for k, v in cmdd["inputs"] if not isinstance(v, list))) - cmd.extend( - ( + *("%s=%s" % (k, v) for k, v in cmdd["inputs"] if not isinstance(v, list)), + *( "%s=%s" % ( k, @@ -332,21 +330,16 @@ def get_cmd(cmdd): ) for k, vals in cmdd["inputs"] if isinstance(vals, list) - ) - ) - cmd.extend( - ("%s=%s" % (k, v) for k, v in cmdd["outputs"] if not isinstance(v, list)) - ) - cmd.extend( - ( + ), + *("%s=%s" % (k, v) for k, v in cmdd["outputs"] if not isinstance(v, list)), + *( "%s=%s" % (k, ",".join([repr(v) for v in vals])) for k, vals in cmdd["outputs"] if isinstance(vals, list) - ) - ) - cmd.extend(f"-{flg}" for flg in cmdd["flags"] if len(flg) == 1) 
- cmd.extend(f"--{flg[0]}" for flg in cmdd["flags"] if len(flg) > 1) - return cmd + ), + *(f"-{flg}" for flg in cmdd["flags"] if len(flg) == 1), + *(f"--{flg[0]}" for flg in cmdd["flags"] if len(flg) > 1), + ] def cmd_exe(args): @@ -385,10 +378,7 @@ def cmd_exe(args): sub.Popen(["g.region", "raster=%s" % key], shell=shell, env=env).wait() else: # set the computational region - lcmd = [ - "g.region", - ] - lcmd.extend(["%s=%s" % (k, v) for k, v in bbox.items()]) + lcmd = ["g.region", *["%s=%s" % (k, v) for k, v in bbox.items()]] sub.Popen(lcmd, shell=shell, env=env).wait() if groups: copy_groups(groups, gisrc_src, gisrc_dst) @@ -582,7 +572,7 @@ def estimate_tile_size(self): self.height = ceil(region.rows / self.processes) def get_works(self): - """Return a list of tuble with the parameters for cmd_exe function""" + """Return a list of tuples with the parameters for cmd_exe function""" works = [] reg = Region() if self.move: @@ -597,13 +587,17 @@ def get_works(self): if self.inlist: inms = {} cols = len(box_row) + + indx = row * cols + col for key in self.inlist: - indx = row * cols + col inms[key] = "%s@%s" % (self.inlist[key][indx], self.mset.name) # set the computational region, prepare the region parameters - bbox = {k[0]: str(v) for k, v in box.items()[:-2]} - bbox["nsres"] = "%f" % reg.nsres - bbox["ewres"] = "%f" % reg.ewres + bbox = { + **{k[0]: str(v) for k, v in box.items()[:-2]}, + "nsres": "%f" % reg.nsres, + "ewres": "%f" % reg.ewres, + } + new_mset = ( self.msetstr % (self.start_row + row, self.start_col + col), ) diff --git a/python/grass/pygrass/modules/grid/split.py b/python/grass/pygrass/modules/grid/split.py index cac604327d8..17aa4905467 100644 --- a/python/grass/pygrass/modules/grid/split.py +++ b/python/grass/pygrass/modules/grid/split.py @@ -99,10 +99,9 @@ def split_region_in_overlapping_tiles(region=None, width=100, height=100, overla box_list = [] # print reg for row in range(nrows): - row_list = [] - for col in range(ncols): - # print 'c', c, 'r', r - row_list.append(get_bbox(reg, row, col, width, height, overlap)) + row_list = [ + get_bbox(reg, row, col, width, height, overlap) for col in range(ncols) + ] box_list.append(row_list) return box_list @@ -123,9 +122,10 @@ def split_region_tiles(region=None, width=100, height=100): nrows = (reg.rows + height - 1) // height box_list = [] for row in range(nrows): - row_list = [] - for col in range(ncols): - row_list.append(get_tile_start_end_row_col(reg, row, col, width, height)) + row_list = [ + get_tile_start_end_row_col(reg, row, col, width, height) + for col in range(ncols) + ] box_list.append(row_list) return box_list @@ -146,11 +146,9 @@ def get_overlap_region_tiles(region=None, width=100, height=100, overlap=0): ncols = (reg.cols + width - 1) // width nrows = (reg.rows + height - 1) // height box_list = [] - # print reg for row in range(nrows): - row_list = [] - for col in range(ncols): - # print 'c', c, 'r', r - row_list.append(get_bbox(reg, row, col, width, height, -overlap)) + row_list = [ + get_bbox(reg, row, col, width, height, -overlap) for col in range(ncols) + ] box_list.append(row_list) return box_list diff --git a/python/grass/pygrass/modules/interface/docstring.py b/python/grass/pygrass/modules/interface/docstring.py index 397338fcbb8..86c556559bf 100644 --- a/python/grass/pygrass/modules/interface/docstring.py +++ b/python/grass/pygrass/modules/interface/docstring.py @@ -47,7 +47,9 @@ def __get__(self, obj, type=None): return self.fget(obj) def __set__(self, obj, value): - raise AttributeError("can't set 
attribute") + msg = "Can't set attribute" + raise AttributeError(msg) def __delete__(self, obj): - raise AttributeError("can't delete attribute") + msg = "Can't delete attribute" + raise AttributeError(msg) diff --git a/python/grass/pygrass/modules/interface/env.py b/python/grass/pygrass/modules/interface/env.py index d3d6e371238..e355a5af30e 100644 --- a/python/grass/pygrass/modules/interface/env.py +++ b/python/grass/pygrass/modules/interface/env.py @@ -12,7 +12,8 @@ def get_env(): """Parse the GISRC file and return the GRASS variales""" gisrc = os.environ.get("GISRC") if gisrc is None: - raise RuntimeError("You are not in a GRASS session, GISRC not found.") + msg = "You are not in a GRASS session, GISRC not found." + raise RuntimeError(msg) with open(gisrc) as grc: return { k.strip(): v.strip() for k, v in [row.split(":", 1) for row in grc if row] diff --git a/python/grass/pygrass/modules/interface/module.py b/python/grass/pygrass/modules/interface/module.py index 7f70d82bac1..eebb081cada 100644 --- a/python/grass/pygrass/modules/interface/module.py +++ b/python/grass/pygrass/modules/interface/module.py @@ -548,7 +548,8 @@ def __init__(self, cmd, *args, **kargs): if isinstance(cmd, str): self.name = cmd else: - raise GrassError("Problem initializing the module {s}".format(s=cmd)) + msg = "Problem initializing the module {s}".format(s=cmd) + raise GrassError(msg) try: # call the command with --interface-description get_cmd_xml = Popen([cmd, "--interface-description"], stdout=PIPE) @@ -773,12 +774,12 @@ def get_dict(self): """Return a dictionary that includes the name, all valid inputs, outputs and flags """ - dic = {} - dic["name"] = self.name - dic["inputs"] = [(k, v.value) for k, v in self.inputs.items() if v.value] - dic["outputs"] = [(k, v.value) for k, v in self.outputs.items() if v.value] - dic["flags"] = [flg for flg in self.flags if self.flags[flg].value] - return dic + return { + "name": self.name, + "inputs": [(k, v.value) for k, v in self.inputs.items() if v.value], + "outputs": [(k, v.value) for k, v in self.outputs.items() if v.value], + "flags": [flg for flg in self.flags if self.flags[flg].value], + } def make_cmd(self): """Create the command string that can be executed in a shell diff --git a/python/grass/pygrass/modules/interface/parameter.py b/python/grass/pygrass/modules/interface/parameter.py index d950cd40008..4c9b8571250 100644 --- a/python/grass/pygrass/modules/interface/parameter.py +++ b/python/grass/pygrass/modules/interface/parameter.py @@ -112,10 +112,11 @@ def check_string(value): good = True break if not good: - raise ValueError( - f"The Parameter <{param.name}>, must be one of the following " + msg = ( + f"The parameter <{param.name}>, must be one of the following " f"values: {param.values!r} not '{newvalue}'" ) + raise ValueError(msg) return ( ( [ @@ -148,7 +149,7 @@ class Parameter: >>> param.value = 3 Traceback (most recent call last): ... - ValueError: The Parameter , must be one of the following values: [2, 4, 6, 8] not '3' + ValueError: The parameter , must be one of the following values: [2, 4, 6, 8] not '3' ... 
""" # noqa: E501 @@ -160,7 +161,8 @@ def __init__(self, xparameter=None, diz=None): self.max = None diz = element2dict(xparameter) if xparameter is not None else diz if diz is None: - raise TypeError("Xparameter or diz are required") + msg = "xparameter or diz are required" + raise TypeError(msg) self.name = diz["name"] self.required = diz["required"] == "yes" self.multiple = diz["multiple"] == "yes" diff --git a/python/grass/pygrass/modules/interface/typedict.py b/python/grass/pygrass/modules/interface/typedict.py index fb3ddb368eb..3fe9d59aabf 100644 --- a/python/grass/pygrass/modules/interface/typedict.py +++ b/python/grass/pygrass/modules/interface/typedict.py @@ -62,8 +62,4 @@ def __reduce__(self): ) def used(self): - key_dict = {} - for key in self: - if getattr(self, key): - key_dict[key] = getattr(self, key) - return key_dict + return {key: getattr(self, key) for key in self if getattr(self, key)} diff --git a/python/grass/pygrass/raster/__init__.py b/python/grass/pygrass/raster/__init__.py index 61ec85841cb..cf3ab0f234d 100644 --- a/python/grass/pygrass/raster/__init__.py +++ b/python/grass/pygrass/raster/__init__.py @@ -13,6 +13,7 @@ libgis.G_gisinit("") +# flake8: noqa: E402 # import pygrass modules from grass.pygrass.errors import must_be_open from grass.pygrass.gis.region import Region @@ -25,6 +26,8 @@ from grass.pygrass.raster.segment import Segment from grass.pygrass.raster.rowio import RowIO +# flake8: qa + WARN_OVERWRITE = "Raster map <{0}> already exists and will be overwritten" test_raster_name = "Raster_test_map" @@ -130,9 +133,6 @@ class RasterRow(RasterAbstractBase): """ - def __init__(self, name, mapset="", *args, **kargs): - super().__init__(name, mapset, *args, **kargs) - # mode = "r", method = "row", @must_be_open def get_row(self, row, row_buffer=None): @@ -214,7 +214,8 @@ def open(self, mode=None, mtype=None, overwrite=None): raise OpenError(_("Raster type not defined")) self._fd = libraster.Rast_open_new(self.name, self._gtype) else: - raise OpenError("Open mode: %r not supported, valid mode are: r, w") + msg = "Open mode: %r not supported, valid mode are: r, w" + raise OpenError(msg) # read rows and cols from the active region self._rows = libraster.Rast_window_rows() self._cols = libraster.Rast_window_cols() @@ -336,7 +337,8 @@ def __setitem__(self, key, row): if key >= self._rows: raise IndexError(_("Index out of range: %r.") % key) return self.put_row(key, row) - raise TypeError("Invalid argument type.") + msg = "Invalid argument type." + raise TypeError(msg) @must_be_open def map2segment(self): @@ -743,10 +745,11 @@ def numpy2raster(array, mtype, rastname, overwrite=False): doctest.testmod() - """Remove the generated vector map, if exist""" mset = utils.get_mapset_raster(test_raster_name, mapset="") if mset: + # Remove the generated vector map, if exists Module("g.remove", flags="f", type="raster", name=test_raster_name) mset = utils.get_mapset_raster(test_raster_name + "_segment", mapset="") if mset: + # Remove the generated raster map, if exists Module("g.remove", flags="f", type="raster", name=test_raster_name + "_segment") diff --git a/python/grass/pygrass/raster/abstract.py b/python/grass/pygrass/raster/abstract.py index 68b5e501ed4..a27350171f1 100644 --- a/python/grass/pygrass/raster/abstract.py +++ b/python/grass/pygrass/raster/abstract.py @@ -398,15 +398,15 @@ def __getitem__(self, key): return self.get(x, y) if isinstance(key, int): if not self.is_open(): - raise IndexError("Can not operate on a closed map. 
Call open() first.") + msg = "Can not operate on a closed map. Call open() first." + raise IndexError(msg) if key < 0: # Handle negative indices key += self._rows if key >= self._rows: - raise IndexError( - "The row index {0} is out of range [0, {1}).".format( - key, self._rows - ) + msg = "The row index {0} is out of range [0, {1}).".format( + key, self._rows ) + raise IndexError(msg) return self.get_row(key) fatal("Invalid argument type.") @@ -631,7 +631,7 @@ def set_cat(self, label, min_cat, max_cat=None, index=None): doctest.testmod() - """Remove the generated vector map, if exist""" mset = utils.get_mapset_raster(test_raster_name, mapset="") if mset: + # Remove the generated vector map, if exists Module("g.remove", flags="f", type="raster", name=test_raster_name) diff --git a/python/grass/pygrass/raster/buffer.py b/python/grass/pygrass/raster/buffer.py index 0e34ac84159..67ba86563bc 100644 --- a/python/grass/pygrass/raster/buffer.py +++ b/python/grass/pygrass/raster/buffer.py @@ -45,8 +45,8 @@ def __array_finalize__(self, obj): def __array_wrap__(self, out_arr, context=None): """See: - http://docs.scipy.org/doc/numpy/user/ - basics.subclassing.html#array-wrap-for-ufuncs""" + https://numpy.org/doc/stable/user/basics.subclassing.html#array-wrap-for-ufuncs-and-other-functions + """ if out_arr.dtype == bool: # there is not support for boolean maps, so convert into integer out_arr = out_arr.astype(np.int32) diff --git a/python/grass/pygrass/raster/category.py b/python/grass/pygrass/raster/category.py index 9c34bb8390f..20fca524da5 100644 --- a/python/grass/pygrass/raster/category.py +++ b/python/grass/pygrass/raster/category.py @@ -107,7 +107,7 @@ def __repr__(self): return "[{0}]".format(",\n ".join(cats)) def _chk_index(self, index): - if type(index) == str: + if isinstance(index, str): try: index = self.labels().index(index) except ValueError: @@ -115,15 +115,17 @@ def _chk_index(self, index): return index def _chk_value(self, value): - if type(value) == tuple: + if isinstance(value, tuple): length = len(value) if length == 2: label, min_cat = value value = (label, min_cat, None) elif length < 2 or length > 3: - raise TypeError("Tuple with a length that is not supported.") + msg = "Tuple with a length that is not supported." + raise TypeError(msg) else: - raise TypeError("Only Tuple are supported.") + msg = "Only tuples are supported." + raise TypeError(msg) return value def __getitem__(self, index): @@ -204,7 +206,8 @@ def set_cat(self, index, value): elif index < (len(self)): self[index] = value else: - raise TypeError("Index outside range.") + msg = "Index outside range." + raise TypeError(msg) def reset(self): for i in range(len(self) - 1, -1, -1): @@ -244,7 +247,8 @@ def read(self): self.name, self.mapset, ctypes.byref(self.c_cats) ) if err == -1: - raise GrassError("Can not read the categories.") + msg = "Can not read the categories." + raise GrassError(msg) # copy from C struct to list self._read_cats() @@ -280,7 +284,8 @@ def set_cats_fmt(self, fmt, m1, a1, m2, a2): void Rast_set_cats_fmt() """ # TODO: add - raise ImplementationError("set_cats_fmt() is not implemented yet.") + msg = f"{self.set_cats_fmt.__name__}() is not implemented yet." 
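The Category changes above replace type(x) == tuple with isinstance() and normalise 2-tuples to 3-tuples before use. A small sketch of that normalisation, independent of the pygrass classes (normalize_cat is an invented name used only for illustration):

def normalize_cat(value):
    """Return (label, min_cat, max_cat), accepting 2- or 3-tuples."""
    if not isinstance(value, tuple):
        msg = "Only tuples are supported."
        raise TypeError(msg)
    if len(value) == 2:
        label, min_cat = value
        return (label, min_cat, None)
    if len(value) == 3:
        return value
    msg = "Tuple with a length that is not supported."
    raise TypeError(msg)


print(normalize_cat(("urban", 1)))     # -> ('urban', 1, None)
print(normalize_cat(("water", 2, 5)))  # -> ('water', 2, 5)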
+ raise ImplementationError(msg) def read_rules(self, filename, sep=":"): """Copy categories from a rules file, default separator is ':', the @@ -308,7 +313,8 @@ def read_rules(self, filename, sep=":"): elif len(cat) == 3: label, min_cat, max_cat = cat else: - raise TypeError("Row length is greater than 3") + msg = "Row length is greater than 3" + raise TypeError(msg) self.append((label, min_cat, max_cat)) def write_rules(self, filename, sep=":"): diff --git a/python/grass/pygrass/raster/rowio.py b/python/grass/pygrass/raster/rowio.py index ed43c9a8452..a49da7efe26 100644 --- a/python/grass/pygrass/raster/rowio.py +++ b/python/grass/pygrass/raster/rowio.py @@ -64,7 +64,8 @@ def open(self, fd, rows, cols, mtype): ) == -1 ): - raise GrassError("Fatal error, Rowio not setup correctly.") + msg = f"Fatal error, {RowIO.__name__} not setup correctly." + raise GrassError(msg) def release(self): librowio.Rowio_release(ctypes.byref(self.c_rowio)) diff --git a/python/grass/pygrass/raster/testsuite/test_category.py b/python/grass/pygrass/raster/testsuite/test_category.py index 113264c82f6..a00525a4108 100644 --- a/python/grass/pygrass/raster/testsuite/test_category.py +++ b/python/grass/pygrass/raster/testsuite/test_category.py @@ -54,7 +54,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Remove the generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="raster", name=cls.name) cls.del_temp_region() diff --git a/python/grass/pygrass/raster/testsuite/test_history.py b/python/grass/pygrass/raster/testsuite/test_history.py index c369ac8da02..4ec01fb3103 100644 --- a/python/grass/pygrass/raster/testsuite/test_history.py +++ b/python/grass/pygrass/raster/testsuite/test_history.py @@ -35,7 +35,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Remove the generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="raster", name=cls.name) cls.del_temp_region() diff --git a/python/grass/pygrass/raster/testsuite/test_numpy.py b/python/grass/pygrass/raster/testsuite/test_numpy.py index b23926d8ba7..fb83ab61493 100644 --- a/python/grass/pygrass/raster/testsuite/test_numpy.py +++ b/python/grass/pygrass/raster/testsuite/test_numpy.py @@ -38,7 +38,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Remove the generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="raster", name=cls.name) cls.del_temp_region() diff --git a/python/grass/pygrass/raster/testsuite/test_pygrass_raster.py b/python/grass/pygrass/raster/testsuite/test_pygrass_raster.py index 2f20de582f4..393ff42fc6f 100644 --- a/python/grass/pygrass/raster/testsuite/test_pygrass_raster.py +++ b/python/grass/pygrass/raster/testsuite/test_pygrass_raster.py @@ -21,7 +21,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Remove the generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="raster", name=cls.name) cls.del_temp_region() diff --git a/python/grass/pygrass/raster/testsuite/test_raster_img.py b/python/grass/pygrass/raster/testsuite/test_raster_img.py index b873b16a86b..8781dd20d46 100644 --- a/python/grass/pygrass/raster/testsuite/test_raster_img.py +++ b/python/grass/pygrass/raster/testsuite/test_raster_img.py @@ -37,7 +37,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Remove the 
generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="raster", name=cls.name) cls.del_temp_region() diff --git a/python/grass/pygrass/raster/testsuite/test_raster_region.py b/python/grass/pygrass/raster/testsuite/test_raster_region.py index cb8cc8c30c8..259c16c11f7 100644 --- a/python/grass/pygrass/raster/testsuite/test_raster_region.py +++ b/python/grass/pygrass/raster/testsuite/test_raster_region.py @@ -22,7 +22,7 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - """Remove the generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="raster", name=cls.name) cls.del_temp_region() @@ -65,16 +65,14 @@ def test_resampling_2(self): rast.set_region(region) rast.open(mode="r") - """ - [nan, nan, nan, nan, nan, nan, nan, nan] - [nan, nan, nan, nan, nan, nan, nan, nan] - [nan, nan, 11.0, 21.0, 31.0, 41.0, nan, nan] - [nan, nan, 12.0, 22.0, 32.0, 42.0, nan, nan] - [nan, nan, 13.0, 23.0, 33.0, 43.0, nan, nan] - [nan, nan, 14.0, 24.0, 34.0, 44.0, nan, nan] - [nan, nan, nan, nan, nan, nan, nan, nan] - [nan, nan, nan, nan, nan, nan, nan, nan] - """ + # [nan, nan, nan, nan, nan, nan, nan, nan] + # [nan, nan, nan, nan, nan, nan, nan, nan] + # [nan, nan, 11.0, 21.0, 31.0, 41.0, nan, nan] + # [nan, nan, 12.0, 22.0, 32.0, 42.0, nan, nan] + # [nan, nan, 13.0, 23.0, 33.0, 43.0, nan, nan] + # [nan, nan, 14.0, 24.0, 34.0, 44.0, nan, nan] + # [nan, nan, nan, nan, nan, nan, nan, nan] + # [nan, nan, nan, nan, nan, nan, nan, nan] self.assertCountEqual(rast[2].tolist()[2:6], [11.0, 21.0, 31.0, 41.0]) self.assertCountEqual(rast[5].tolist()[2:6], [14.0, 24.0, 34.0, 44.0]) diff --git a/python/grass/pygrass/rpc/__init__.py b/python/grass/pygrass/rpc/__init__.py index e95c3453250..a0737288d88 100644 --- a/python/grass/pygrass/rpc/__init__.py +++ b/python/grass/pygrass/rpc/__init__.py @@ -116,9 +116,8 @@ def _get_vector_table_as_dict(lock, conn, data): table = layer.table_to_dict(where=where) layer.close() - ret = {} - ret["table"] = table - ret["columns"] = columns + ret = {"table": table, "columns": columns} + finally: # Send even if an exception was raised. 
conn.send(ret) @@ -478,7 +477,7 @@ def get_vector_features_as_wkb_list( doctest.testmod() - """Remove the generated maps, if exist""" + # Remove the generated maps, if exist mset = utils.get_mapset_raster(test_raster_name, mapset="") if mset: Module("g.remove", flags="f", type="raster", name=test_raster_name) diff --git a/python/grass/pygrass/rpc/base.py b/python/grass/pygrass/rpc/base.py index c436300c170..5b811d2a7f7 100644 --- a/python/grass/pygrass/rpc/base.py +++ b/python/grass/pygrass/rpc/base.py @@ -25,6 +25,10 @@ from multiprocessing.connection import Connection from multiprocessing.synchronize import _LockLike + +logger: logging.Logger = logging.getLogger(__name__) + + ############################################################################### @@ -45,7 +49,8 @@ def dummy_server(lock: _LockLike, conn: Connection) -> NoReturn: conn.close() sys.exit() if data[0] == 1: - raise Exception("Server process intentionally killed by exception") + msg = "Server process intentionally killed by exception" + raise Exception(msg) class RPCServerBase: @@ -134,7 +139,7 @@ def thread_checker(self): def start_server(self): """This function must be re-implemented in the subclasses""" - logging.debug("Start the libgis server") + logger.debug("Start the libgis server") self.client_conn, self.server_conn = Pipe(True) self.lock = Lock() @@ -147,7 +152,7 @@ def check_server(self): def _check_restart_server(self, caller="main thread") -> None: """Restart the server if it was terminated""" - logging.debug("Check libgis server restart") + logger.debug("Check libgis server restart") with self.threadLock: if self.server is not None and self.server.is_alive() is True: @@ -159,9 +164,9 @@ def _check_restart_server(self, caller="main thread") -> None: self.start_server() if self.stopped is not True: - logging.warning( - "Needed to restart the libgis server, caller: {caller}", - caller=caller, + logger.warning( + "Needed to restart the libgis server, caller: %(caller)s", + {"caller": caller}, ) self.stopped = False @@ -169,7 +174,8 @@ def _check_restart_server(self, caller="main thread") -> None: def safe_receive(self, message): """Receive the data and throw a FatalError exception in case the server process was killed and the pipe was closed by the checker thread""" - logging.debug("Receive message: {message}") + if logger.isEnabledFor(logging.DEBUG): + logger.debug("Receive message: %s", message) try: ret = self.client_conn.recv() @@ -186,7 +192,7 @@ def stop(self): This method should be called at exit using the package atexit """ - logging.debug("Stop libgis server") + logger.debug("Stop libgis server") self.stop_checker_thread() if self.server is not None and self.server.is_alive(): diff --git a/python/grass/pygrass/tests/benchmark.py b/python/grass/pygrass/tests/benchmark.py index fa7f0c01a65..f2915ff60d7 100644 --- a/python/grass/pygrass/tests/benchmark.py +++ b/python/grass/pygrass/tests/benchmark.py @@ -15,14 +15,19 @@ from jinja2 import Template from pathlib import Path + sys.path.append(str(Path.cwd())) sys.path.append("%s/.." 
% (str(Path.cwd()))) + +# flake8: noqa: E402 import grass.lib.gis as libgis import grass.lib.raster as libraster import grass.script as gs import ctypes +# flake8: qa + def test__RasterSegment_value_access__if(): test_a = pygrass.RasterSegment(name="test_a") @@ -380,7 +385,7 @@ class OptionWithDefault(optparse.Option): ATTRS = optparse.Option.ATTRS + [strREQUIRED] def __init__(self, *opts, **attrs): - if attrs.get(strREQUIRED, False): + if attrs.get(strREQUIRED): attrs["help"] = "(Required) " + attrs.get("help", "") optparse.Option.__init__(self, *opts, **attrs) diff --git a/python/grass/pygrass/utils.py b/python/grass/pygrass/utils.py index d8804665b1e..b28f01e3fa0 100644 --- a/python/grass/pygrass/utils.py +++ b/python/grass/pygrass/utils.py @@ -1,19 +1,21 @@ -import itertools import fnmatch +import itertools import os from sqlite3 import OperationalError import grass.lib.gis as libgis +from grass.script import core as grasscore +from grass.script import utils as grassutils +# flake8: noqa: E402 libgis.G_gisinit("") import grass.lib.raster as libraster from grass.lib.ctypes_preamble import String -from grass.script import core as grasscore -from grass.script import utils as grassutils - from grass.pygrass.errors import GrassError +# flake8: qa + test_vector_name = "Utils_test_vector" test_raster_name = "Utils_test_raster" @@ -72,10 +74,11 @@ def find_in_location(type, pattern, location): return res def find_in_gisdbase(type, pattern, gisdbase): - res = [] - for loc in gisdbase.locations(): - res.extend(find_in_location(type, pattern, Location(loc, gisdbase.name))) - return res + return [ + a + for loc in gisdbase.locations() + for a in find_in_location(type, pattern, Location(loc, gisdbase.name)) + ] if gisdbase and location and mapset: mset = Mapset(mapset, location, gisdbase) @@ -185,7 +188,7 @@ def is_clean_name(name) -> bool: False """ - return not libgis.G_legal_filename(name) < 0 + return libgis.G_legal_filename(name) >= 0 def coor2pixel(coord, region): @@ -352,7 +355,8 @@ def r_export(rast, output="", fmt="png", **kargs): **kargs, ) return output - raise ValueError("Raster map does not exist.") + msg = "Raster map does not exist." 
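The find_in_gisdbase() change above collapses an append-in-a-loop into a single nested list comprehension. The same flattening, shown on plain data (the location names, map names, and the find_in_location stub are invented for the example):

# Hypothetical mapping: location name -> maps found in that location.
locations = {
    "nc_spm_08": ["elevation", "roads"],
    "world_latlong": ["countries"],
}


def find_in_location(location):
    return locations[location]


# Before: res = []; for loc in locations: res.extend(find_in_location(loc))
# After: one comprehension that iterates locations and their results.
res = [name for loc in locations for name in find_in_location(loc)]
print(res)  # -> ['elevation', 'roads', 'countries']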
+ raise ValueError(msg) def get_lib_path(modname, libname=None): @@ -461,7 +465,7 @@ def create_test_vector_map(map_name="test_vector"): """ from grass.pygrass.vector import VectorTopo - from grass.pygrass.vector.geometry import Point, Line, Centroid, Boundary + from grass.pygrass.vector.geometry import Boundary, Centroid, Line, Point cols = [ ("cat", "INTEGER PRIMARY KEY"), @@ -589,6 +593,7 @@ def create_test_stream_network_map(map_name="streams"): if __name__ == "__main__": import doctest + from grass.script.core import run_command create_test_vector_map(test_vector_name) @@ -597,10 +602,11 @@ def create_test_stream_network_map(map_name="streams"): doctest.testmod() - # Remove the generated vector map, if exist mset = get_mapset_vector(test_vector_name, mapset="") if mset: + # Remove the generated vector map, if exists run_command("g.remove", flags="f", type="vector", name=test_vector_name) mset = get_mapset_raster(test_raster_name, mapset="") if mset: + # Remove the generated raster map, if exists run_command("g.remove", flags="f", type="raster", name=test_raster_name) diff --git a/python/grass/pygrass/vector/__init__.py b/python/grass/pygrass/vector/__init__.py index 6723b2e44b3..696abb84532 100644 --- a/python/grass/pygrass/vector/__init__.py +++ b/python/grass/pygrass/vector/__init__.py @@ -1,22 +1,26 @@ from os.path import join, exists import grass.lib.gis as libgis +import ctypes + +# flake8: noqa: E402 libgis.G_gisinit("") import grass.lib.vector as libvect -import ctypes - -# import pygrass modules from grass.pygrass.vector.vector_type import VTYPE from grass.pygrass.errors import GrassError, must_be_open from grass.pygrass.gis import Location - -from grass.pygrass.vector.geometry import GEOOBJ as _GEOOBJ -from grass.pygrass.vector.geometry import read_line, read_next_line -from grass.pygrass.vector.geometry import Area as _Area +from grass.pygrass.vector.geometry import ( + GEOOBJ as _GEOOBJ, + read_line, + read_next_line, + Area as _Area, +) from grass.pygrass.vector.abstract import Info from grass.pygrass.vector.basic import Bbox, Cats, Ilist +# flake8: qa + _NUMOF = { "areas": libvect.Vect_get_num_areas, @@ -113,7 +117,8 @@ def next(self): def rewind(self): """Rewind vector map to cause reads to start at beginning.""" if libvect.Vect_rewind(self.c_mapinfo) == -1: - raise GrassError("Vect_rewind raise an error.") + msg = "Vect_rewind raise an error." + raise GrassError(msg) @must_be_open def write(self, geo_obj, cat=None, attrs=None): @@ -205,10 +210,7 @@ def write(self, geo_obj, cat=None, attrs=None): if cat is not None and cat not in self._cats: self._cats.append(cat) if self.table is not None and attrs is not None: - attr = [ - cat, - ] - attr.extend(attrs) + attr = [cat, *attrs] cur = self.table.conn.cursor() cur.execute(self.table.columns.insert_str, attr) cur.close() @@ -224,7 +226,8 @@ def write(self, geo_obj, cat=None, attrs=None): self.c_mapinfo, geo_obj.gtype, geo_obj.c_points, geo_obj.c_cats ) if result == -1: - raise GrassError("Not able to write the vector feature.") + msg = "Not able to write the vector feature." 
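The write() change above builds the attribute row as [cat, *attrs] instead of creating a one-element list and extending it, and the get_cmd() rewrite earlier in this diff uses the same star-unpacking inside a list literal. A short sketch of the idiom (the sample values and flag names are made up):

cat = 1
attrs = ("Main street", 42.5)

# Before:
attr = [cat]
attr.extend(attrs)

# After: same result, one expression.
attr_new = [cat, *attrs]

assert attr == attr_new
print(attr_new)  # -> [1, 'Main street', 42.5]

# The same idiom can splice generators into a command line:
flags = ["f", "overwrite"]
cmd = [
    "g.remove",
    *(f"-{flg}" for flg in flags if len(flg) == 1),
    *(f"--{flg}" for flg in flags if len(flg) > 1),
]
print(cmd)  # -> ['g.remove', '-f', '--overwrite']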
+ raise GrassError(msg) if self._topo_level == 2: # return new feature id (on level 2) geo_obj.id = result @@ -397,10 +400,7 @@ def number_of(self, vtype): @must_be_open def num_primitives(self): """Return dictionary with the number of all primitives""" - output = {} - for prim in VTYPE.keys(): - output[prim] = self.num_primitive_of(prim) - return output + return {prim: self.num_primitive_of(prim) for prim in VTYPE.keys()} @must_be_open def viter(self, vtype, idonly=False): @@ -641,7 +641,8 @@ def rewrite(self, geo_obj, cat, attrs=None, **kargs): self.c_mapinfo, cat, geo_obj.gtype, geo_obj.c_points, geo_obj.c_cats ) if result == -1: - raise GrassError("Not able to write the vector feature.") + msg = "Not able to write the vector feature." + raise GrassError(msg) # return offset into file where the feature starts geo_obj.offset = result @@ -654,7 +655,8 @@ def delete(self, feature_id): :type feature_id: int """ if libvect.Vect_rewrite_line(self.c_mapinfo, feature_id) == -1: - raise GrassError("C function: Vect_rewrite_line.") + msg = "C function: Vect_rewrite_line." + raise GrassError(msg) @must_be_open def restore(self, geo_obj): @@ -663,16 +665,19 @@ def restore(self, geo_obj): libvect.Vect_restore_line(self.c_mapinfo, geo_obj.offset, geo_obj.id) == -1 ): - raise GrassError("C function: Vect_restore_line.") + msg = "C function: Vect_restore_line." + raise GrassError(msg) else: - raise ValueError("The value have not an offset attribute.") + msg = "The value have not an offset attribute." + raise ValueError(msg) @must_be_open def bbox(self): - """Return the BBox of the vecor map""" + """Return the BBox of the vector map""" bbox = Bbox() if libvect.Vect_get_map_box(self.c_mapinfo, bbox.c_bbox) == 0: - raise GrassError("I can not find the Bbox.") + msg = "I can not find the Bbox." + raise GrassError(msg) return bbox def close(self, build=True, release=True): @@ -966,10 +971,10 @@ def areas_to_wkb_list(self, bbox=None, field=1): utils.create_test_vector_map(test_vector_name) doctest.testmod() - """Remove the generated vector map, if exist""" from grass.pygrass.utils import get_mapset_vector from grass.script.core import run_command mset = get_mapset_vector(test_vector_name, mapset="") if mset: + # Remove the generated vector map, if exists run_command("g.remove", flags="f", type="vector", name=test_vector_name) diff --git a/python/grass/pygrass/vector/abstract.py b/python/grass/pygrass/vector/abstract.py index 79307f0d354..00bef3997f2 100644 --- a/python/grass/pygrass/vector/abstract.py +++ b/python/grass/pygrass/vector/abstract.py @@ -284,7 +284,8 @@ def rename(self, newname): if not self.is_open(): utils.rename(self.name, newname, "vect") else: - raise GrassError("The map is open, not able to renamed it.") + msg = "The map is open, not able to rename it." + raise GrassError(msg) self._name = newname def is_3D(self): @@ -360,12 +361,14 @@ def open( if not self.exist() and self.mode != "w": raise OpenError("Map <%s> not found." % self._name) if libvect.Vect_set_open_level(self._topo_level) != 0: - raise OpenError("Invalid access level.") + msg = "Invalid access level." + raise OpenError(msg) # update the overwrite attribute self.overwrite = overwrite if overwrite is not None else self.overwrite # check if the mode is valid if self.mode not in {"r", "rw", "w"}: - raise ValueError("Mode not supported. Use one of: 'r', 'rw', 'w'.") + msg = "Mode not supported. Use one of: 'r', 'rw', 'w'." 
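Most hunks in this diff apply the same mechanical refactor: the exception message is bound to a msg variable first and the variable is raised, instead of raising a long string literal inline (the style enforced by linters such as Ruff's flake8-errmsg rules, to keep tracebacks from repeating the literal). A before/after sketch using the open-mode check from the hunk above; the function names are invented:

def open_map_before(mode):
    if mode not in {"r", "rw", "w"}:
        raise ValueError("Mode not supported. Use one of: 'r', 'rw', 'w'.")


def open_map_after(mode):
    if mode not in {"r", "rw", "w"}:
        # The message is bound to a name, so the traceback line shows
        # `raise ValueError(msg)` rather than the full literal twice.
        msg = "Mode not supported. Use one of: 'r', 'rw', 'w'."
        raise ValueError(msg)


try:
    open_map_after("x")
except ValueError as err:
    print(err)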
+ raise ValueError(msg) # check if the map exist if self.exist() and self.mode in {"r", "rw"}: diff --git a/python/grass/pygrass/vector/basic.py b/python/grass/pygrass/vector/basic.py index f031e23ff98..86eed054340 100644 --- a/python/grass/pygrass/vector/basic.py +++ b/python/grass/pygrass/vector/basic.py @@ -311,13 +311,15 @@ def __getitem__(self, key): if key < 0: # Handle negative indices key += self.c_ilist.contents.n_values if key >= self.c_ilist.contents.n_values: - raise IndexError("Index out of range") + msg = "Index out of range" + raise IndexError(msg) return self.c_ilist.contents.value[key] raise ValueError("Invalid argument type: %r." % key) def __setitem__(self, key, value): if self.contains(value): - raise ValueError("Integer already in the list") + msg = "Integer already in the list" + raise ValueError(msg) self.c_ilist.contents.value[key] = int(value) def __len__(self): diff --git a/python/grass/pygrass/vector/find.py b/python/grass/pygrass/vector/find.py index 25ce9aca8f4..97a9d2bc261 100644 --- a/python/grass/pygrass/vector/find.py +++ b/python/grass/pygrass/vector/find.py @@ -4,28 +4,39 @@ @author: pietro """ -import grass.lib.vector as libvect +from __future__ import annotations + +from typing import TYPE_CHECKING +import grass.lib.vector as libvect from grass.pygrass.errors import must_be_open +from grass.pygrass.vector.basic import BoxList, Ilist +from grass.pygrass.vector.geometry import Area, Isle, Node, read_line -from grass.pygrass.vector.basic import Ilist, BoxList -from grass.pygrass.vector.geometry import read_line, Isle, Area, Node +if TYPE_CHECKING: + from grass.pygrass.vector.table import Table # For test purposes test_vector_name = "find_doctest_map" class AbstractFinder: - def __init__(self, c_mapinfo, table=None, writeable=False): - """Abstract finder - ----------------- - Find geometry feature around a point. + def __init__( + self, c_mapinfo, table: Table | None = None, writeable: bool = False + ) -> None: + """Find geometry feature(s) around a point or that are inside or intersect + with a bounding box. + + :param c_mapinfo: Pointer to the vector layer mapinfo structure + :type c_mapinfo: ctypes pointer to mapinfo structure + :param table: Attribute table of the vector layer + :param writable: True or False """ self.c_mapinfo = c_mapinfo - self.table = table - self.writeable = writeable - self.vtype = { + self.table: Table | None = table + self.writeable: bool = writeable + self.vtype: dict[str, int] = { "point": libvect.GV_POINT, # 1 "line": libvect.GV_LINE, # 2 "boundary": libvect.GV_BOUNDARY, # 3 @@ -33,7 +44,7 @@ def __init__(self, c_mapinfo, table=None, writeable=False): "all": -1, } - def is_open(self): + def is_open(self) -> bool: """Check if the vector map is open or not""" from . import abstract @@ -48,20 +59,6 @@ class PointFinder(AbstractFinder): is part of a topological vector map object. """ - def __init__(self, c_mapinfo, table=None, writeable=False): - """Find geometry feature(s) around a point. - - :param c_mapinfo: Pointer to the vector layer mapinfo structure - :type c_mapinfo: ctypes pointer to mapinfo structure - - :param table: Attribute table of the vector layer - :type table: Class Table from grass.pygrass.table - - :param writable: True or False - :type writeable: boolean - """ - super().__init__(c_mapinfo, table, writeable) - @must_be_open def node(self, point, maxdist): """Find the nearest node around a specific point. 
@@ -397,21 +394,6 @@ class BboxFinder(AbstractFinder): """ - def __init__(self, c_mapinfo, table=None, writeable=False): - """Find geometry feature(s)that are insider or intersect - with a boundingbox. - - :param c_mapinfo: Pointer to the vector layer mapinfo structure - :type c_mapinfo: ctypes pointer to mapinfo structure - - :param table: Attribute table of the vector layer - :type table: Class Table from grass.pygrass.table - - :param writable: True or False - :type writeable: boolean - """ - super().__init__(c_mapinfo, table, writeable) - @must_be_open def geos(self, bbox, type="all", bboxlist_only=False): """Find vector features inside a boundingbox. @@ -662,9 +644,6 @@ def islands(self, bbox, bboxlist_only=False): class PolygonFinder(AbstractFinder): - def __init__(self, c_mapinfo, table=None, writeable=False): - super().__init__(c_mapinfo, table, writeable) - def lines(self, polygon, isles=None): pass @@ -674,15 +653,16 @@ def areas(self, polygon, isles=None): if __name__ == "__main__": import doctest + from grass.pygrass import utils utils.create_test_vector_map(test_vector_name) doctest.testmod() - """Remove the generated vector map, if exist""" from grass.pygrass.utils import get_mapset_vector from grass.script.core import run_command mset = get_mapset_vector(test_vector_name, mapset="") if mset: + # Remove the generated vector map, if exists run_command("g.remove", flags="f", type="vector", name=test_vector_name) diff --git a/python/grass/pygrass/vector/geometry.py b/python/grass/pygrass/vector/geometry.py index b734161d1ea..e0cae2c4fc0 100644 --- a/python/grass/pygrass/vector/geometry.py +++ b/python/grass/pygrass/vector/geometry.py @@ -221,10 +221,7 @@ def __setitem__(self, keys, values): def __dict__(self): """Return a dict of the attribute table row.""" - dic = {} - for key, val in zip(self.keys(), self.values()): - dic[key] = val - return dic + return dict(zip(self.keys(), self.values())) def values(self): """Return the values of the attribute table row. @@ -611,7 +608,8 @@ def buffer( dist_x = dist dist_y = dist elif not dist_x or not dist_y: - raise TypeError("TypeError: buffer expected 1 arguments, got 0") + msg = "buffer expected 1 arguments, got 0" + raise TypeError(msg) bound = Line() p_points = ctypes.pointer(bound.c_points) libvect.Vect_point_buffer2( @@ -675,7 +673,8 @@ def __getitem__(self, key): if key < 0: # Handle negative indices key += self.c_points.contents.n_points if key >= self.c_points.contents.n_points: - raise IndexError("Index out of range") + msg = "Index out of range" + raise IndexError(msg) return Point( self.c_points.contents.x[key], self.c_points.contents.y[key], @@ -743,7 +742,8 @@ def point_on_line(self, distance, angle=0, slope=0): ctypes.pointer(ctypes.c_double(angle)), ctypes.pointer(ctypes.c_double(slope)), ): - raise ValueError("Vect_point_on_line give an error.") + msg = "Vect_point_on_line gave an error." 
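Line.__getitem__() above, like several other methods in this file, normalises negative indices against the point count before bounds-checking. A compact sketch of the pattern on a plain wrapper class (PointList is a hypothetical stand-in for the C-backed pygrass Line):

class PointList:
    """Minimal stand-in for a C-backed sequence such as pygrass Line."""

    def __init__(self, points):
        self._points = list(points)

    def __len__(self):
        return len(self._points)

    def __getitem__(self, key):
        if isinstance(key, int):
            if key < 0:  # Handle negative indices
                key += len(self)
            if key < 0 or key >= len(self):
                msg = "Index out of range"
                raise IndexError(msg)
            return self._points[key]
        msg = "Invalid argument type."
        raise TypeError(msg)


line = PointList([(0, 0), (1, 1), (2, 2)])
print(line[-1])  # -> (2, 2)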
+ raise ValueError(msg) pnt.is2D = self.is2D return pnt @@ -845,7 +845,8 @@ def insert(self, indx, pnt): if indx < 0: # Handle negative indices indx += self.c_points.contents.n_points if indx >= self.c_points.contents.n_points: - raise IndexError("Index out of range") + msg = "Index out of range" + raise IndexError(msg) x, y, z = get_xyz(pnt) libvect.Vect_line_insert_point(self.c_points, indx, x, y, z) @@ -949,7 +950,8 @@ def pop(self, indx): if indx < 0: # Handle negative indices indx += self.c_points.contents.n_points if indx >= self.c_points.contents.n_points: - raise IndexError("Index out of range") + msg = "Index out of range" + raise IndexError(msg) pnt = self[indx] libvect.Vect_line_delete_point(self.c_points, indx) return pnt @@ -968,7 +970,8 @@ def delete(self, indx): if indx < 0: # Handle negative indices indx += self.c_points.contents.n_points if indx >= self.c_points.contents.n_points: - raise IndexError("Index out of range") + msg = "Index out of range" + raise IndexError(msg) libvect.Vect_line_delete_point(self.c_points, indx) def prune(self): @@ -1024,7 +1027,8 @@ def remove(self, pnt): if pnt == point: libvect.Vect_line_delete_point(self.c_points, indx) return - raise ValueError("list.remove(x): x not in list") + msg = "list.remove(x): x not in list" + raise ValueError(msg) def reverse(self): """Reverse the order of vertices, using `Vect_line_reverse` @@ -1127,8 +1131,6 @@ def from_wkt(self, wkt): self.reset() for coord in match.groups()[0].strip().split(","): self.append(tuple(float(e) for e in coord.split(" "))) - else: - return None def buffer( self, @@ -1173,7 +1175,8 @@ def buffer( dist_x = dist dist_y = dist elif not dist_x or not dist_y: - raise TypeError("TypeError: buffer expected 1 arguments, got 0") + msg = "buffer expected 1 arguments, got 0" + raise TypeError(msg) p_bound = ctypes.pointer(ctypes.pointer(libvect.line_pnts())) pp_isle = ctypes.pointer(ctypes.pointer(ctypes.pointer(libvect.line_pnts()))) n_isles = ctypes.pointer(ctypes.c_int()) @@ -1296,7 +1299,8 @@ def to_wkb(self): TODO: Must be implemented """ - raise Exception("Not implemented") + msg = "Not implemented" + raise Exception(msg) def ilines(self, only_in=False, only_out=False): """Return a generator with all lines id connected to a node. @@ -1507,7 +1511,8 @@ def to_wkt(self): def to_wkb(self): """Return a "well know text" (WKB) geometry array. 
::""" - raise Exception("Not implemented") + msg = "Not implemented" + raise Exception(msg) @mapinfo_must_be_set def points_geos(self): @@ -1723,7 +1728,8 @@ def buffer( dist_x = dist dist_y = dist elif not dist_x or not dist_y: - raise TypeError("TypeError: buffer expected 1 arguments, got 0") + msg = "buffer expected 1 arguments, got 0" + raise TypeError(msg) p_bound = ctypes.pointer(ctypes.pointer(libvect.line_pnts())) pp_isle = ctypes.pointer(ctypes.pointer(ctypes.pointer(libvect.line_pnts()))) n_isles = ctypes.pointer(ctypes.c_int()) @@ -1901,7 +1907,8 @@ def c_read_line(feature_id, c_mapinfo, c_points, c_cats): if feature_id < 0: # Handle negative indices feature_id += nmax + 1 if feature_id > nmax: - raise IndexError("Index out of range") + msg = "Index out of range" + raise IndexError(msg) if feature_id > 0: ftype = libvect.Vect_read_line(c_mapinfo, c_points, c_cats, feature_id) return feature_id, ftype, c_points, c_cats @@ -1953,10 +1960,10 @@ def read_line( utils.create_test_vector_map(test_vector_name) doctest.testmod() - """Remove the generated vector map, if exist""" from grass.pygrass.utils import get_mapset_vector from grass.script.core import run_command mset = get_mapset_vector(test_vector_name, mapset="") if mset: + # Remove the generated vector map, if exists run_command("g.remove", flags="f", type="vector", name=test_vector_name) diff --git a/python/grass/pygrass/vector/table.py b/python/grass/pygrass/vector/table.py index b83f751a0dd..ac52d31baad 100644 --- a/python/grass/pygrass/vector/table.py +++ b/python/grass/pygrass/vector/table.py @@ -133,7 +133,8 @@ def limit(self, number): :type number: int """ if not isinstance(number, int): - raise ValueError("Must be an integer.") + msg = "Must be an integer." + raise ValueError(msg) self._limit = "LIMIT {number}".format(number=number) return self @@ -264,9 +265,9 @@ def update_odict(self): """Read columns name and types from table and update the odict attribute. """ + cur = self.conn.cursor() if self.is_pg(): # is a postgres connection - cur = self.conn.cursor() cur.execute("SELECT oid,typname FROM pg_type") diz = dict(cur.fetchall()) odict = OrderedDict() @@ -280,17 +281,15 @@ def update_odict(self): odict[name] = diz[ctype] except pg.ProgrammingError: pass - self.odict = odict else: # is a sqlite connection - cur = self.conn.cursor() cur.execute(sql.PRAGMA.format(tname=self.tname)) descr = cur.fetchall() odict = OrderedDict() for column in descr: name, ctype = column[1:3] odict[name] = ctype - self.odict = odict + self.odict = odict values = ",".join( [ "?", @@ -362,11 +361,9 @@ def names(self, remove=None, unicod=True): ['cat', 'name', 'value'] """ + nams = list(self.odict.keys()) if remove: - nams = list(self.odict.keys()) nams.remove(remove) - else: - nams = list(self.odict.keys()) if unicod: return nams return [str(name) for name in nams] @@ -561,7 +558,8 @@ def cast(self, col_name, new_type): self.update_odict() else: # sqlite does not support rename columns: - raise DBError("SQLite does not support to cast columns.") + msg = "SQLite does not support to cast columns." + raise DBError(msg) def drop(self, col_name): """Drop a column from the table. @@ -663,7 +661,8 @@ def _get_layer(self): def _set_layer(self, number): if number <= 0: - raise TypeError("Number must be positive and greater than 0.") + msg = "Number must be positive and greater than 0." 
+ raise TypeError(msg) self.c_fieldinfo.contents.number = number layer = property( @@ -824,7 +823,7 @@ def connection(self): if driver == "sqlite": import sqlite3 - # Numpy is using some custom integer data types to efficiently + # NumPy is using some custom integer data types to efficiently # pack data into memory. Since these types aren't familiar to # sqlite, you'll have to tell it about how to handle them. for t in ( @@ -1282,10 +1281,10 @@ def create(self, cols, name=None, overwrite=False, cursor=None): utils.create_test_vector_map(test_vector_name) doctest.testmod() - """Remove the generated vector map, if exist""" from grass.pygrass.utils import get_mapset_vector from grass.script.core import run_command mset = get_mapset_vector(test_vector_name, mapset="") if mset: + # Remove the generated vector map, if exists run_command("g.remove", flags="f", type="vector", name=test_vector_name) diff --git a/python/grass/pygrass/vector/testsuite/test_geometry.py b/python/grass/pygrass/vector/testsuite/test_geometry.py index 6957a473165..3dfd0e5c655 100644 --- a/python/grass/pygrass/vector/testsuite/test_geometry.py +++ b/python/grass/pygrass/vector/testsuite/test_geometry.py @@ -83,7 +83,7 @@ def test_eq(self): point1 = Point(1, 0) self.assertFalse(point0 == point1) self.assertFalse(point0 == (1, 0)) - self.assertTrue(point0 == point0) # noqa: PLR0124 + self.assertTrue(point0 == point0) # noqa: PLR0124 # pylint: disable=R0124 self.assertTrue(point0 == (0, 0)) def test_repr(self): @@ -117,7 +117,7 @@ def tearDownClass(cls): cls.vect.close() cls.c_mapinfo = None - """Remove the generated vector map, if exist""" + # Remove the generated vector map, if exists cls.runModule("g.remove", flags="f", type="vector", name=cls.tmpname) def test_len(self): @@ -207,7 +207,7 @@ def tearDownClass(cls): cls.vect.close() cls.c_mapinfo = None - """Remove the generated vector map, if exist""" + # Remove the generated vector map, if exists cls.runModule("g.remove", flags="f", type="vector", name=cls.tmpname) def test_init(self): @@ -264,7 +264,7 @@ def tearDownClass(cls): cls.vect.close() cls.c_mapinfo = None - """Remove the generated vector map, if exist""" + # Remove the generated vector map, if exists cls.runModule("g.remove", flags="f", type="vector", name=cls.tmpname) def test_init(self): diff --git a/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py b/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py index cba638b1901..8fa198bea0d 100644 --- a/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py +++ b/python/grass/pygrass/vector/testsuite/test_geometry_attrs.py @@ -32,7 +32,7 @@ def tearDownClass(cls): cls.vect.close() cls.c_mapinfo = None - """Remove the generated vector map, if exist""" + # Remove the generated vector map, if exists cls.runModule("g.remove", flags="f", type="vector", name=cls.tmpname) def test_getitem(self): diff --git a/python/grass/pygrass/vector/testsuite/test_table.py b/python/grass/pygrass/vector/testsuite/test_table.py index c77233400b1..ecd4500c488 100644 --- a/python/grass/pygrass/vector/testsuite/test_table.py +++ b/python/grass/pygrass/vector/testsuite/test_table.py @@ -154,7 +154,6 @@ def setUp(self): self.cols = self.table.columns def tearDown(self): - """Remove the generated vector map, if exist""" self.table.drop(force=True) self.table = None self.cols = None @@ -176,7 +175,6 @@ def setUp(self): self.cols = self.table.columns def tearDown(self): - """Remove the generated vector map, if exist""" self.table.drop(force=True) self.table = None 
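The connection() comment above notes that SQLite does not know NumPy's integer scalar types and must be told how to handle them. The usual mechanism for that is sqlite3.register_adapter; a minimal sketch (the table and values are only placeholders):

import sqlite3

import numpy as np

# Adapt NumPy integer scalars to plain Python int before they reach SQLite.
for t in (np.int8, np.int16, np.int32, np.int64,
          np.uint8, np.uint16, np.uint32, np.uint64):
    sqlite3.register_adapter(t, int)

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (cat INTEGER)")
conn.execute("INSERT INTO t VALUES (?)", (np.int64(42),))
print(conn.execute("SELECT cat FROM t").fetchone())  # -> (42,)
conn.close()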
self.cols = None diff --git a/python/grass/pygrass/vector/testsuite/test_vector.py b/python/grass/pygrass/vector/testsuite/test_vector.py index b4eebb2ab1e..09baf02d9c3 100644 --- a/python/grass/pygrass/vector/testsuite/test_vector.py +++ b/python/grass/pygrass/vector/testsuite/test_vector.py @@ -28,7 +28,7 @@ def tearDownClass(cls): if cls.vect.is_open(): cls.vect.close() - """Remove the generated vector map, if exist""" + # Remove the generated vector map, if exists cls.runModule("g.remove", flags="f", type="vector", name=cls.tmpname) def test_getitem_slice(self): diff --git a/python/grass/pygrass/vector/testsuite/test_vector3d.py b/python/grass/pygrass/vector/testsuite/test_vector3d.py index 84e688d22e8..a928434c2a8 100644 --- a/python/grass/pygrass/vector/testsuite/test_vector3d.py +++ b/python/grass/pygrass/vector/testsuite/test_vector3d.py @@ -55,7 +55,7 @@ def test_writing_reading_points(self): @classmethod def tearDownClass(cls): - """Remove the generated vector map, if exist""" + """Remove the generated vector map, if exists""" cls.runModule("g.remove", flags="f", type="vector", name=cls.tmpname) diff --git a/python/grass/script/Makefile b/python/grass/script/Makefile index 2ca98db5041..888417f8ca6 100644 --- a/python/grass/script/Makefile +++ b/python/grass/script/Makefile @@ -5,7 +5,7 @@ include $(MODULE_TOPDIR)/include/Make/Python.make DSTDIR = $(ETC)/python/grass/script -MODULES = core db raster raster3d vector array setup task utils +MODULES = core db imagery raster raster3d vector array setup task utils PYFILES := $(patsubst %,$(DSTDIR)/%.py,$(MODULES) __init__) PYCFILES := $(patsubst %,$(DSTDIR)/%.pyc,$(MODULES) __init__) diff --git a/python/grass/script/__init__.py b/python/grass/script/__init__.py index ea82c2fe451..032a0eb67f3 100644 --- a/python/grass/script/__init__.py +++ b/python/grass/script/__init__.py @@ -1,10 +1,211 @@ """Python interface to launch GRASS GIS modules in scripts """ -from .core import * -from .db import * -from .raster import * -from .raster3d import * -from .vector import * -from .utils import * -from . import setup # noqa: F401 +from . 
import setup +from .core import ( + PIPE, + Popen, + call, + compare_key_value_text_files, + create_environment, + create_location, + create_project, + debug, + debug_level, + del_temp_region, + error, + exec_command, + fatal, + feed_command, + find_file, + find_program, + get_capture_stderr, + get_commands, + get_raise_on_error, + get_real_command, + gisenv, + handle_errors, + info, + legal_name, + list_grouped, + list_pairs, + list_strings, + locn_is_latlong, + make_command, + mapsets, + message, + named_colors, + overwrite, + parse_color, + parse_command, + parser, + percent, + pipe_command, + read_command, + region, + region_env, + run_command, + sanitize_mapset_environment, + set_capture_stderr, + set_raise_on_error, + start_command, + tempdir, + tempfile, + tempname, + use_temp_region, + verbose, + verbosity, + version, + warning, + write_command, +) +from .db import ( + db_begin_transaction, + db_commit_transaction, + db_connection, + db_describe, + db_select, + db_table_exist, + db_table_in_vector, +) +from .imagery import group_to_dict +from .raster import mapcalc, mapcalc_start, raster_history, raster_info, raster_what +from .raster3d import mapcalc3d, raster3d_info +from .utils import ( + KeyValue, + append_node_pid, + append_random, + append_uuid, + basename, + clock, + decode, + diff_files, + encode, + float_or_dms, + get_lib_path, + get_num_suffix, + legalize_vector_name, + natural_sort, + naturally_sorted, + parse_key_val, + separator, + set_path, + split, + text_to_string, + try_remove, + try_rmdir, +) +from .vector import ( + vector_columns, + vector_db, + vector_db_select, + vector_history, + vector_info, + vector_info_topo, + vector_layer_db, + vector_what, +) + +__all__ = [ + "PIPE", + "KeyValue", + "Popen", + "append_node_pid", + "append_random", + "append_uuid", + "basename", + "call", + "clock", + "compare_key_value_text_files", + "create_environment", + "create_location", + "create_project", + "db_begin_transaction", + "db_commit_transaction", + "db_connection", + "db_describe", + "db_select", + "db_table_exist", + "db_table_in_vector", + "debug", + "debug_level", + "decode", + "del_temp_region", + "diff_files", + "encode", + "error", + "exec_command", + "fatal", + "feed_command", + "find_file", + "find_program", + "float_or_dms", + "get_capture_stderr", + "get_commands", + "get_lib_path", + "get_num_suffix", + "get_raise_on_error", + "get_real_command", + "gisenv", + "group_to_dict", + "handle_errors", + "info", + "legal_name", + "legalize_vector_name", + "list_grouped", + "list_pairs", + "list_strings", + "locn_is_latlong", + "make_command", + "mapcalc", + "mapcalc3d", + "mapcalc_start", + "mapsets", + "message", + "named_colors", + "natural_sort", + "naturally_sorted", + "overwrite", + "parse_color", + "parse_command", + "parse_key_val", + "parser", + "percent", + "pipe_command", + "raster3d_info", + "raster_history", + "raster_info", + "raster_what", + "read_command", + "region", + "region_env", + "run_command", + "sanitize_mapset_environment", + "separator", + "set_capture_stderr", + "set_path", + "set_raise_on_error", + "setup", + "split", + "start_command", + "tempdir", + "tempfile", + "tempname", + "text_to_string", + "try_remove", + "try_rmdir", + "use_temp_region", + "vector_columns", + "vector_db", + "vector_db_select", + "vector_history", + "vector_info", + "vector_info_topo", + "vector_layer_db", + "vector_what", + "verbose", + "verbosity", + "version", + "warning", + "write_command", +] diff --git a/python/grass/script/array.py 
b/python/grass/script/array.py index 29c99d7617c..5f23bb6f022 100644 --- a/python/grass/script/array.py +++ b/python/grass/script/array.py @@ -131,6 +131,7 @@ def __del__(self): class array(np.memmap): + # pylint: disable-next=signature-differs; W0222 def __new__(cls, mapname=None, null=None, dtype=np.double, env=None): """Define new numpy array @@ -242,6 +243,7 @@ def write(self, mapname, title=None, null=None, overwrite=None, quiet=None): class array3d(np.memmap): + # pylint: disable-next=signature-differs; W0222 def __new__(cls, mapname=None, null=None, dtype=np.double, env=None): """Define new 3d numpy array diff --git a/python/grass/script/core.py b/python/grass/script/core.py index e6de4331b43..2a07219b461 100644 --- a/python/grass/script/core.py +++ b/python/grass/script/core.py @@ -8,7 +8,7 @@ from grass.script import core as grass grass.parser() -(C) 2008-2024 by the GRASS Development Team +(C) 2008-2025 by the GRASS Development Team This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details. @@ -32,14 +32,24 @@ import json import csv import io +from collections.abc import Mapping from tempfile import NamedTemporaryFile from pathlib import Path +from typing import TYPE_CHECKING, TypeVar from .utils import KeyValue, parse_key_val, basename, encode, decode, try_remove from grass.exceptions import ScriptError, CalledModuleError from grass.grassdb.manage import resolve_mapset_path +if TYPE_CHECKING: + from _typeshed import StrPath + + +T = TypeVar("T") +_Env = Mapping[str, str] + + # subprocess wrapper that uses shell on Windows class Popen(subprocess.Popen): _builtin_exts = {".com", ".exe", ".bat", ".cmd"} @@ -53,7 +63,7 @@ def __init__(self, args, **kwargs): if ( sys.platform == "win32" and isinstance(args, list) - and not kwargs.get("shell", False) + and not kwargs.get("shell") and kwargs.get("executable") is None ): cmd = shutil.which(args[0]) @@ -270,7 +280,8 @@ def make_command( if flags: flags = _make_val(flags) if "-" in flags: - raise ScriptError("'-' is not a valid flag") + msg = "'-' is not a valid flag" + raise ScriptError(msg) args.append("-" + flags) for opt, val in options.items(): if opt in _popen_args: @@ -866,16 +877,16 @@ def _parse_opts(lines: list) -> tuple[dict[str, str], dict[str, bool]]: try: var, val = line.split(b"=", 1) except ValueError: - raise SyntaxError("invalid output from g.parser: {}".format(line)) + msg = "invalid output from g.parser: {}".format(line) + raise SyntaxError(msg) try: var = decode(var) val = decode(val) except UnicodeError as error: - raise SyntaxError( - "invalid output from g.parser ({error}): {line}".format( - error=error, line=line - ) + msg = "invalid output from g.parser ({error}): {line}".format( + error=error, line=line ) + raise SyntaxError(msg) if var.startswith("flag_"): flags[var[5:]] = bool(int(val)) elif var.startswith("opt_"): @@ -883,9 +894,8 @@ def _parse_opts(lines: list) -> tuple[dict[str, str], dict[str, bool]]: elif var in {"GRASS_OVERWRITE", "GRASS_VERBOSE"}: os.environ[var] = val else: - raise SyntaxError( - "unexpected output variable from g.parser: {}".format(line) - ) + msg = "unexpected output variable from g.parser: {}".format(line) + raise SyntaxError(msg) return (options, flags) @@ -1032,20 +1042,22 @@ def _compare_units(dic): def _text_to_key_value_dict( - filename, sep=":", val_sep=",", checkproj=False, checkunits=False -): + filename: StrPath, + sep: str = ":", + val_sep: str = ",", + checkproj: bool = False, + checkunits: bool = 
False, +) -> KeyValue[list[int | float | str]]: """Convert a key-value text file, where entries are separated by newlines and the key and value are separated by `sep', into a key-value dictionary and discover/use the correct data types (float, int or string) for values. - :param str filename: The name or name and path of the text file to convert - :param str sep: The character that separates the keys and values, default - is ":" - :param str val_sep: The character that separates the values of a single + :param filename: The name or name and path of the text file to convert + :param sep: The character that separates the keys and values, default is ":" + :param val_sep: The character that separates the values of a single key, default is "," - :param bool checkproj: True if it has to check some information about - projection system - :param bool checkproj: True if it has to check some information about units + :param checkproj: True if it has to check some information about projection system + :param checkunits: True if it has to check some information about units :return: The dictionary @@ -1064,8 +1076,9 @@ def _text_to_key_value_dict( {'a': ['Hello'], 'c': [1, 2, 3, 4, 5], 'b': [1.0], 'd': ['hello', 8, 0.1]} """ - text = open(filename).readlines() - kvdict = KeyValue() + with Path(filename).open() as f: + text = f.readlines() + kvdict: KeyValue[list[int | float | str]] = KeyValue() for line in text: if line.find(sep) >= 0: @@ -1076,7 +1089,7 @@ def _text_to_key_value_dict( # Jump over empty values continue values = value.split(val_sep) - value_list = [] + value_list: list[int | float | str] = [] for value in values: not_float = False @@ -1172,7 +1185,7 @@ def compare_key_value_text_files( # interface to g.gisenv -def gisenv(env=None): +def gisenv(env: _Env | None = None) -> KeyValue[str | None]: """Returns the output from running g.gisenv (with no arguments), as a dictionary. Example: @@ -1190,14 +1203,14 @@ def gisenv(env=None): # interface to g.region -def locn_is_latlong(env=None) -> bool: +def locn_is_latlong(env: _Env | None = None) -> bool: """Tests if location is lat/long. Value is obtained by checking the "g.region -pu" projection code. :return: True for a lat/long region, False otherwise """ s = read_command("g.region", flags="pu", env=env) - kv = parse_key_val(s, ":") + kv: KeyValue[str | None] = parse_key_val(s, ":") return kv["projection"].split(" ")[0] == "3" @@ -1245,7 +1258,9 @@ def region(region3d=False, complete=False, env=None): return reg -def region_env(region3d=False, flags=None, env=None, **kwargs): +def region_env( + region3d: bool = False, flags: str | None = None, env: _Env | None = None, **kwargs +) -> str: """Returns region settings as a string which can used as GRASS_REGION environmental variable. @@ -1255,8 +1270,8 @@ def region_env(region3d=False, flags=None, env=None, **kwargs): See also :func:`use_temp_region()` for alternative method how to define temporary region used for raster-based computation. 
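The _text_to_key_value_dict() changes above switch to Path.open() while keeping the same type discovery: each comma-separated value is tried as int, then float, then kept as a string. A simplified, self-contained sketch of that discovery step; the sample text mirrors the doctest in the function and is parsed from memory instead of a file:

sample = """\
a: Hello
b: 1.0
c: 1,2,3,4,5
d: hello,8,0.1
"""

kvdict = {}
for line in sample.splitlines():
    if ":" not in line:
        continue
    key, value = (part.strip() for part in line.split(":", 1))
    value_list = []
    for item in value.split(","):
        try:
            value_list.append(int(item))
        except ValueError:
            try:
                value_list.append(float(item))
            except ValueError:
                value_list.append(item)
    kvdict[key] = value_list

print(kvdict["d"])  # -> ['hello', 8, 0.1]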
- :param bool region3d: True to get 3D region - :param string flags: for example 'a' + :param region3d: True to get 3D region + :param flags: for example 'a' :param env: dictionary with system environment variables (`os.environ` by default) :param kwargs: g.region's parameters like 'raster', 'vector' or 'region' @@ -1270,7 +1285,7 @@ def region_env(region3d=False, flags=None, env=None, **kwargs): :return: empty string on error """ # read proj/zone from WIND file - gis_env = gisenv(env) + gis_env: KeyValue[str | None] = gisenv(env) windfile = os.path.join( gis_env["GISDBASE"], gis_env["LOCATION_NAME"], gis_env["MAPSET"], "WIND" ) @@ -1449,20 +1464,19 @@ def list_strings(type, pattern=None, mapset=None, exclude=None, flag="", env=Non if type == "cell": verbose(_('Element type should be "raster" and not "%s"') % type, env=env) - result = [] - for line in read_command( - "g.list", - quiet=True, - flags="m" + flag, - type=type, - pattern=pattern, - exclude=exclude, - mapset=mapset, - env=env, - ).splitlines(): - result.append(line.strip()) - - return result + return [ + line.strip() + for line in read_command( + "g.list", + quiet=True, + flags="m" + flag, + type=type, + pattern=pattern, + exclude=exclude, + mapset=mapset, + env=env, + ).splitlines() + ] def list_pairs(type, pattern=None, mapset=None, exclude=None, flag="", env=None): @@ -1575,7 +1589,7 @@ def list_grouped( # color parsing -named_colors = { +named_colors: dict[str, tuple[float, float, float]] = { "white": (1.00, 1.00, 1.00), "black": (0.00, 0.00, 0.00), "red": (1.00, 0.00, 0.00), @@ -1595,7 +1609,9 @@ def list_grouped( } -def parse_color(val, dflt=None): +def parse_color( + val: str, dflt: tuple[float, float, float] | None = None +) -> tuple[float, float, float] | None: """Parses the string "val" as a GRASS colour, which can be either one of the named colours or an R:G:B tuple e.g. 255:255:255. 
Returns an (r,g,b) triple whose components are floating point values between 0 @@ -1614,9 +1630,9 @@ def parse_color(val, dflt=None): if val in named_colors: return named_colors[val] - vals = val.split(":") + vals: list[str] = val.split(":") if len(vals) == 3: - return tuple(float(v) / 255 for v in vals) + return (float(vals[0]) / 255, float(vals[1]) / 255, float(vals[2]) / 255) return dflt @@ -1678,14 +1694,13 @@ def find_program(pgm, *args): or non-zero return code :return: True otherwise """ - nuldev = open(os.devnull, "w+") - try: - # TODO: the doc or impl is not correct, any return code is accepted - call([pgm] + list(args), stdin=nuldev, stdout=nuldev, stderr=nuldev) - found = True - except Exception: - found = False - nuldev.close() + with open(os.devnull, "w+") as nuldev: + try: + # TODO: the doc or impl is not correct, any return code is accepted + call([pgm] + list(args), stdin=nuldev, stdout=nuldev, stderr=nuldev) + found = True + except Exception: + found = False return found @@ -1800,6 +1815,7 @@ def create_project( if datum_trans: kwargs["datum_trans"] = datum_trans + ps = None if epsg: ps = pipe_command( "g.proj", @@ -1855,7 +1871,7 @@ def create_project( else: _create_location_xy(mapset_path.directory, mapset_path.location) - if epsg or proj4 or filename or wkt: + if ps is not None and (epsg or proj4 or filename or wkt): error = ps.communicate(stdin)[1] try_remove(tmp_gisrc) @@ -1868,16 +1884,15 @@ def create_project( def _set_location_description(path, location, text): """Set description (aka title aka MYNAME) for a location""" try: - fd = codecs.open( + with codecs.open( os.path.join(path, location, "PERMANENT", "MYNAME"), encoding="utf-8", mode="w", - ) - if text: - fd.write(text + os.linesep) - else: - fd.write(os.linesep) - fd.close() + ) as fd: + if text: + fd.write(text + os.linesep) + else: + fd.write(os.linesep) except OSError as e: raise ScriptError(repr(e)) @@ -1893,8 +1908,11 @@ def _create_location_xy(database, location): cur_dir = Path.cwd() try: os.chdir(database) + permanent_dir = Path(location, "PERMANENT") + default_wind_path = permanent_dir / "DEFAULT_WIND" + wind_path = permanent_dir / "WIND" os.mkdir(location) - os.mkdir(os.path.join(location, "PERMANENT")) + permanent_dir.mkdir() # create DEFAULT_WIND and WIND files regioninfo = [ @@ -1918,16 +1936,8 @@ def _create_location_xy(database, location): "t-b resol: 1", ] - defwind = open(os.path.join(location, "PERMANENT", "DEFAULT_WIND"), "w") - for param in regioninfo: - defwind.write(param + "%s" % os.linesep) - defwind.close() - - shutil.copy( - os.path.join(location, "PERMANENT", "DEFAULT_WIND"), - os.path.join(location, "PERMANENT", "WIND"), - ) - + default_wind_path.write_text("\n".join(regioninfo)) + shutil.copy(default_wind_path, wind_path) os.chdir(cur_dir) except OSError as e: raise ScriptError(repr(e)) diff --git a/python/grass/script/db.py b/python/grass/script/db.py index 5591b92d4ca..0ae4fe6b53f 100644 --- a/python/grass/script/db.py +++ b/python/grass/script/db.py @@ -127,9 +127,8 @@ def db_connection(force=False, env=None): :return: parsed output of db.connect """ # noqa: E501 try: - nuldev = open(os.devnull, "w") - conn = parse_command("db.connect", flags="g", stderr=nuldev, env=env) - nuldev.close() + with open(os.devnull, "w") as nuldev: + conn = parse_command("db.connect", flags="g", stderr=nuldev, env=env) except CalledModuleError: conn = None diff --git a/python/grass/script/imagery.py b/python/grass/script/imagery.py new file mode 100644 index 00000000000..c0531541409 --- /dev/null 
+++ b/python/grass/script/imagery.py @@ -0,0 +1,152 @@ +""" +Imagery related functions to be used in Python scripts. + +Usage: + +:: + + import grass.script as gs + + gs.imagery.group_to_dict(imagery_group) + ... + +(C) 2024 by Stefan Blumentrath and the GRASS Development Team +This program is free software under the GNU General Public +License (>=v2). Read the file COPYING that comes with GRASS +for details. + +.. sectionauthor:: Stefan Blumentrath +""" + +from .core import read_command, warning, fatal +from .raster import raster_info + + +def group_to_dict( + imagery_group_name, + subgroup=None, + dict_keys="semantic_labels", + dict_values="map_names", + fill_semantic_label=True, + env=None, +): + """Create a dictionary to represent an imagery group with metadata. + + Depending on the dict_keys option, the returned dictionary uses either + the names of the raster maps ("map_names"), their row indices in the group + ("indices") or their associated semantic_labels ("semantic_labels") as keys. + The default is to use semantic_labels. Note that map metadata + of the maps in the group has to be read to get the semantic label, + in addition to the group file. The same metadata is read when the + "metadata" is requested as dict_values. Other supported dict_values + are "map_names" (default), "semantic_labels", or "indices". + + The function can also operate on the level of subgroups. In case a + non-existing (or empty) subgroup is requested, a warning is printed + and an empty dictionary is returned (following the behavior of i.group). + + Example:: + + >>> run_command("g.copy", raster="lsat7_2000_10,lsat7_2000_10") + >>> run_command("r.support", raster="lsat7_2000_10", semantic_label="L8_1") + >>> run_command("g.copy", raster="lsat7_2000_20,lsat7_2000_20") + >>> run_command("r.support", raster="lsat7_2000_20", semantic_label="L8_2") + >>> run_command("g.copy", raster="lsat7_2000_30,lsat7_2000_30") + >>> run_command("r.support", raster="lsat7_2000_30", semantic_label="L8_3") + >>> run_command("i.group", group="L8_group", + ... input="lsat7_2000_10,lsat7_2000_20,lsat7_2000_30") + >>> group_to_dict("L8_group") # doctest: +ELLIPSIS + {"L8_1": "lsat7_2000_10", ... "L8_3": "lsat7_2000_30"} + >>> run_command("g.remove", flags="f", type="group", name="L8_group") + >>> run_command("g.remove", flags="f", type="raster", + ... name="lsat7_2000_10,lsat7_2000_20,lsat7_2000_30") + + :param str imagery_group_name: Name of the imagery group to process (or None) + :param str subgroup: Name of the imagery sub-group to process (or None) + :param str dict_keys: What to use as key for dictionary. It can be either + "semantic_labels" (default), "map_names" or "indices" + :param str dict_values: What to use as values for dictionary. It can be either + "map_names" (default), "semantic_labels", "indices" or + "metadata" (to return dictionaries with full map metadata) + :param bool fill_semantic_label: If maps in a group do not have a semantic + label, their index in the group is used + instead (default). Otherwise None / "none" + is used.
+ :param dict env: Environment to use when parsing the imagery group + + :return: dictionary representing an imagery group with its maps and their + semantic labels, row indices in the group, or metadata + :rtype: dict + """ + group_dict = {} + maps_in_group = ( + read_command( + "i.group", + group=imagery_group_name, + subgroup=subgroup, + flags="g", + quiet=True, + env=env, + ) + .strip() + .split() + ) + + if dict_keys not in {"indices", "map_names", "semantic_labels"}: + msg = f"Invalid dictionary keys <{dict_keys}> requested" + raise ValueError(msg) + + if dict_values not in {"indices", "map_names", "semantic_labels", "metadata"}: + msg = f"Invalid dictionary values <{dict_values}> requested" + raise ValueError(msg) + + if subgroup and not maps_in_group: + warning( + _("Empty result returned for subgroup <{sg}> in group <{g}>").format( + sg=subgroup, g=imagery_group_name + ) + ) + + for idx, raster_map in enumerate(maps_in_group): + raster_map_info = None + # Get raster metadata if needed + if ( + dict_values in {"semantic_labels", "metadata"} + or dict_keys == "semantic_labels" + ): + raster_map_info = raster_info(raster_map, env=env) + + # Get key for dictionary + if dict_keys == "indices": + key = str(idx + 1) + elif dict_keys == "map_names": + key = raster_map + elif dict_keys == "semantic_labels": + key = raster_map_info["semantic_label"] + if not key or key == '"none"': + if fill_semantic_label: + key = str(idx + 1) + else: + fatal( + _( + "Semantic label missing for raster map {m} in group <{g}>." + ).format(m=raster_map, g=imagery_group_name) + ) + + if dict_values == "indices": + val = str(idx + 1) + elif dict_values == "map_names": + val = raster_map + elif dict_values == "semantic_labels": + val = raster_map_info["semantic_label"] + elif dict_values == "metadata": + val = raster_map_info + if key in group_dict: + warning( + _( + "Key {k} from raster map {m} already present in group dictionary. " + "Overwriting existing entry..." + ).format(k=key, m=raster_map) + ) + group_dict[key] = val + return group_dict diff --git a/python/grass/script/raster.py b/python/grass/script/raster.py index aaf9e0179b0..f3507cd48b0 100644 --- a/python/grass/script/raster.py +++ b/python/grass/script/raster.py @@ -255,8 +255,7 @@ def raster_what(map, coord, env=None, localized=False): for item in ret.splitlines(): line = item.split(sep)[3:] for i, map_name in enumerate(map_list): - tmp_dict = {} - tmp_dict[map_name] = {} + tmp_dict = {map_name: {}} for j in range(len(labels)): tmp_dict[map_name][labels[j]] = line[i * len(labels) + j] diff --git a/python/grass/script/raster3d.py b/python/grass/script/raster3d.py index 1a4a2782984..8b3148be4e3 100644 --- a/python/grass/script/raster3d.py +++ b/python/grass/script/raster3d.py @@ -30,6 +30,7 @@ def raster3d_info(map, env=None): """Return information about a raster3d map (interface to `r3.info`). + Example: >>> mapcalc3d('volume = row() + col() + depth()') diff --git a/python/grass/script/setup.py b/python/grass/script/setup.py index cadf05bbe38..e57c6e81668 100644 --- a/python/grass/script/setup.py +++ b/python/grass/script/setup.py @@ -65,7 +65,7 @@ session.finish() -(C) 2010-2024 by the GRASS Development Team +(C) 2010-2025 by the GRASS Development Team This program is free software under the GNU General Public License (>=v2). Read the file COPYING that comes with GRASS for details.
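# --- Illustrative usage sketch (editorial addition, not part of the diff above) ---
# A minimal example of the new grass.script.imagery.group_to_dict() helper shown
# above, assuming an active GRASS session; the group and subgroup names used here
# ("landsat_group", "rgb") are hypothetical placeholders.
import grass.script as gs

# Default behaviour: semantic labels as keys, map names as values.
bands = gs.imagery.group_to_dict("landsat_group")

# Map names as keys and full r.info metadata dictionaries as values.
metadata = gs.imagery.group_to_dict(
    "landsat_group", dict_keys="map_names", dict_values="metadata"
)

# Restrict the lookup to a subgroup; a warning plus an empty dict is returned
# when the subgroup is missing or empty (mirroring i.group behaviour).
rgb = gs.imagery.group_to_dict("landsat_group", subgroup="rgb")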
@@ -404,9 +404,8 @@ def __enter__(self): :returns: reference to the object (self) """ if not self.active: - raise ValueError( - "Attempt to use inactive (finished) session as a context manager" - ) + msg = "Attempt to use inactive (finished) session as a context manager" + raise ValueError(msg) return self def __exit__(self, type, value, traceback): @@ -423,7 +422,8 @@ def finish(self): and finish the session. No GRASS modules can be called afterwards. """ if not self.active: - raise ValueError("Attempt to finish an already finished session") + msg = "Attempt to finish an already finished session" + raise ValueError(msg) self._active = False finish(env=self._env, start_time=self._start_time) diff --git a/python/grass/script/task.py b/python/grass/script/task.py index 758372938e3..495ff7d17d2 100644 --- a/python/grass/script/task.py +++ b/python/grass/script/task.py @@ -114,22 +114,14 @@ def get_list_params(self, element="name"): :param str element: element name """ - params = [] - for p in self.params: - params.append(p[element]) - - return params + return [p[element] for p in self.params] def get_list_flags(self, element="name"): """Get list of flags :param str element: element name """ - flags = [] - for p in self.flags: - flags.append(p[element]) - - return flags + return [p[element] for p in self.flags] def get_param(self, value, element="name", raiseError=True): """Find and return a param by name @@ -313,7 +305,7 @@ def _process_module(self): self.task.label = self._get_node_text(self.root, "label") self.task.description = self._get_node_text(self.root, "description") - def _process_params(self): + def _process_params(self) -> None: """Process parameters""" for p in self.root.findall("parameter"): # gisprompt @@ -347,15 +339,12 @@ def _process_params(self): multiple = p.get("multiple", "no") == "yes" required = p.get("required", "no") == "yes" - if ( + hidden: bool = bool( self.task.blackList["enabled"] and self.task.name in self.task.blackList["items"] and p.get("name") in self.task.blackList["items"][self.task.name].get("params", []) - ): - hidden = True - else: - hidden = False + ) self.task.params.append( { @@ -380,23 +369,17 @@ def _process_params(self): } ) - def _process_flags(self): + def _process_flags(self) -> None: """Process flags""" for p in self.root.findall("flag"): - if ( + hidden: bool = bool( self.task.blackList["enabled"] and self.task.name in self.task.blackList["items"] and p.get("name") in self.task.blackList["items"][self.task.name].get("flags", []) - ): - hidden = True - else: - hidden = False + ) - if p.find("suppress_required") is not None: - suppress_required = True - else: - suppress_required = False + suppress_required: bool = bool(p.find("suppress_required") is not None) self.task.flags.append( { @@ -557,12 +540,14 @@ def command_info(cmd): :param str cmd: the command to query """ task = parse_interface(cmd) - cmdinfo = {} - - cmdinfo["description"] = task.get_description() - cmdinfo["keywords"] = task.get_keywords() - cmdinfo["flags"] = flags = task.get_options()["flags"] - cmdinfo["params"] = params = task.get_options()["params"] + flags = task.get_options()["flags"] + params = task.get_options()["params"] + cmdinfo = { + "description": task.get_description(), + "keywords": task.get_keywords(), + "flags": flags, + "params": params, + } usage = task.get_name() flags_short = [] diff --git a/python/grass/script/tests/grass_script_setup_test.py b/python/grass/script/tests/grass_script_setup_test.py index 36dfbb77dbb..f2593d1e0ca 100644 --- 
a/python/grass/script/tests/grass_script_setup_test.py +++ b/python/grass/script/tests/grass_script_setup_test.py @@ -8,6 +8,9 @@ import grass.script as gs +RUNTIME_GISBASE_SHOULD_BE_PRESENT = "Runtime (GISBASE) should be present" +SESSION_FILE_NOT_DELETED = "Session file not deleted" + xfail_mp_spawn = pytest.mark.xfail( multiprocessing.get_start_method() == "spawn", reason="Multiprocessing using 'spawn' start method requires pickable functions", @@ -91,8 +94,8 @@ def test_init_finish_global_functions_capture_strerr0_partial(tmp_path): ) session_file, runtime_present = run_in_subprocess(init_finish) assert session_file, "Expected file name from the subprocess" - assert runtime_present, "Runtime (GISBASE) should be present" - assert not os.path.exists(session_file), "Session file not deleted" + assert runtime_present, RUNTIME_GISBASE_SHOULD_BE_PRESENT + assert not os.path.exists(session_file), SESSION_FILE_NOT_DELETED @xfail_mp_spawn @@ -113,8 +116,8 @@ def init_finish(queue): session_file, runtime_present = run_in_subprocess(init_finish) assert session_file, "Expected file name from the subprocess" - assert runtime_present, "Runtime (GISBASE) should be present" - assert not os.path.exists(session_file), "Session file not deleted" + assert runtime_present, RUNTIME_GISBASE_SHOULD_BE_PRESENT + assert not os.path.exists(session_file), SESSION_FILE_NOT_DELETED @xfail_mp_spawn @@ -139,8 +142,8 @@ def init_finish(queue): init_finish ) assert session_file, "Expected file name from the subprocess" - assert runtime_present, "Runtime (GISBASE) should be present" - assert not os.path.exists(session_file), "Session file not deleted" + assert runtime_present, RUNTIME_GISBASE_SHOULD_BE_PRESENT + assert not os.path.exists(session_file), SESSION_FILE_NOT_DELETED # This is testing the current implementation behavior, but it is not required # to be this way in terms of design. assert runtime_present_after, "Runtime should continue to be present" @@ -189,7 +192,7 @@ def init_finish(queue): ) = run_in_subprocess(init_finish) # Runtime - assert runtime_present_during, "Runtime (GISBASE) should be present" + assert runtime_present_during, RUNTIME_GISBASE_SHOULD_BE_PRESENT # This is testing the current implementation behavior, but it is not required # to be this way in terms of design. 
assert runtime_present_after, "Expected GISBASE to be present when finished" @@ -198,7 +201,7 @@ def init_finish(queue): assert session_file_present_during, "Expected session file to be present" assert session_file_variable_present_during, "Variable GISRC should be present" assert not session_file_variable_present_after, "Not expecting GISRC when finished" - assert not os.path.exists(session_file), "Session file not deleted" + assert not os.path.exists(session_file), SESSION_FILE_NOT_DELETED @xfail_mp_spawn @@ -220,8 +223,8 @@ def workload(queue): session_file, file_existed, runtime_present = run_in_subprocess(workload) assert session_file, "Expected file name from the subprocess" assert file_existed, "File should have been present" - assert runtime_present, "Runtime (GISBASE) should be present" - assert not os.path.exists(session_file), "Session file not deleted" + assert runtime_present, RUNTIME_GISBASE_SHOULD_BE_PRESENT + assert not os.path.exists(session_file), SESSION_FILE_NOT_DELETED assert not os.environ.get("GISRC") assert not os.environ.get("GISBASE") diff --git a/python/grass/script/tests/test_script_task.py b/python/grass/script/tests/test_script_task.py index cae8735827e..aff10500d7c 100644 --- a/python/grass/script/tests/test_script_task.py +++ b/python/grass/script/tests/test_script_task.py @@ -1,11 +1,54 @@ +import os + +import pytest + +import grass.script as gs from grass.script.task import grassTask -def test_mapcalc_simple_e_name(): +@pytest.fixture +def xy_session_patched_env(tmp_path, monkeypatch): + """Active session in an XY location (scope: function), patching env vars directly. + + This allows functions not accepting an env dictionary argument to work in tests""" + location = "xy_test" + gs.core._create_location_xy(tmp_path, location) # pylint: disable=protected-access + with gs.setup.init(tmp_path / location, env=os.environ.copy()) as session: + for key, value in session.env.items(): + monkeypatch.setenv(key, value) + yield session + + +def test_mapcalc_simple_e_name(xy_session_patched_env): gt = grassTask("r.mapcalc.simple") assert gt.get_param("e")["name"] == "e" -def test_mapcalc_simple_expession_name(): +def test_mapcalc_simple_expression_name(xy_session_patched_env): gt = grassTask("r.mapcalc.simple") assert gt.get_param("expression")["name"] == "expression" + + +def test_d_vect_from_bin(xy_session_patched_env): + """Tests that a module installed in "$GISBASE/bin can be used with grassTask""" + task = grassTask("d.vect") + task.get_param("map")["value"] = "map_name" + task.get_flag("i")["value"] = True + task.get_param("layer")["value"] = 1 + task.get_param("label_bcolor")["value"] = "red" + # the default parameter display is added automatically + actual = " ".join(task.get_cmd()) + expected = "d.vect -i map=map_name layer=1 display=shape label_bcolor=red" + assert actual == expected + + +def test_v_clip_from_scripts(xy_session_patched_env): + """Tests that a module installed in "$GISBASE/scripts can be used with grassTask""" + task = grassTask("v.clip") + task.get_param("input")["value"] = "map_name" + task.get_flag("r")["value"] = True + task.get_param("clip")["value"] = "clip_map_name" + task.get_param("output")["value"] = "output_map_name" + actual = " ".join(task.get_cmd()) + expected = "v.clip -r input=map_name clip=clip_map_name output=output_map_name" + assert actual == expected diff --git a/python/grass/script/testsuite/test_imagery.py b/python/grass/script/testsuite/test_imagery.py new file mode 100644 index 00000000000..543bda774b6 --- /dev/null 
+++ b/python/grass/script/testsuite/test_imagery.py @@ -0,0 +1,163 @@ +from grass.exceptions import CalledModuleError +from grass.gunittest.case import TestCase +from grass.gunittest.main import test + +import grass.script as gs + + +class TestImageryGroupToDict(TestCase): + """Tests function `group_to_dict` that returns raster maps + from an imagery group and their metadata.""" + + @classmethod + def setUpClass(cls): + cls.bands = [1, 2, 3] + cls.raster_maps = [f"lsat7_2002_{band}0" for band in cls.bands] + cls.group = "L8_group" + cls.subgroup = "L8_group_subgroup" + # Create input maps with label and group + for band in cls.bands: + cls.runModule( + "g.copy", raster=[f"lsat7_2002_{band}0", f"lsat7_2002_{band}0"] + ) + cls.runModule( + "r.support", map=f"lsat7_2002_{band}0", semantic_label=f"L8_{band}" + ) + cls.runModule("i.group", group=cls.group, input=cls.raster_maps) + + @classmethod + def tearDownClass(cls): + cls.runModule("g.remove", type="raster", name=cls.raster_maps, flags="f") + cls.runModule("g.remove", type="group", name=cls.group, flags="f") + + def test_basic_group_dict_defaults(self): + """Test with semantic labels as keys and map names as values (defaults)""" + ref_dict = {f"L8_{band}": f"lsat7_2002_{band}0" for band in self.bands} + group_info = gs.imagery.group_to_dict(self.group) + # Check that a dict is returned + self.assertIsInstance(group_info, dict) + self.assertListEqual(list(ref_dict.keys()), list(group_info.keys())) + self.assertListEqual( + list(ref_dict.values()), [val.split("@")[0] for val in group_info.values()] + ) + + def test_non_existing_group(self): + """Test that function fails if group does not exist""" + # Non existing group + self.assertRaises( + CalledModuleError, gs.imagery.group_to_dict, "non_existing_group" + ) + + def test_invalid_dict_key(self): + """Test that function fails if invalid keys are requested""" + self.assertRaises( + ValueError, + gs.imagery.group_to_dict, + self.group, + dict_keys="invalid_dict_key", + ) + + def test_invalid_dict_value(self): + """Test that function fails if invalid values are requested""" + self.assertRaises( + ValueError, + gs.imagery.group_to_dict, + self.group, + dict_values="invalid_dict_value", + ) + + def test_missing_subgroup(self): + """Test that empty dict is returned if subgroup does not exist""" + group_info = gs.imagery.group_to_dict( + self.group, subgroup="non_existing_subgroup" + ) + + # Check that an empty dict is returned + self.assertDictEqual(group_info, {}) + + def test_basic_group_map_keys(self): + """Test with map_names as keys and semantic_labels as values""" + ref_dict = {f"lsat7_2002_{band}0": f"L8_{band}" for band in self.bands} + group_info = gs.imagery.group_to_dict( + self.group, dict_keys="map_names", dict_values="semantic_labels" + ) + # Check that a dict is returned + self.assertIsInstance(group_info, dict) + self.assertListEqual( + list(ref_dict.keys()), [key.split("@")[0] for key in group_info.keys()] + ) + self.assertListEqual(list(ref_dict.values()), list(group_info.values())) + + def test_basic_group_index_keys(self): + """Test with indices as keys and mapnames as values""" + ref_dict = {str(band): f"lsat7_2002_{band}0" for band in self.bands} + group_info = gs.imagery.group_to_dict(self.group, dict_keys="indices") + # Check that a dict is returned + self.assertIsInstance(group_info, dict) + self.assertListEqual(list(ref_dict.keys()), list(group_info.keys())) + self.assertListEqual( + list(ref_dict.values()), + [val.split("@")[0] for val in group_info.values()], + ) + + 
def test_full_info_group_label_keys(self): + """Test with semantic labels as keys and full map metadata as values""" + group_info = gs.imagery.group_to_dict(self.group, dict_values="metadata") + # Check that a dict is returned + self.assertIsInstance(group_info, dict) + self.assertListEqual( + [f"L8_{band}" for band in self.bands], + [key.split("@")[0] for key in group_info.keys()], + ) + for band in self.bands: + # Take some metadata keys from raster_info + for metadata_key in [ + "north", + "nsres", + "cols", + "datatype", + "map", + "date", + "semantic_label", + "comments", + ]: + self.assertIn(metadata_key, group_info[f"L8_{band}"]) + + def test_full_info_group_label_keys_subgroup(self): + """Test with map names as keys and full map metadata as values""" + metadata_keys = { + "north", + "nsres", + "cols", + "datatype", + "map", + "date", + "semantic_label", + "comments", + } + self.runModule( + "i.group", group=self.group, subgroup=self.subgroup, input=self.raster_maps + ) + group_info = gs.imagery.group_to_dict( + self.group, + subgroup=self.subgroup, + dict_keys="map_names", + dict_values="metadata", + ) + # Check that a dict is returned + self.assertIsInstance(group_info, dict) + self.assertListEqual( + [f"lsat7_2002_{band}0" for band in self.bands], + [key.split("@")[0] for key in group_info.keys()], + ) + for key, val in group_info.items(): + # Check keys + self.assertTrue(key.startswith("lsat7_2002_")) + # Check values + self.assertIsInstance(val, dict) + # Take some metadata keys from raster_info + self.assertTrue(metadata_keys.issubset(set(val.keys()))) + + +if __name__ == "__main__": + test() diff --git a/python/grass/script/utils.py b/python/grass/script/utils.py index 0a32a9e7b4e..64677731835 100644 --- a/python/grass/script/utils.py +++ b/python/grass/script/utils.py @@ -17,6 +17,8 @@ .. sectionauthor:: Anna Petrasova """ +from __future__ import annotations + import os import shutil import locale @@ -29,9 +31,19 @@ import string from pathlib import Path +from typing import TYPE_CHECKING, AnyStr, Callable, TypeVar, cast, overload + + +if TYPE_CHECKING: + from _typeshed import FileDescriptorOrPath, StrOrBytesPath, StrPath + +# Type variables +T = TypeVar("T") +VT = TypeVar("VT") # Value type -def float_or_dms(s): + +def float_or_dms(s) -> float: """Convert DMS to float. >>> round(float_or_dms('26:45:30'), 5) @@ -48,7 +60,7 @@ def float_or_dms(s): return sum(float(x) / 60**n for (n, x) in enumerate(s.split(":"))) -def separator(sep): +def separator(sep: str) -> str: """Returns separator from G_OPT_F_SEP appropriately converted to character. @@ -80,7 +92,9 @@ def separator(sep): return sep -def diff_files(filename_a, filename_b): +def diff_files( + filename_a: FileDescriptorOrPath, filename_b: FileDescriptorOrPath +) -> list[str]: """Diffs two text files and returns difference. :param str filename_a: first file path @@ -96,7 +110,7 @@ def diff_files(filename_a, filename_b): return list(differ.compare(fh_a.readlines(), fh_b.readlines())) -def try_remove(path): +def try_remove(path: StrOrBytesPath) -> None: """Attempt to remove a file; no exception is generated if the attempt fails. @@ -108,7 +122,7 @@ def try_remove(path): pass -def try_rmdir(path): +def try_rmdir(path: StrOrBytesPath) -> None: """Attempt to remove a directory; no exception is generated if the attempt fails. 
@@ -120,23 +134,23 @@ def try_rmdir(path): shutil.rmtree(path, ignore_errors=True) -def basename(path, ext=None): +def basename(path: StrPath, ext: str | None = None) -> str: """Remove leading directory components and an optional extension from the specified path :param str path: path :param str ext: extension """ - name = os.path.basename(path) + name: str = os.path.basename(path) if not ext: return name - fs = name.rsplit(".", 1) + fs: list[str] = name.rsplit(".", 1) if len(fs) > 1 and fs[1].lower() == ext: name = fs[0] return name -class KeyValue(dict): +class KeyValue(dict[str, VT]): """A general-purpose key-value store. KeyValue is a subclass of dict, but also allows entries to be read and @@ -149,16 +163,19 @@ class KeyValue(dict): >>> reg.south = 205 >>> reg['south'] 205 + + The keys of KeyValue are strings. To use other key types, use other mapping types. + To use the attribute syntax, the keys must be valid Python attribute names. """ - def __getattr__(self, key): + def __getattr__(self, key: str) -> VT: return self[key] - def __setattr__(self, key, value): + def __setattr__(self, key: str, value: VT) -> None: self[key] = value -def _get_encoding(): +def _get_encoding() -> str: try: # Python >= 3.11 encoding = locale.getencoding() @@ -169,7 +186,7 @@ def _get_encoding(): return encoding -def decode(bytes_, encoding=None): +def decode(bytes_: AnyStr, encoding: str | None = None) -> str: """Decode bytes with default locale and return (unicode) string No-op if parameter is not bytes (assumed unicode string). @@ -193,16 +210,17 @@ def decode(bytes_, encoding=None): enc = _get_encoding() if encoding is None else encoding return bytes_.decode(enc) # only text should be used - raise TypeError("can only accept types str and bytes") + msg = "can only accept types str and bytes" + raise TypeError(msg) -def encode(string, encoding=None): +def encode(string: AnyStr, encoding: str | None = None) -> bytes: """Encode string with default locale and return bytes with that encoding No-op if parameter is bytes (assumed already encoded). This ensures garbage in, garbage out. - :param str string: the string to encode + :param string: the string to encode :param encoding: encoding to be used, default value is None Example @@ -221,35 +239,77 @@ def encode(string, encoding=None): enc = _get_encoding() if encoding is None else encoding return string.encode(enc) # if something else than text - raise TypeError("can only accept types str and bytes") + msg = "Can only accept types str and bytes" + raise TypeError(msg) -def text_to_string(text, encoding=None): +def text_to_string(text: AnyStr, encoding: str | None = None) -> str: """Convert text to str. Useful when passing text into environments, in Python 2 it needs to be bytes on Windows, in Python 3 in needs unicode. 
""" return decode(text, encoding=encoding) -def parse_key_val(s, sep="=", dflt=None, val_type=None, vsep=None): +@overload +def parse_key_val( + s: AnyStr, + sep: str = "=", + dflt: T | None = None, + val_type: None = ..., + vsep: str | None = None, +) -> KeyValue[str | T | None]: + pass + + +@overload +def parse_key_val( + s: AnyStr, + sep: str = "=", + dflt: T | None = None, + val_type: Callable[[str], T] = ..., + vsep: str | None = None, +) -> KeyValue[T | None]: + pass + + +@overload +def parse_key_val( + s: AnyStr, + sep: str = "=", + dflt: T | None = None, + val_type: Callable[[str], T] | None = None, + vsep: str | None = None, +) -> KeyValue[str | T] | KeyValue[T | None] | KeyValue[T] | KeyValue[str | T | None]: + pass + + +def parse_key_val( + s: AnyStr, + sep: str = "=", + dflt: T | None = None, + val_type: Callable[[str], T] | None = None, + vsep: str | None = None, +) -> KeyValue[str | T] | KeyValue[T | None] | KeyValue[T] | KeyValue[str | T | None]: """Parse a string into a dictionary, where entries are separated by newlines and the key and value are separated by `sep` (default: `=`) >>> parse_key_val('min=20\\nmax=50') == {'min': '20', 'max': '50'} True - >>> parse_key_val('min=20\\nmax=50', - ... val_type=float) == {'min': 20, 'max': 50} + >>> parse_key_val('min=20\\nmax=50', val_type=float) == {'min': 20, 'max': 50} True - :param str s: string to be parsed - :param str sep: key/value separator + :param s: string to be parsed + :param sep: key/value separator :param dflt: default value to be used :param val_type: value type (None for no cast) :param vsep: vertical separator (default is Python 'universal newlines' approach) :return: parsed input (dictionary of keys/values) """ - result = KeyValue() + + result: ( + KeyValue[str | T] | KeyValue[T | None] | KeyValue[T] | KeyValue[str | T | None] + ) = KeyValue() if not s: return result @@ -259,7 +319,7 @@ def parse_key_val(s, sep="=", dflt=None, val_type=None, vsep=None): vsep = encode(vsep) if vsep else vsep if vsep: - lines = s.split(vsep) + lines: list[bytes] | list[str] = s.split(vsep) try: lines.remove("\n") except ValueError: @@ -267,15 +327,25 @@ def parse_key_val(s, sep="=", dflt=None, val_type=None, vsep=None): else: lines = s.splitlines() + if callable(val_type): + result = cast("KeyValue[T | None]", result) + for line in lines: + kv: list[bytes] | list[str] = line.split(sep, 1) + k: str = decode(kv[0].strip()) + result[k] = val_type(decode(kv[1].strip())) if len(kv) > 1 else dflt + + if dflt is not None: + result = cast("KeyValue[T]", result) + return result + + result = cast("KeyValue[str | T | None]", result) for line in lines: kv = line.split(sep, 1) k = decode(kv[0].strip()) - v = decode(kv[1].strip()) if len(kv) > 1 else dflt + result[k] = decode(kv[1].strip()) if len(kv) > 1 else dflt - if val_type: - result[k] = val_type(v) - else: - result[k] = v + if dflt is not None: + result = cast("KeyValue[str | T]", result) return result @@ -345,15 +415,15 @@ def convert(text): def alphanum_key(actual_key): sort_key = key(actual_key) if key else actual_key - return [convert(c) for c in re.split("([0-9]+)", sort_key)] + return [convert(c) for c in re.split(r"([0-9]+)", sort_key)] items.sort(key=alphanum_key) def get_lib_path(modname, libname=None): """Return the path of the libname contained in the module.""" - from os.path import isdir, join, sep from os import getenv + from os.path import isdir, join, sep if isdir(join(getenv("GISBASE"), "etc", modname)): path = join(os.getenv("GISBASE"), "etc", modname) @@ -452,10 
+522,10 @@ def set_path(modulename, dirname=None, path="."): import sys # TODO: why dirname is checked first - the logic should be revised - _pathlib = None + pathlib_ = None if dirname: - _pathlib = os.path.join(path, dirname) - if _pathlib and os.path.exists(_pathlib): + pathlib_ = os.path.join(path, dirname) + if pathlib_ and os.path.exists(pathlib_): # we are running the script from the script directory, therefore # we add the path to sys.path to reach the directory (dirname) sys.path.append(os.path.abspath(path)) @@ -500,16 +570,18 @@ def legalize_vector_name(name, fallback_prefix="x"): """ # The implementation is based on Vect_legal_filename(). if not name: - raise ValueError("name cannot be empty") - if fallback_prefix and re.match("[^A-Za-z]", fallback_prefix[0]): - raise ValueError("fallback_prefix must start with an ASCII letter") - if fallback_prefix and re.match("[^A-Za-z]", name[0], flags=re.ASCII): + msg = "name cannot be empty" + raise ValueError(msg) + if fallback_prefix and re.match(r"[^A-Za-z]", fallback_prefix[0]): + msg = "fallback_prefix must start with an ASCII letter" + raise ValueError(msg) + if fallback_prefix and re.match(r"[^A-Za-z]", name[0], flags=re.ASCII): # We prefix here rather than just replace, because in cases of unique # identifiers, e.g., columns or node names, replacing the first # character by the same replacement character increases chances of # conflict (e.g. column names 10, 20, 30). name = "{fallback_prefix}{name}".format(**locals()) - name = re.sub("[^A-Za-z0-9_]", "_", name, flags=re.ASCII) + name = re.sub(r"[^A-Za-z0-9_]", "_", name, flags=re.ASCII) keywords = ["and", "or", "not"] if name in keywords: name = "{name}_".format(**locals()) @@ -587,21 +659,22 @@ def append_random(name, suffix_length=None, total_length=None): :func:`append_node_pid()` description. """ if suffix_length and total_length: - raise ValueError( - "Either suffix_length or total_length can be provided, not both" - ) + msg = "Either suffix_length or total_length can be provided, not both" + raise ValueError(msg) if not suffix_length and not total_length: - raise ValueError("suffix_length or total_length has to be provided") + msg = "suffix_length or total_length has to be provided" + raise ValueError(msg) if total_length: # remove len of name and one underscore name_length = len(name) suffix_length = total_length - name_length - 1 if suffix_length <= 0: - raise ValueError( + msg = ( "No characters left for the suffix:" " total_length <{total_length}> is too small" " or name <{name}> ({name_length}) is too long".format(**locals()) ) + raise ValueError(msg) # We don't do lower and upper case because that could cause conflicts in # contexts which are case-insensitive. # We use lowercase because that's what is in UUID4 hex string. diff --git a/python/grass/script/vector.py b/python/grass/script/vector.py index 4adf3e38da1..8a05069839a 100644 --- a/python/grass/script/vector.py +++ b/python/grass/script/vector.py @@ -163,6 +163,7 @@ def vector_history(map, replace=False, env=None): def vector_info_topo(map, layer=1, env=None): """Return information about a vector map (interface to `v.info -t`). 
+ Example: >>> vector_info_topo("geology") # doctest: +NORMALIZE_WHITESPACE diff --git a/python/grass/semantic_label/reader.py b/python/grass/semantic_label/reader.py index 591006fe352..358e7fafdf9 100644 --- a/python/grass/semantic_label/reader.py +++ b/python/grass/semantic_label/reader.py @@ -26,7 +26,8 @@ def __init__(self): os.path.join(os.environ["GISBASE"], "etc", "i.band.library", "*.json") ) if not self._json_files: - raise SemanticLabelReaderError("No semantic label definitions found") + msg = "No semantic label definitions found" + raise SemanticLabelReaderError(msg) self._read_config() @@ -38,9 +39,8 @@ def _read_config(self): with open(json_file) as fd: config = json.load(fd, object_pairs_hook=OrderedDict) except json.decoder.JSONDecodeError as e: - raise SemanticLabelReaderError( - "Unable to parse '{}': {}".format(json_file, e) - ) + msg = "Unable to parse '{}': {}".format(json_file, e) + raise SemanticLabelReaderError(msg) # check if configuration is valid self._check_config(config) @@ -58,13 +58,11 @@ def _check_config(config): for items in config.values(): for item in ("shortcut", "bands"): if item not in items.keys(): - raise SemanticLabelReaderError( - "Invalid band definition: <{}> is missing".format(item) - ) + msg = "Invalid band definition: <{}> is missing".format(item) + raise SemanticLabelReaderError(msg) if len(items["bands"]) < 1: - raise SemanticLabelReaderError( - "Invalid band definition: no bands defined" - ) + msg = "Invalid band definition: no bands defined" + raise SemanticLabelReaderError(msg) @staticmethod def _print_label_extended(label, item): @@ -116,13 +114,13 @@ def print_info(self, shortcut=None, band=None, semantic_label=None, extended=Fal if shortcut and re.match(shortcut, item["shortcut"]) is None: continue except re.error as e: - raise SemanticLabelReaderError("Invalid pattern: {}".format(e)) + msg = "Invalid pattern: {}".format(e) + raise SemanticLabelReaderError(msg) found = True if band and band not in item["bands"]: - raise SemanticLabelReaderError( - "Band <{}> not found in <{}>".format(band, shortcut) - ) + msg = "Band <{}> not found in <{}>".format(band, shortcut) + raise SemanticLabelReaderError(msg) # print generic information if extended: @@ -192,9 +190,9 @@ def get_bands(self): :return list: list of valid band identifiers """ - bands = [] - for root in self.config.values(): - for item in root.values(): - for band in item["bands"]: - bands.append("{}_{}".format(item["shortcut"], band)) - return bands + return [ + "{}_{}".format(item["shortcut"], band) + for root in self.config.values() + for item in root.values() + for band in item["bands"] + ] diff --git a/python/grass/temporal/__init__.py b/python/grass/temporal/__init__.py index 2048b66ccf0..b70fe0ab96f 100644 --- a/python/grass/temporal/__init__.py +++ b/python/grass/temporal/__init__.py @@ -1,33 +1,360 @@ -from .abstract_dataset import * -from .abstract_map_dataset import * -from .abstract_space_time_dataset import * -from .aggregation import * -from .base import * -from .c_libraries_interface import * -from .core import * -from .datetime_math import * -from .extract import * -from .factory import * -from .gui_support import * -from .list_stds import * -from .mapcalc import * -from .metadata import * -from .open_stds import * -from .register import * -from .sampling import * -from .space_time_datasets import * -from .spatial_extent import * -from .spatial_topology_dataset_connector import * -from .spatio_temporal_relationships import * -from .stds_export import * -from 
.stds_import import * -from .temporal_algebra import * -from .temporal_extent import * -from .temporal_granularity import * -from .temporal_operator import * -from .temporal_raster3d_algebra import * -from .temporal_raster_algebra import * -from .temporal_raster_base_algebra import * -from .temporal_topology_dataset_connector import * -from .temporal_vector_algebra import * -from .univar_statistics import * +from .abstract_dataset import ( + AbstractDataset, + AbstractDatasetComparisonKeyEndTime, + AbstractDatasetComparisonKeyStartTime, +) +from .abstract_map_dataset import AbstractMapDataset +from .abstract_space_time_dataset import AbstractSpaceTimeDataset +from .aggregation import aggregate_by_topology, aggregate_raster_maps, collect_map_names +from .base import ( + AbstractSTDSRegister, + DatasetBase, + DictSQLSerializer, + Raster3DBase, + Raster3DSTDSRegister, + RasterBase, + RasterSTDSRegister, + SQLDatabaseInterface, + STDSBase, + STR3DSBase, + STRDSBase, + STVDSBase, + VectorBase, + VectorSTDSRegister, +) +from .c_libraries_interface import CLibrariesInterface, RPCDefs, c_library_server +from .core import ( + DBConnection, + SQLDatabaseInterfaceConnection, + create_temporal_database, + get_available_temporal_mapsets, + get_current_gisdbase, + get_current_location, + get_current_mapset, + get_database_info_string, + get_enable_mapset_check, + get_enable_timestamp_write, + get_raise_on_error, + get_sql_template_path, + get_tgis_backend, + get_tgis_c_library_interface, + get_tgis_database, + get_tgis_database_string, + get_tgis_db_version, + get_tgis_db_version_from_metadata, + get_tgis_dbmi_paramstyle, + get_tgis_message_interface, + get_tgis_metadata, + get_tgis_version, + init, + init_dbif, + profile_function, + set_raise_on_error, + stop_subprocesses, + upgrade_temporal_database, +) +from .datetime_math import ( + adjust_datetime_to_granularity, + check_datetime_string, + compute_datetime_delta, + create_numeric_suffix, + create_suffix_from_datetime, + create_time_suffix, + datetime_to_grass_datetime_string, + decrement_datetime_by_string, + increment_datetime_by_string, + modify_datetime, + modify_datetime_by_string, + relative_time_to_time_delta, + relative_time_to_time_delta_seconds, + string_to_datetime, + time_delta_to_relative_time, + time_delta_to_relative_time_seconds, +) +from .extract import ( + extract_dataset, + run_mapcalc2d, + run_mapcalc3d, + run_vector_extraction, +) +from .factory import dataset_factory +from .gui_support import tlist, tlist_grouped +from .list_stds import get_dataset_list, list_maps_of_stds +from .mapcalc import dataset_mapcalculator +from .metadata import ( + Raster3DMetadata, + RasterMetadata, + RasterMetadataBase, + STDSMetadataBase, + STDSRasterMetadataBase, + STR3DSMetadata, + STRDSMetadata, + STVDSMetadata, + VectorMetadata, +) +from .open_stds import ( + check_new_map_dataset, + check_new_stds, + open_new_map_dataset, + open_new_stds, + open_old_stds, +) +from .register import ( + assign_valid_time_to_map, + register_map_object_list, + register_maps_in_space_time_dataset, +) +from .sampling import sample_stds_by_stds_topology +from .space_time_datasets import ( + Raster3DDataset, + RasterDataset, + SpaceTimeRaster3DDataset, + SpaceTimeRasterDataset, + SpaceTimeVectorDataset, + VectorDataset, +) +from .spatial_extent import ( + Raster3DSpatialExtent, + RasterSpatialExtent, + SpatialExtent, + STR3DSSpatialExtent, + STRDSSpatialExtent, + STVDSSpatialExtent, + VectorSpatialExtent, +) +from .spatial_topology_dataset_connector import 
SpatialTopologyDatasetConnector +from .spatio_temporal_relationships import ( + SpatioTemporalTopologyBuilder, + count_temporal_topology_relationships, + create_temporal_relation_sql_where_statement, + print_spatio_temporal_topology_relationships, + print_temporal_topology_relationships, + set_spatial_relationship, + set_temporal_relationship, +) +from .stds_export import export_stds +from .stds_import import import_stds +from .temporal_algebra import ( + FatalError, + GlobalTemporalVar, + TemporalAlgebraLexer, + TemporalAlgebraParser, +) +from .temporal_extent import ( + AbsoluteTemporalExtent, + Raster3DAbsoluteTime, + Raster3DRelativeTime, + RasterAbsoluteTime, + RasterRelativeTime, + RelativeTemporalExtent, + STDSAbsoluteTime, + STDSRelativeTime, + STR3DSAbsoluteTime, + STR3DSRelativeTime, + STRDSAbsoluteTime, + STRDSRelativeTime, + STVDSAbsoluteTime, + STVDSRelativeTime, + TemporalExtent, + VectorAbsoluteTime, + VectorRelativeTime, +) +from .temporal_granularity import ( + check_granularity_string, + compute_absolute_time_granularity, + compute_common_absolute_time_granularity, + compute_common_absolute_time_granularity_simple, + compute_common_relative_time_granularity, + compute_relative_time_granularity, + gcd, + gcd_list, + get_time_tuple_function, + gran_plural_unit, + gran_singular_unit, + gran_to_gran, +) +from .temporal_operator import TemporalOperatorLexer, TemporalOperatorParser +from .temporal_raster3d_algebra import TemporalRaster3DAlgebraParser +from .temporal_raster_algebra import TemporalRasterAlgebraParser +from .temporal_raster_base_algebra import ( + TemporalRasterAlgebraLexer, + TemporalRasterBaseAlgebraParser, +) +from .temporal_topology_dataset_connector import TemporalTopologyDatasetConnector +from .temporal_vector_algebra import ( + TemporalVectorAlgebraLexer, + TemporalVectorAlgebraParser, +) +from .univar_statistics import ( + compute_univar_stats, + print_gridded_dataset_univar_statistics, + print_vector_dataset_univar_statistics, +) + +__all__ = [ + "AbsoluteTemporalExtent", + "AbstractDataset", + "AbstractDatasetComparisonKeyEndTime", + "AbstractDatasetComparisonKeyStartTime", + "AbstractMapDataset", + "AbstractSTDSRegister", + "AbstractSpaceTimeDataset", + "CLibrariesInterface", + "DBConnection", + "DatasetBase", + "DictSQLSerializer", + "FatalError", + "GlobalTemporalVar", + "RPCDefs", + "Raster3DAbsoluteTime", + "Raster3DBase", + "Raster3DDataset", + "Raster3DMetadata", + "Raster3DRelativeTime", + "Raster3DSTDSRegister", + "Raster3DSpatialExtent", + "RasterAbsoluteTime", + "RasterBase", + "RasterDataset", + "RasterMetadata", + "RasterMetadataBase", + "RasterRelativeTime", + "RasterSTDSRegister", + "RasterSpatialExtent", + "RelativeTemporalExtent", + "SQLDatabaseInterface", + "SQLDatabaseInterfaceConnection", + "STDSAbsoluteTime", + "STDSBase", + "STDSMetadataBase", + "STDSRasterMetadataBase", + "STDSRelativeTime", + "STR3DSAbsoluteTime", + "STR3DSBase", + "STR3DSMetadata", + "STR3DSRelativeTime", + "STR3DSSpatialExtent", + "STRDSAbsoluteTime", + "STRDSBase", + "STRDSMetadata", + "STRDSRelativeTime", + "STRDSSpatialExtent", + "STVDSAbsoluteTime", + "STVDSBase", + "STVDSMetadata", + "STVDSRelativeTime", + "STVDSSpatialExtent", + "SpaceTimeRaster3DDataset", + "SpaceTimeRasterDataset", + "SpaceTimeVectorDataset", + "SpatialExtent", + "SpatialTopologyDatasetConnector", + "SpatioTemporalTopologyBuilder", + "TemporalAlgebraLexer", + "TemporalAlgebraParser", + "TemporalExtent", + "TemporalOperatorLexer", + "TemporalOperatorParser", + 
"TemporalRaster3DAlgebraParser", + "TemporalRasterAlgebraLexer", + "TemporalRasterAlgebraParser", + "TemporalRasterBaseAlgebraParser", + "TemporalTopologyDatasetConnector", + "TemporalVectorAlgebraLexer", + "TemporalVectorAlgebraParser", + "VectorAbsoluteTime", + "VectorBase", + "VectorDataset", + "VectorMetadata", + "VectorRelativeTime", + "VectorSTDSRegister", + "VectorSpatialExtent", + "adjust_datetime_to_granularity", + "aggregate_by_topology", + "aggregate_raster_maps", + "assign_valid_time_to_map", + "c_library_server", + "check_datetime_string", + "check_granularity_string", + "check_new_map_dataset", + "check_new_stds", + "collect_map_names", + "compute_absolute_time_granularity", + "compute_common_absolute_time_granularity", + "compute_common_absolute_time_granularity_simple", + "compute_common_relative_time_granularity", + "compute_datetime_delta", + "compute_relative_time_granularity", + "compute_univar_stats", + "count_temporal_topology_relationships", + "create_numeric_suffix", + "create_suffix_from_datetime", + "create_temporal_database", + "create_temporal_relation_sql_where_statement", + "create_time_suffix", + "dataset_factory", + "dataset_mapcalculator", + "datetime_to_grass_datetime_string", + "decrement_datetime_by_string", + "export_stds", + "extract_dataset", + "gcd", + "gcd_list", + "get_available_temporal_mapsets", + "get_current_gisdbase", + "get_current_location", + "get_current_mapset", + "get_database_info_string", + "get_dataset_list", + "get_enable_mapset_check", + "get_enable_timestamp_write", + "get_raise_on_error", + "get_sql_template_path", + "get_tgis_backend", + "get_tgis_c_library_interface", + "get_tgis_database", + "get_tgis_database_string", + "get_tgis_db_version", + "get_tgis_db_version_from_metadata", + "get_tgis_dbmi_paramstyle", + "get_tgis_message_interface", + "get_tgis_metadata", + "get_tgis_version", + "get_time_tuple_function", + "gran_plural_unit", + "gran_singular_unit", + "gran_to_gran", + "import_stds", + "increment_datetime_by_string", + "init", + "init_dbif", + "list_maps_of_stds", + "modify_datetime", + "modify_datetime_by_string", + "open_new_map_dataset", + "open_new_stds", + "open_old_stds", + "print_gridded_dataset_univar_statistics", + "print_spatio_temporal_topology_relationships", + "print_temporal_topology_relationships", + "print_vector_dataset_univar_statistics", + "profile_function", + "register_map_object_list", + "register_maps_in_space_time_dataset", + "relative_time_to_time_delta", + "relative_time_to_time_delta_seconds", + "run_mapcalc2d", + "run_mapcalc3d", + "run_vector_extraction", + "sample_stds_by_stds_topology", + "set_raise_on_error", + "set_spatial_relationship", + "set_temporal_relationship", + "stop_subprocesses", + "string_to_datetime", + "time_delta_to_relative_time", + "time_delta_to_relative_time_seconds", + "tlist", + "tlist_grouped", + "upgrade_temporal_database", +] diff --git a/python/grass/temporal/abstract_dataset.py b/python/grass/temporal/abstract_dataset.py index 6df2fa1f81e..b8e0856ca77 100644 --- a/python/grass/temporal/abstract_dataset.py +++ b/python/grass/temporal/abstract_dataset.py @@ -29,12 +29,12 @@ class AbstractDataset( __metaclass__ = ABCMeta - def __init__(self): + def __init__(self) -> None: SpatialTopologyDatasetConnector.__init__(self) TemporalTopologyDatasetConnector.__init__(self) self.msgr = get_tgis_message_interface() - def reset_topology(self): + def reset_topology(self) -> None: """Reset any information about temporal topology""" self.reset_spatial_topology() @@ -88,12 
+88,12 @@ def get_number_of_relations(self): return None - def set_topology_build_true(self): + def set_topology_build_true(self) -> None: """Use this method when the spatio-temporal topology was build""" self.set_spatial_topology_build_true() self.set_temporal_topology_build_true() - def set_topology_build_false(self): + def set_topology_build_false(self) -> None: """Use this method when the spatio-temporal topology was not build""" self.set_spatial_topology_build_false() self.set_temporal_topology_build_false() @@ -104,19 +104,18 @@ def is_topology_build(self): :return: A dictionary with "spatial" and "temporal" as keys that have boolean values """ - d = {} - d["spatial"] = self.is_spatial_topology_build() - d["temporal"] = self.is_temporal_topology_build() + return { + "spatial": self.is_spatial_topology_build(), + "temporal": self.is_temporal_topology_build(), + } - return d - - def print_topology_info(self): + def print_topology_info(self) -> None: if self.is_temporal_topology_build(): self.print_temporal_topology_info() if self.is_spatial_topology_build(): self.print_spatial_topology_info() - def print_topology_shell_info(self): + def print_topology_shell_info(self) -> None: if self.is_temporal_topology_build(): self.print_temporal_topology_shell_info() if self.is_spatial_topology_build(): @@ -223,7 +222,7 @@ def print_shell_info(self): def print_self(self): """Print the content of the internal structure to stdout""" - def set_id(self, ident): + def set_id(self, ident) -> None: """Set the identifier of the dataset""" self.base.set_id(ident) self.temporal_extent.set_id(ident) @@ -351,7 +350,7 @@ def get_spatial_extent(self): """Return the spatial extent""" return self.spatial_extent - def select(self, dbif=None, mapset=None): + def select(self, dbif=None, mapset=None) -> None: """Select temporal dataset entry from database and fill the internal structure @@ -392,7 +391,7 @@ def is_in_db(self, dbif=None, mapset=None): def delete(self): """Delete dataset from database if it exists""" - def insert(self, dbif=None, execute=True): + def insert(self, dbif=None, execute: bool = True): """Insert dataset into database :param dbif: The database interface to be used @@ -434,7 +433,7 @@ def insert(self, dbif=None, execute=True): dbif.close() return statement - def update(self, dbif=None, execute=True, ident=None): + def update(self, dbif=None, execute: bool = True, ident=None): """Update the dataset entry in the database from the internal structure excluding None variables @@ -468,7 +467,7 @@ def update(self, dbif=None, execute=True, ident=None): dbif.close() return statement - def update_all(self, dbif=None, execute=True, ident=None): + def update_all(self, dbif=None, execute: bool = True, ident=None): """Update the dataset entry in the database from the internal structure and include None variables. 
@@ -589,7 +588,7 @@ class AbstractDatasetComparisonKeyStartTime: sorted_map_list = sorted(map_list, key=AbstractDatasetComparisonKeyStartTime) """ - def __init__(self, obj, *args): + def __init__(self, obj, *args) -> None: self.obj = obj def __lt__(self, other): @@ -641,7 +640,7 @@ class AbstractDatasetComparisonKeyEndTime: sorted_map_list = sorted(map_list, key=AbstractDatasetComparisonKeyEndTime) """ - def __init__(self, obj, *args): + def __init__(self, obj, *args) -> None: self.obj = obj def __lt__(self, other): diff --git a/python/grass/temporal/abstract_map_dataset.py b/python/grass/temporal/abstract_map_dataset.py index f6bec2bfef3..3b9cf272f0c 100644 --- a/python/grass/temporal/abstract_map_dataset.py +++ b/python/grass/temporal/abstract_map_dataset.py @@ -57,7 +57,7 @@ class AbstractMapDataset(AbstractDataset): __metaclass__ = ABCMeta - def __init__(self): + def __init__(self) -> None: AbstractDataset.__init__(self) self.ciface = get_tgis_c_library_interface() @@ -84,7 +84,8 @@ def check_resolution_with_current_region(self): :return: "finer" or "coarser" """ - raise ImplementationError("This method must be implemented in the subclasses") + msg = "This method must be implemented in the subclasses" + raise ImplementationError(msg) @abstractmethod def has_grass_timestamp(self): @@ -167,7 +168,7 @@ def get_map_id(self): return self.base.get_map_id() @staticmethod - def split_name(name, layer=None, mapset=None): + def split_name(name: str, layer=None, mapset=None): """Convenient method to split a map name into three potentially contained parts: map name, map layer and mapset. For the layer and mapset, default keyword arguments can be given if not present in @@ -194,9 +195,9 @@ def split_name(name, layer=None, mapset=None): return name, layer, mapset @staticmethod - def build_id_from_search_path(name, element): + def build_id_from_search_path(name: str, element) -> str: """Convenient method to build the unique identifier while - checking the current seach path for the correct mapset. + checking the current search path for the correct mapset. Existing mapset definitions in the name string will be reused. 
@@ -225,7 +226,7 @@ def build_id_from_search_path(name, element): else: gs.fatal( _( - "Map <{map_name}> of element tpye '{element}' not found on \ + "Map <{map_name}> of element type '{element}' not found on \ search path" ).format(element=element, map_name=name) ) @@ -235,7 +236,7 @@ def build_id_from_search_path(name, element): return f"{name}@{mapset}" @staticmethod - def build_id(name, mapset, layer=None): + def build_id(name: str, mapset, layer=None) -> str: """Convenient method to build the unique identifier Existing layer and mapset definitions in the name @@ -266,7 +267,7 @@ def get_layer(self): """ return self.base.get_layer() - def print_self(self): + def print_self(self) -> None: """Print the content of the internal structure to stdout""" self.base.print_self() self.temporal_extent.print_self() @@ -274,7 +275,7 @@ def print_self(self): self.metadata.print_self() self.stds_register.print_self() - def print_info(self): + def print_info(self) -> None: """Print information about this object in human readable style""" if self.get_type() == "raster": @@ -322,7 +323,7 @@ def print_info(self): " +----------------------------------------------------------------------------+" # noqa: E501 ) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this object in shell style""" self.base.print_shell_info() self.temporal_extent.print_shell_info() @@ -343,7 +344,7 @@ def print_shell_info(self): if self.is_topology_build(): self.print_topology_shell_info() - def insert(self, dbif=None, execute=True): + def insert(self, dbif=None, execute: bool = True): """Insert the map content into the database from the internal structure @@ -364,7 +365,7 @@ def insert(self, dbif=None, execute=True): self.write_timestamp_to_grass() return AbstractDataset.insert(self, dbif=dbif, execute=execute) - def update(self, dbif=None, execute=True): + def update(self, dbif=None, execute: bool = True): """Update the map content in the database from the internal structure excluding None variables @@ -383,7 +384,7 @@ def update(self, dbif=None, execute=True): self.write_timestamp_to_grass() return AbstractDataset.update(self, dbif, execute) - def update_all(self, dbif=None, execute=True): + def update_all(self, dbif=None, execute: bool = True): """Update the map content in the database from the internal structure including None variables @@ -403,15 +404,15 @@ def update_all(self, dbif=None, execute=True): self.write_timestamp_to_grass() return AbstractDataset.update_all(self, dbif, execute) - def set_time_to_absolute(self): + def set_time_to_absolute(self) -> None: """Set the temporal type to absolute""" self.base.set_ttype("absolute") - def set_time_to_relative(self): + def set_time_to_relative(self) -> None: """Set the temporal type to relative""" self.base.set_ttype("relative") - def set_absolute_time(self, start_time, end_time=None): + def set_absolute_time(self, start_time, end_time=None) -> bool: """Set the absolute time with start time and end time The end time is optional and must be set to None in case of time @@ -501,7 +502,7 @@ def set_absolute_time(self, start_time, end_time=None): return True - def update_absolute_time(self, start_time, end_time=None, dbif=None): + def update_absolute_time(self, start_time, end_time=None, dbif=None) -> None: """Update the absolute time The end time is optional and must be set to None in case of time @@ -543,7 +544,7 @@ def update_absolute_time(self, start_time, end_time=None, dbif=None): if get_enable_timestamp_write(): 
self.write_timestamp_to_grass() - def set_relative_time(self, start_time, end_time, unit): + def set_relative_time(self, start_time, end_time, unit) -> bool: """Set the relative time interval The end time is optional and must be set to None in case of time @@ -624,7 +625,7 @@ def set_relative_time(self, start_time, end_time, unit): return True - def update_relative_time(self, start_time, end_time, unit, dbif=None): + def update_relative_time(self, start_time, end_time, unit, dbif=None) -> None: """Update the relative time interval The end time is optional and must be set to None in case of time @@ -664,7 +665,7 @@ def update_relative_time(self, start_time, end_time, unit, dbif=None): if get_enable_timestamp_write(): self.write_timestamp_to_grass() - def set_temporal_extent(self, extent): + def set_temporal_extent(self, extent) -> None: """Convenient method to set the temporal extent from a temporal extent object @@ -721,7 +722,7 @@ def set_temporal_extent(self, extent): self.set_absolute_time(start, end) - def temporal_buffer(self, increment, update=False, dbif=None): + def temporal_buffer(self, increment, update: bool = False, dbif=None) -> None: """Create a temporal buffer based on an increment For absolute time the increment must be a string of type "integer @@ -827,7 +828,9 @@ def temporal_buffer(self, increment, update=False, dbif=None): else: self.set_relative_time(new_start, new_end, unit) - def set_spatial_extent_from_values(self, north, south, east, west, top=0, bottom=0): + def set_spatial_extent_from_values( + self, north, south, east, west, top=0, bottom=0 + ) -> None: """Set the spatial extent of the map from values This method only modifies this object and does not commit @@ -844,7 +847,7 @@ def set_spatial_extent_from_values(self, north, south, east, west, top=0, bottom north, south, east, west, top, bottom ) - def set_spatial_extent(self, spatial_extent): + def set_spatial_extent(self, spatial_extent) -> None: """Set the spatial extent of the map This method only modifies this object and does not commit @@ -868,7 +871,7 @@ def set_spatial_extent(self, spatial_extent): """ self.spatial_extent.set_spatial_extent(spatial_extent) - def spatial_buffer(self, size, update=False, dbif=None): + def spatial_buffer(self, size, update: bool = False, dbif=None) -> None: """Buffer the spatial extent by a given size in all spatial directions. @@ -900,7 +903,7 @@ def spatial_buffer(self, size, update=False, dbif=None): if update: self.spatial_extent.update(dbif) - def spatial_buffer_2d(self, size, update=False, dbif=None): + def spatial_buffer_2d(self, size, update: bool = False, dbif=None) -> None: """Buffer the spatial extent by a given size in 2d spatial directions. 
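The `-> bool` annotations on the time setters make their success/failure contract explicit. Below is a small, self-contained sketch of the start/end convention those setters document (end set to `None` for a time instant, otherwise an interval); the function name and dates are invented for illustration:

```python
from __future__ import annotations

from datetime import datetime


def describe_absolute_time(start: datetime, end: datetime | None) -> str:
    # Illustrative only: mirrors the documented convention that end=None
    # marks a time instant, while start < end describes an interval.
    if end is None:
        return f"instant at {start.isoformat()}"
    if end <= start:
        raise ValueError("end time must be later than the start time")
    return f"interval {start.isoformat()} .. {end.isoformat()}"


print(describe_absolute_time(datetime(2010, 1, 1), None))
print(describe_absolute_time(datetime(2010, 1, 1), datetime(2010, 2, 1)))
```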
@@ -930,7 +933,7 @@ def spatial_buffer_2d(self, size, update=False, dbif=None): if update: self.spatial_extent.update(dbif) - def check_for_correct_time(self): + def check_for_correct_time(self) -> bool: """Check for correct time :return: True in case of success, False otherwise @@ -970,7 +973,7 @@ def check_for_correct_time(self): return True - def delete(self, dbif=None, update=True, execute=True): + def delete(self, dbif=None, update: bool = True, execute: bool = True): """Delete a map entry from database if it exists Remove dependent entries: @@ -1032,7 +1035,7 @@ def delete(self, dbif=None, update=True, execute=True): return statement - def unregister(self, dbif=None, update=True, execute=True): + def unregister(self, dbif=None, update: bool = True, execute: bool = True): """Remove the map entry in each space time dataset in which this map is registered @@ -1123,7 +1126,7 @@ def get_registered_stds(self, dbif=None, mapset=None): # this fn should not be in a class for maps, # but instead in a class for stds: AbstractSpaceTimeDataset ? - def add_stds_to_register(self, stds_id, dbif=None, execute=True): + def add_stds_to_register(self, stds_id, dbif=None, execute: bool = True): """Add a new space time dataset to the register :param stds_id: The id of the space time dataset to be registered @@ -1172,7 +1175,7 @@ def add_stds_to_register(self, stds_id, dbif=None, execute=True): return statement - def remove_stds_from_register(self, stds_id, dbif=None, execute=True): + def remove_stds_from_register(self, stds_id, dbif=None, execute: bool = True): """Remove a space time dataset from the register :param stds_id: The id of the space time dataset to removed from @@ -1220,7 +1223,7 @@ def remove_stds_from_register(self, stds_id, dbif=None, execute=True): return statement - def read_semantic_label_from_grass(self): + def read_semantic_label_from_grass(self) -> None: """Read the band identifier of this map from the map metadata in the GRASS file system based spatial database and set the internal band identifier that should be insert/updated @@ -1230,7 +1233,7 @@ def read_semantic_label_from_grass(self): silently pass. """ - def set_semantic_label(self, semantic_label): + def set_semantic_label(self, semantic_label) -> None: """Set semantic label identifier Currently only implemented in RasterDataset. Otherwise diff --git a/python/grass/temporal/abstract_space_time_dataset.py b/python/grass/temporal/abstract_space_time_dataset.py index 8140a2da261..541fc8dd564 100644 --- a/python/grass/temporal/abstract_space_time_dataset.py +++ b/python/grass/temporal/abstract_space_time_dataset.py @@ -10,6 +10,8 @@ class that is the base class for all space time datasets. :authors: Soeren Gebbert """ +from __future__ import annotations + import copy import os import sys @@ -17,6 +19,7 @@ class that is the base class for all space time datasets. 
from abc import ABCMeta, abstractmethod from datetime import datetime from pathlib import Path +from grass.exceptions import FatalError from .abstract_dataset import AbstractDataset, AbstractDatasetComparisonKeyStartTime from .core import ( @@ -59,7 +62,7 @@ class AbstractSpaceTimeDataset(AbstractDataset): __metaclass__ = ABCMeta - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractDataset.__init__(self) self.reset(ident) self.map_counter = 0 @@ -67,7 +70,7 @@ def __init__(self, ident): # SpaceTimeRasterDataset related only self.semantic_label = None - def get_name(self, semantic_label=True): + def get_name(self, semantic_label: bool = True): """Get dataset name including semantic label filter if enabled. :param bool semantic_label: True to return dataset name @@ -128,14 +131,14 @@ def set_map_register(self, name): :param name: The name of the register table """ - def print_self(self): + def print_self(self) -> None: """Print the content of the internal structure to stdout""" self.base.print_self() self.temporal_extent.print_self() self.spatial_extent.print_self() self.metadata.print_self() - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" if self.get_type() == "strds": @@ -167,22 +170,26 @@ def print_info(self): " +----------------------------------------------------------------------------+" # noqa: E501 ) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self.base.print_shell_info() self.temporal_extent.print_shell_info() self.spatial_extent.print_shell_info() self.metadata.print_shell_info() - def print_history(self): + def print_history(self) -> None: """Print history information about this class in human readable shell style """ self.metadata.print_history() def set_initial_values( - self, temporal_type, semantic_type=None, title=None, description=None - ): + self, + temporal_type, + semantic_type=None, + title=None, + description: str | None = None, + ) -> None: """Set the initial values of the space time dataset In addition the command creation string is generated @@ -213,7 +220,7 @@ def set_initial_values( self.metadata.set_description(description) self.metadata.set_command(self.create_command_string()) - def set_aggregation_type(self, aggregation_type): + def set_aggregation_type(self, aggregation_type) -> None: """Set the aggregation type of the space time dataset :param aggregation_type: The aggregation type of the space time @@ -221,7 +228,7 @@ def set_aggregation_type(self, aggregation_type): """ self.metadata.set_aggregation_type(aggregation_type) - def update_command_string(self, dbif=None): + def update_command_string(self, dbif=None) -> None: """Append the current command string to any existing command string in the metadata class and calls metadata update @@ -311,7 +318,7 @@ def get_granularity(self): return self.temporal_extent.get_granularity() - def set_granularity(self, granularity): + def set_granularity(self, granularity) -> None: """Set the granularity The granularity is usually computed by the space time dataset at @@ -343,7 +350,7 @@ def set_granularity(self, granularity): self.temporal_extent.set_granularity(granularity) - def set_relative_time_unit(self, unit): + def set_relative_time_unit(self, unit) -> None: """Set the relative time unit which may be of type: years, months, days, hours, minutes or seconds @@ -363,7 +370,7 @@ def set_relative_time_unit(self, unit): self.msgr.fatal(_("Unsupported 
temporal unit: %s") % (unit)) self.relative_time.set_unit(unit) - def insert(self, dbif=None, execute=True): + def insert(self, dbif=None, execute: bool = True): """Insert the space time dataset content into the database from the internal structure @@ -500,7 +507,9 @@ def count_gaps(self, maps=None, dbif=None): return gaps - def print_spatio_temporal_relationships(self, maps=None, spatial=None, dbif=None): + def print_spatio_temporal_relationships( + self, maps=None, spatial=None, dbif=None + ) -> None: """Print the spatio-temporal relationships for each map of the space time dataset or for each map of the optional list of maps @@ -541,7 +550,7 @@ def count_temporal_relations(self, maps=None, dbif=None): return count_temporal_topology_relationships(maps1=maps, dbif=dbif) - def check_temporal_topology(self, maps=None, dbif=None): + def check_temporal_topology(self, maps=None, dbif=None) -> bool: """Check the temporal topology of all maps of the current space time dataset or of an optional list of maps @@ -613,7 +622,7 @@ def check_temporal_topology(self, maps=None, dbif=None): return True - def sample_by_dataset(self, stds, method=None, spatial=False, dbif=None): + def sample_by_dataset(self, stds, method=None, spatial: bool = False, dbif=None): """Sample this space time dataset with the temporal topology of a second space time dataset @@ -820,7 +829,9 @@ def sample_by_dataset(self, stds, method=None, spatial=False, dbif=None): return obj_list - def sample_by_dataset_sql(self, stds, method=None, spatial=False, dbif=None): + def sample_by_dataset_sql( + self, stds, method=None, spatial: bool = False, dbif=None + ): """Sample this space time dataset with the temporal topology of a second space time dataset using SQL queries. @@ -1996,7 +2007,7 @@ def shift_map_list(maps, gran): return maps - def shift(self, gran, dbif=None): + def shift(self, gran, dbif=None) -> bool: """Temporally shift each registered map with the provided granularity :param gran: The granularity to be used for shifting @@ -2173,7 +2184,7 @@ def snap_map_list(maps): return maps - def snap(self, dbif=None): + def snap(self, dbif=None) -> None: """For each registered map snap the end time to the start time of its temporal nearest neighbor in the future @@ -2231,7 +2242,7 @@ def snap(self, dbif=None): if connection_state_changed: dbif.close() - def _update_map_timestamps(self, maps, date_list, dbif): + def _update_map_timestamps(self, maps, date_list, dbif) -> None: """Update the timestamps of maps with the start and end time stored in the date_list. 
@@ -2278,7 +2289,7 @@ def _update_map_timestamps(self, maps, date_list, dbif): ds.select(dbif) ds.update_from_registered_maps(dbif) - def rename(self, ident, dbif=None): + def rename(self, ident, dbif=None) -> None: """Rename the space time dataset This method renames the space time dataset, the map register table @@ -2356,7 +2367,7 @@ def rename(self, ident, dbif=None): if connection_state_changed: dbif.close() - def delete(self, dbif=None, execute=True): + def delete(self, dbif=None, execute: bool = True): """Delete a space time dataset from the temporal database This method removes the space time dataset from the temporal @@ -2453,7 +2464,9 @@ def is_map_registered(self, map_id, dbif=None): try: dbif.execute(sql, (map_id,), mapset=self.base.mapset) row = dbif.fetchone(mapset=self.base.mapset) - except: + except FatalError: + raise + except Exception: self.msgr.warning(_("Error in register table request")) raise @@ -2465,7 +2478,7 @@ def is_map_registered(self, map_id, dbif=None): return is_registered - def register_map(self, map, dbif=None): + def register_map(self, map, dbif=None) -> bool: """Register a map in the space time dataset. This method takes care of the registration of a map @@ -2548,7 +2561,7 @@ def register_map(self, map, dbif=None): stds_register_table = self.get_map_register() stds_ttype = self.get_temporal_type() - # The gathered SQL statemets are stroed here + # The gathered SQL statements are stored here statement = "" # Check temporal types @@ -2664,7 +2677,7 @@ def register_map(self, map, dbif=None): return True - def unregister_map(self, map, dbif=None, execute=True): + def unregister_map(self, map, dbif=None, execute: bool = True): """Unregister a map from the space time dataset. This method takes care of the un-registration of a map @@ -2749,7 +2762,7 @@ def unregister_map(self, map, dbif=None, execute=True): return statement - def update_from_registered_maps(self, dbif=None): + def update_from_registered_maps(self, dbif=None) -> None: """This methods updates the modification time, the spatial and temporal extent as well as type specific metadata. 
It should always been called after maps are registered or unregistered/deleted from diff --git a/python/grass/temporal/aggregation.py b/python/grass/temporal/aggregation.py index 354600855e0..69537ab0e49 100644 --- a/python/grass/temporal/aggregation.py +++ b/python/grass/temporal/aggregation.py @@ -115,7 +115,7 @@ def collect_map_names(sp, dbif, start, end, sampling): def aggregate_raster_maps( - inputs, base, start, end, count, method, register_null, dbif, offset=0 + inputs, base, start, end, count: int, method, register_null, dbif, offset: int = 0 ): """Aggregate a list of raster input maps with r.series @@ -219,13 +219,13 @@ def aggregate_by_topology( topo_list, basename, time_suffix, - offset=0, + offset: int = 0, method="average", - nprocs=1, + nprocs: int = 1, spatial=None, dbif=None, - overwrite=False, - file_limit=1000, + overwrite: bool = False, + file_limit: int = 1000, ): """Aggregate a list of raster input maps with r.series diff --git a/python/grass/temporal/base.py b/python/grass/temporal/base.py index ebb4b715e29..4c4378921a6 100644 --- a/python/grass/temporal/base.py +++ b/python/grass/temporal/base.py @@ -25,6 +25,8 @@ :author: Soeren Gebbert """ +from __future__ import annotations + from datetime import datetime from .core import ( @@ -38,7 +40,7 @@ class DictSQLSerializer: - def __init__(self): + def __init__(self) -> None: self.D = {} self.dbmi_paramstyle = get_tgis_dbmi_paramstyle() @@ -174,7 +176,7 @@ def serialize(self, type, table, where=None): return sql, tuple(args) - def deserialize(self, row): + def deserialize(self, row) -> None: """Convert the content of the dbmi dictionary like row into the internal dictionary @@ -184,11 +186,11 @@ def deserialize(self, row): for key in row.keys(): self.D[key] = row[key] - def clear(self): + def clear(self) -> None: """Initialize the internal storage""" self.D = {} - def print_self(self): + def print_self(self) -> None: """Print the content of the internal dictionary to stdout""" print(self.D) @@ -243,7 +245,7 @@ class SQLDatabaseInterface(DictSQLSerializer): """ # noqa: E501 - def __init__(self, table=None, ident=None): + def __init__(self, table=None, ident=None) -> None: """Constructor of this class :param table: The name of the table @@ -280,7 +282,7 @@ def get_delete_statement(self): + "';\n" ) - def delete(self, dbif=None): + def delete(self, dbif=None) -> None: """Delete the entry of this object from the temporal database :param dbif: The database interface to be used, @@ -367,7 +369,7 @@ def get_select_statement_mogrified(self, dbif=None): self.get_select_statement(), mapset=self.mapset ) - def select(self, dbif=None, mapset=None): + def select(self, dbif=None, mapset=None) -> bool: """Select the content from the temporal database and store it in the internal dictionary structure @@ -432,7 +434,7 @@ def get_insert_statement_mogrified(self, dbif=None): mapset = get_current_mapset() return dbif.mogrify_sql_statement(self.get_insert_statement(), mapset=mapset) - def insert(self, dbif=None): + def insert(self, dbif=None) -> None: """Serialize the content of this object and store it in the temporal database using the internal identifier @@ -489,7 +491,7 @@ def get_update_statement_mogrified(self, dbif=None, ident=None): self.get_update_statement(ident), mapset=mapset ) - def update(self, dbif=None, ident=None): + def update(self, dbif=None, ident=None) -> None: """Serialize the content of this object and update it in the temporal database using the internal identifier @@ -547,7 +549,7 @@ def 
get_update_all_statement_mogrified(self, dbif=None, ident=None): return dbif.mogrify_sql_statement(self.get_update_all_statement(ident)) - def update_all(self, dbif=None, ident=None): + def update_all(self, dbif=None, ident=None) -> None: """Serialize the content of this object, including None objects, and update it in the temporal database using the internal identifier @@ -627,12 +629,12 @@ def __init__( self, table=None, ident=None, - name=None, + name: str | None = None, mapset=None, creator=None, ctime=None, ttype=None, - ): + ) -> None: """Constructor :param table: The name of the temporal database table @@ -665,7 +667,7 @@ def __init__( self.set_ctime(ctime) self.set_ttype(ttype) - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key) :param ident: The unique identifier must be a combination @@ -688,21 +690,21 @@ def set_id(self, ident): self.set_layer(layer) self.set_name(name) - def set_name(self, name): + def set_name(self, name) -> None: """Set the name of the dataset :param name: The name of the dataset """ self.D["name"] = name - def set_mapset(self, mapset): + def set_mapset(self, mapset) -> None: """Set the mapset of the dataset :param mapset: The name of the mapset in which this dataset is stored """ self.D["mapset"] = mapset - def set_layer(self, layer): + def set_layer(self, layer) -> None: """Convenient method to set the layer of the map (part of primary key) Layer are supported for vector maps @@ -711,14 +713,14 @@ def set_layer(self, layer): """ self.D["layer"] = layer - def set_creator(self, creator): + def set_creator(self, creator) -> None: """Set the creator of the dataset :param creator: The name of the creator """ self.D["creator"] = creator - def set_ctime(self, ctime=None): + def set_ctime(self, ctime=None) -> None: """Set the creation time of the dataset, if nothing set the current time is used @@ -729,7 +731,7 @@ def set_ctime(self, ctime=None): else: self.D["creation_time"] = ctime - def set_ttype(self, ttype): + def set_ttype(self, ttype) -> None: """Set the temporal type of the dataset: absolute or relative, if nothing set absolute time will assumed @@ -818,7 +820,7 @@ def get_ttype(self): ttype = property(fget=get_ttype, fset=set_ttype) creator = property(fget=get_creator, fset=set_creator) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" # 0123456789012345678901234567890 print( @@ -833,7 +835,7 @@ def print_info(self): print(" | Temporal type: ............. " + str(self.get_ttype())) print(" | Creation time: ............. 
" + str(self.get_ctime())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" print("id=" + str(self.get_id())) print("name=" + str(self.get_name())) @@ -854,12 +856,12 @@ class RasterBase(DatasetBase): def __init__( self, ident=None, - name=None, + name: str | None = None, mapset=None, creator=None, creation_time=None, temporal_type=None, - ): + ) -> None: DatasetBase.__init__( self, "raster_base", @@ -878,12 +880,12 @@ class Raster3DBase(DatasetBase): def __init__( self, ident=None, - name=None, + name: str | None = None, mapset=None, creator=None, creation_time=None, temporal_type=None, - ): + ) -> None: DatasetBase.__init__( self, "raster3d_base", @@ -902,13 +904,13 @@ class VectorBase(DatasetBase): def __init__( self, ident=None, - name=None, + name: str | None = None, mapset=None, layer=None, creator=None, creation_time=None, temporal_type=None, - ): + ) -> None: DatasetBase.__init__( self, "vector_base", @@ -984,24 +986,24 @@ def __init__( self, table=None, ident=None, - name=None, + name: str | None = None, mapset=None, semantic_type=None, creator=None, ctime=None, ttype=None, mtime=None, - ): + ) -> None: DatasetBase.__init__(self, table, ident, name, mapset, creator, ctime, ttype) self.set_semantic_type(semantic_type) self.set_mtime(mtime) - def set_semantic_type(self, semantic_type): + def set_semantic_type(self, semantic_type) -> None: """Set the semantic type of the space time dataset""" self.D["semantic_type"] = semantic_type - def set_mtime(self, mtime=None): + def set_mtime(self, mtime=None) -> None: """Set the modification time of the space time dataset, if nothing set the current time is used """ @@ -1030,14 +1032,14 @@ def get_mtime(self): semantic_type = property(fget=get_semantic_type, fset=set_semantic_type) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" DatasetBase.print_info(self) # 0123456789012345678901234567890 print(" | Modification time:.......... " + str(self.get_mtime())) print(" | Semantic type:.............. 
" + str(self.get_semantic_type())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" DatasetBase.print_shell_info(self) print("modification_time='{}'".format(str(self.get_mtime()))) @@ -1053,13 +1055,13 @@ class STRDSBase(STDSBase): def __init__( self, ident=None, - name=None, + name: str | None = None, mapset=None, semantic_type=None, creator=None, ctime=None, ttype=None, - ): + ) -> None: STDSBase.__init__( self, "strds_base", @@ -1079,13 +1081,13 @@ class STR3DSBase(STDSBase): def __init__( self, ident=None, - name=None, + name: str | None = None, mapset=None, semantic_type=None, creator=None, ctime=None, ttype=None, - ): + ) -> None: STDSBase.__init__( self, "str3ds_base", @@ -1105,13 +1107,13 @@ class STVDSBase(STDSBase): def __init__( self, ident=None, - name=None, + name: str | None = None, mapset=None, semantic_type=None, creator=None, ctime=None, ttype=None, - ): + ) -> None: STDSBase.__init__( self, "stvds_base", @@ -1145,7 +1147,7 @@ class AbstractSTDSRegister(SQLDatabaseInterface): """ - def __init__(self, table=None, ident=None, registered_stds=None): + def __init__(self, table=None, ident=None, registered_stds=None) -> None: """Constructor :param table: The name of the temporal database table @@ -1162,7 +1164,7 @@ def __init__(self, table=None, ident=None, registered_stds=None): self.set_id(ident) self.set_registered_stds(registered_stds) - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key) :param ident: The unique identifier must be a combination @@ -1172,7 +1174,7 @@ def set_id(self, ident): self.ident = ident self.D["id"] = ident - def set_registered_stds(self, registered_stds): + def set_registered_stds(self, registered_stds) -> None: """Get the comma separated list of space time datasets ids in which this map is registered @@ -1211,7 +1213,7 @@ def get_registered_stds(self): class RasterSTDSRegister(AbstractSTDSRegister): """Time stamped raster map base information class""" - def __init__(self, ident=None, registered_stds=None): + def __init__(self, ident=None, registered_stds=None) -> None: AbstractSTDSRegister.__init__( self, "raster_stds_register", ident, registered_stds ) @@ -1220,7 +1222,7 @@ def __init__(self, ident=None, registered_stds=None): class Raster3DSTDSRegister(AbstractSTDSRegister): """Time stamped 3D raster map base information class""" - def __init__(self, ident=None, registered_stds=None): + def __init__(self, ident=None, registered_stds=None) -> None: AbstractSTDSRegister.__init__( self, "raster3d_stds_register", ident, registered_stds ) @@ -1229,7 +1231,7 @@ def __init__(self, ident=None, registered_stds=None): class VectorSTDSRegister(AbstractSTDSRegister): """Time stamped vector map base information class""" - def __init__(self, ident=None, registered_stds=None): + def __init__(self, ident=None, registered_stds=None) -> None: AbstractSTDSRegister.__init__( self, "vector_stds_register", ident, registered_stds ) diff --git a/python/grass/temporal/c_libraries_interface.py b/python/grass/temporal/c_libraries_interface.py index 2b7f9f033f3..596bad50129 100644 --- a/python/grass/temporal/c_libraries_interface.py +++ b/python/grass/temporal/c_libraries_interface.py @@ -17,7 +17,7 @@ from ctypes import CFUNCTYPE, POINTER, byref, c_int, c_void_p, cast from datetime import datetime from multiprocessing import Lock, Pipe, Process -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Literal 
import grass.lib.date as libdate import grass.lib.gis as libgis @@ -33,9 +33,13 @@ from grass.script.utils import encode if TYPE_CHECKING: + from collections.abc import Callable from multiprocessing.connection import Connection from multiprocessing.synchronize import _LockLike + +logger = logging.getLogger(__name__) + ############################################################################### @@ -70,7 +74,7 @@ class RPCDefs: ############################################################################### -def _read_map_full_info(lock: _LockLike, conn: Connection, data): +def _read_map_full_info(lock: _LockLike, conn: Connection, data) -> None: """Read full map specific metadata from the spatial database using PyGRASS functions. @@ -198,7 +202,7 @@ def _read_vector_full_info(name, mapset, layer=None): return info -def _fatal_error(lock: _LockLike, conn: Connection, data): +def _fatal_error(lock: _LockLike, conn: Connection, data) -> None: """Calls G_fatal_error()""" libgis.G_fatal_error("Fatal Error in C library server") @@ -206,7 +210,7 @@ def _fatal_error(lock: _LockLike, conn: Connection, data): ############################################################################### -def _get_mapset(lock: _LockLike, conn: Connection, data): +def _get_mapset(lock: _LockLike, conn: Connection, data) -> None: """Return the current mapset :param lock: A multiprocessing.Lock instance @@ -223,7 +227,7 @@ def _get_mapset(lock: _LockLike, conn: Connection, data): ############################################################################### -def _get_location(lock: _LockLike, conn: Connection, data): +def _get_location(lock: _LockLike, conn: Connection, data) -> None: """Return the current location :param lock: A multiprocessing.Lock instance @@ -240,7 +244,7 @@ def _get_location(lock: _LockLike, conn: Connection, data): ############################################################################### -def _get_gisdbase(lock: _LockLike, conn: Connection, data): +def _get_gisdbase(lock: _LockLike, conn: Connection, data) -> None: """Return the current gisdatabase :param lock: A multiprocessing.Lock instance @@ -257,7 +261,7 @@ def _get_gisdbase(lock: _LockLike, conn: Connection, data): ############################################################################### -def _get_driver_name(lock: _LockLike, conn: Connection, data): +def _get_driver_name(lock: _LockLike, conn: Connection, data) -> None: """Return the temporal database driver of a specific mapset :param lock: A multiprocessing.Lock instance @@ -276,7 +280,7 @@ def _get_driver_name(lock: _LockLike, conn: Connection, data): ############################################################################### -def _get_database_name(lock: _LockLike, conn: Connection, data): +def _get_database_name(lock: _LockLike, conn: Connection, data) -> None: """Return the temporal database name of a specific mapset :param lock: A multiprocessing.Lock instance @@ -306,7 +310,7 @@ def _get_database_name(lock: _LockLike, conn: Connection, data): ############################################################################### -def _available_mapsets(lock: _LockLike, conn: Connection, data): +def _available_mapsets(lock: _LockLike, conn: Connection, data) -> None: """Return all available mapsets the user can access as a list of strings :param lock: A multiprocessing.Lock instance @@ -363,7 +367,7 @@ def _available_mapsets(lock: _LockLike, conn: Connection, data): ############################################################################### -def _has_timestamp(lock: 
_LockLike, conn: Connection, data): +def _has_timestamp(lock: _LockLike, conn: Connection, data) -> None: """Check if the file based GRASS timestamp is present and send True or False using the provided pipe. @@ -396,7 +400,7 @@ def _has_timestamp(lock: _LockLike, conn: Connection, data): ############################################################################### -def _read_timestamp(lock: _LockLike, conn: Connection, data): +def _read_timestamp(lock: _LockLike, conn: Connection, data) -> None: """Read the file based GRASS timestamp and send the result using the provided pipe. @@ -472,8 +476,9 @@ def _write_timestamp(lock: _LockLike, conn: Connection, data): check = libgis.G_scan_timestamp(byref(ts), timestring) if check != 1: - logging.error( - "Unable to convert the timestamp: {timestring}", timestring=timestring + logger.error( + "Unable to convert the timestamp: %(timestring)s", + {"timestring": timestring}, ) return -2 @@ -490,7 +495,7 @@ def _write_timestamp(lock: _LockLike, conn: Connection, data): ############################################################################### -def _remove_timestamp(lock: _LockLike, conn: Connection, data): +def _remove_timestamp(lock: _LockLike, conn: Connection, data) -> None: """Remove the file based GRASS timestamp the return values of the called C-functions using the provided pipe. @@ -554,9 +559,9 @@ def _read_semantic_label(lock: _LockLike, conn: Connection, data): if ret: semantic_label = decode(ret) else: - logging.error( - "Unable to read semantic label. Unsupported map type {maptype}", - maptype=maptype, + logger.error( + "Unable to read semantic label. Unsupported map type %(maptype)s", + {"maptype": maptype}, ) return -1 finally: @@ -591,9 +596,9 @@ def _write_semantic_label(lock: _LockLike, conn: Connection, data): raise ValueError(_("Invalid semantic label")) libraster.Rast_write_semantic_label(name, semantic_label) else: - logging.error( - "Unable to write semantic label. Unsupported map type {maptype}", - maptype=maptype, + logger.error( + "Unable to write semantic label. Unsupported map type %(maptype)s", + {"maptype": maptype}, ) return -2 finally: @@ -625,9 +630,9 @@ def _remove_semantic_label(lock: _LockLike, conn: Connection, data): if maptype == RPCDefs.TYPE_RASTER: check = libgis.G_remove_misc("cell_misc", "semantic_label", name) else: - logging.error( - "Unable to remove semantic label. Unsupported map type {maptype}", - maptype=maptype, + logger.error( + "Unable to remove semantic label. 
Unsupported map type %(maptype)s", + {"maptype": maptype}, ) return -2 finally: @@ -637,7 +642,7 @@ def _remove_semantic_label(lock: _LockLike, conn: Connection, data): ############################################################################### -def _map_exists(lock: _LockLike, conn: Connection, data): +def _map_exists(lock: _LockLike, conn: Connection, data) -> None: """Check if a map exists in the spatial database The value to be send via pipe is True in case the map exists and False @@ -670,7 +675,7 @@ def _map_exists(lock: _LockLike, conn: Connection, data): ############################################################################### -def _read_map_info(lock: _LockLike, conn: Connection, data): +def _read_map_info(lock: _LockLike, conn: Connection, data) -> None: """Read map specific metadata from the spatial database using C-library functions @@ -743,11 +748,11 @@ def _read_raster_info(name, mapset): libraster.Rast_init_fp_range(byref(range)) ret = libraster.Rast_read_fp_range(name, mapset, byref(range)) if ret < 0: - logging.warning(_("Unable to read range file")) + logger.warning(_("Unable to read range file")) kvp["min"] = None kvp["max"] = None elif ret == 2: - logging.info(_("Raster range file is empty")) + logger.info(_("Raster range file is empty")) kvp["min"] = None kvp["max"] = None else: @@ -761,11 +766,11 @@ def _read_raster_info(name, mapset): libraster.Rast_init_range(byref(range)) ret = libraster.Rast_read_range(name, mapset, byref(range)) if ret < 0: - logging.warning(_("Unable to read range file")) + logger.warning(_("Unable to read range file")) kvp["min"] = None kvp["max"] = None elif ret == 2: - logging.info(_("Raster range file is empty")) + logger.info(_("Raster range file is empty")) kvp["min"] = None kvp["max"] = None else: @@ -831,7 +836,7 @@ def _read_raster3d_info(name, mapset): ) if not g3map: - logging.error(_("Unable to open 3D raster map <%s>"), (name)) + logger.error(_("Unable to open 3D raster map <%s>"), name) return None maptype = libraster3d.Rast3d_file_type_map(g3map) @@ -846,7 +851,7 @@ def _read_raster3d_info(name, mapset): max = libgis.DCELL() ret = libraster3d.Rast3d_range_load(g3map) if not ret: - logging.error(_("Unable to load range of 3D raster map <%s>"), (name)) + logger.error(_("Unable to load range of 3D raster map <%s>"), name) return None libraster3d.Rast3d_range_min_max(g3map, byref(min), byref(max)) @@ -860,7 +865,7 @@ def _read_raster3d_info(name, mapset): kvp["max"] = float(max.value) if not libraster3d.Rast3d_close(g3map): - logging.error(_("Unable to close 3D raster map <%s>"), (name)) + logger.error(_("Unable to close 3D raster map <%s>"), name) return None return kvp @@ -902,9 +907,9 @@ def _read_vector_info(name, mapset): libvector.Vect_set_open_level(1) # no topology with_topo = False if libvector.Vect_open_old2(byref(Map), name, mapset, "1") < 1: - logging.error( + logger.error( _("Unable to open vector map <%s>"), - (libvector.Vect_get_full_name(byref(Map))), + libvector.Vect_get_full_name(byref(Map)), ) return None @@ -978,7 +983,7 @@ def _read_vector_info(name, mapset): ############################################################################### -def _read_map_history(lock: _LockLike, conn: Connection, data): +def _read_map_history(lock: _LockLike, conn: Connection, data) -> None: """Read map history from the spatial database using C-library functions :param lock: A multiprocessing.Lock instance @@ -1026,7 +1031,7 @@ def _read_raster_history(name, mapset): hist = libraster.History() ret = 
libraster.Rast_read_history(name, mapset, byref(hist)) if ret < 0: - logging.warning(_("Unable to read history file")) + logger.warning(_("Unable to read history file")) return None kvp["creation_time"] = decode( libraster.Rast_get_history(byref(hist), libraster.HIST_MAPID) @@ -1063,7 +1068,7 @@ def _read_raster3d_history(name, mapset): hist = libraster.History() ret = libraster3d.Rast3d_read_history(name, mapset, byref(hist)) if ret < 0: - logging.warning(_("Unable to read history file")) + logger.warning(_("Unable to read history file")) return None kvp["creation_time"] = decode( libraster.Rast_get_history(byref(hist), libraster3d.HIST_MAPID) @@ -1198,7 +1203,7 @@ def _convert_timestamp_from_grass(ts): ############################################################################### -def _stop(lock: _LockLike, conn: Connection, data): +def _stop(lock: _LockLike, conn: Connection, data) -> None: libgis.G_debug(1, "Stop C-interface server") conn.close() lock.release() @@ -1208,7 +1213,7 @@ def _stop(lock: _LockLike, conn: Connection, data): ############################################################################### -def c_library_server(lock: _LockLike, conn: Connection): +def c_library_server(lock: _LockLike, conn: Connection) -> None: """The GRASS C-libraries server function designed to be a target for multiprocessing.Process @@ -1217,7 +1222,7 @@ def c_library_server(lock: _LockLike, conn: Connection): multiprocessing.Pipe """ - def error_handler(data): + def error_handler(data) -> None: """This function will be called in case of a fatal error in libgis""" # sys.stderr.write("Error handler was called\n") # We send an exception that will be handled in @@ -1235,8 +1240,10 @@ def error_handler(data): libgis.G_add_error_handler(cerror_handler, None) - # Crerate the function array - functions = [0] * 50 + # Create the function array + functions: list[ + Callable[[_LockLike, Connection, Any], Literal[-1, -2] | None] | int + ] = [0] * 50 functions[RPCDefs.STOP] = _stop functions[RPCDefs.HAS_TIMESTAMP] = _has_timestamp functions[RPCDefs.WRITE_TIMESTAMP] = _write_timestamp @@ -1490,10 +1497,10 @@ class CLibrariesInterface(RPCServerBase): """ # noqa: E501 - def __init__(self): + def __init__(self) -> None: RPCServerBase.__init__(self) - def start_server(self): + def start_server(self) -> None: self.client_conn, self.server_conn = Pipe(True) self.lock = Lock() self.server = Process( diff --git a/python/grass/temporal/core.py b/python/grass/temporal/core.py index 321c73d5408..c350c4fa458 100644 --- a/python/grass/temporal/core.py +++ b/python/grass/temporal/core.py @@ -30,6 +30,8 @@ :author: Soeren Gebbert """ +from __future__ import annotations + # import traceback import os from pathlib import Path @@ -50,7 +52,7 @@ try: import psycopg2 import psycopg2.extras -except: +except ImportError: pass import atexit @@ -59,7 +61,7 @@ ############################################################################### -def profile_function(func): +def profile_function(func) -> None: """Profiling function provided by the temporal framework""" do_profiling = os.getenv("GRASS_TGIS_PROFILE") @@ -246,7 +248,7 @@ def get_enable_timestamp_write(): message_interface = None -def _init_tgis_message_interface(raise_on_error=False): +def _init_tgis_message_interface(raise_on_error: bool = False) -> None: """Initiate the global message interface :param raise_on_error: If True raise a FatalError exception in case of @@ -276,7 +278,7 @@ def get_tgis_message_interface(): c_library_interface = None -def 
_init_tgis_c_library_interface(): +def _init_tgis_c_library_interface() -> None: """Set the global C-library interface variable that provides a fast and exit safe interface to the C-library libgis, libraster, libraster3d and libvector functions @@ -302,7 +304,7 @@ def get_tgis_c_library_interface(): raise_on_error = False -def set_raise_on_error(raise_exp=True): +def set_raise_on_error(raise_exp: bool = True): """Define behavior on fatal error, invoked using the tgis messenger interface (msgr.fatal()) @@ -412,7 +414,7 @@ def get_tgis_metadata(dbif=None): statement = "SELECT * FROM tgis_metadata;\n" dbif.execute(statement) rows = dbif.fetchall() - except: + except Exception: rows = None if connection_state_changed: @@ -451,7 +453,7 @@ def get_sql_template_path(): ############################################################################### -def stop_subprocesses(): +def stop_subprocesses() -> None: """Stop the messenger and C-interface subprocesses that are started by tgis.init() """ @@ -512,7 +514,7 @@ def get_available_temporal_mapsets(): ############################################################################### -def init(raise_fatal_error=False, skip_db_version_check=False): +def init(raise_fatal_error: bool = False, skip_db_version_check: bool = False): """This function set the correct database backend from GRASS environmental variables and creates the grass temporal database structure for raster, vector and raster3d maps as well as for the space-time datasets strds, @@ -807,7 +809,7 @@ def get_database_info_string(): ############################################################################### -def _create_temporal_database_views(dbif): +def _create_temporal_database_views(dbif) -> None: """Create all views in the temporal database (internal use only) Used by create_temporal_database() and upgrade_temporal_database(). @@ -828,7 +830,7 @@ def _create_temporal_database_views(dbif): dbif.execute_transaction(sql_filepath) -def create_temporal_database(dbif): +def create_temporal_database(dbif) -> None: """This function will create the temporal database It will create all tables and triggers that are needed to run @@ -935,7 +937,7 @@ def create_temporal_database(dbif): ############################################################################### -def upgrade_temporal_database(dbif): +def upgrade_temporal_database(dbif) -> None: """This function will upgrade the temporal database if needed. It will update all tables and triggers that are requested by @@ -998,7 +1000,7 @@ def upgrade_temporal_database(dbif): ############################################################################### -def _create_tgis_metadata_table(content, dbif=None): +def _create_tgis_metadata_table(content, dbif=None) -> None: """!Create the temporal gis metadata table which stores all metadata information about the temporal database. @@ -1025,7 +1027,7 @@ def _create_tgis_metadata_table(content, dbif=None): class SQLDatabaseInterfaceConnection: - def __init__(self): + def __init__(self) -> None: self.tgis_mapsets = get_available_temporal_mapsets() self.current_mapset = get_current_mapset() self.connections = {} @@ -1052,7 +1054,7 @@ def get_dbmi(self, mapset=None): mapset = decode(mapset) return self.connections[mapset].dbmi - def rollback(self, mapset=None): + def rollback(self, mapset=None) -> None: """ Roll back the last transaction. This must be called in case a new query should be performed after a db error. 
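The bare `except:` handlers replaced in this diff either re-raise `FatalError` or narrow the catch to `Exception`, so interpreter-level exceptions such as `SystemExit` and `KeyboardInterrupt` are no longer swallowed. A small sketch of that pattern (function and argument names are made up):

```python
from grass.exceptions import FatalError  # same import added by this diff


def sketch_fetch_one(dbif, sql: str, args: tuple):
    try:
        dbif.execute(sql, args)
        return dbif.fetchone()
    except FatalError:
        # Fatal errors from the messenger must propagate unchanged.
        raise
    except Exception:
        # Narrowed from a bare "except:".
        return None
```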
@@ -1062,7 +1064,7 @@ def rollback(self, mapset=None): if mapset is None: mapset = self.current_mapset - def connect(self): + def connect(self) -> None: """Connect to the DBMI to execute SQL statements Supported backends are sqlite3 and postgresql @@ -1078,7 +1080,7 @@ def connect(self): def is_connected(self): return self.connected - def close(self): + def close(self) -> None: """Close the DBMI connection There may be several temporal databases in a location, hence @@ -1114,7 +1116,7 @@ def mogrify_sql_statement(self, content, mapset=None): return self.connections[mapset].mogrify_sql_statement(content) - def check_table(self, table_name, mapset=None): + def check_table(self, table_name: str, mapset=None): """Check if a table exists in the temporal database :param table_name: The name of the table to be checked for existence @@ -1205,7 +1207,7 @@ def execute_transaction(self, statement, mapset=None): return self.connections[mapset].execute_transaction(statement) - def _create_mapset_error_message(self, mapset): + def _create_mapset_error_message(self, mapset) -> str: return ( "You have no permission to " "access mapset <%(mapset)s>, or " @@ -1228,7 +1230,7 @@ class DBConnection: - postgresql via psycopg2 """ - def __init__(self, backend=None, dbstring=None): + def __init__(self, backend=None, dbstring: str | None = None) -> None: """Constructor of a database connection param backend:The database backend sqlite or pg @@ -1261,14 +1263,14 @@ def __init__(self, backend=None, dbstring=None): "\n dbstring: %s" % (backend, self.dbstring), ) - def __del__(self): + def __del__(self) -> None: if self.connected is True: self.close() def is_connected(self): return self.connected - def rollback(self): + def rollback(self) -> None: """ Roll back the last transaction. This must be called in case a new query should be performed after a db error. 
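`DBConnection` above supports both the sqlite3 and the psycopg2 backend, and the execute handlers further down catch the drivers' base error classes rather than everything. A hedged sketch of that backend-agnostic handling, assuming (as core.py already does) that psycopg2 may be absent:

```python
import sqlite3

try:
    import psycopg2
except ImportError:  # matches the optional import in core.py
    psycopg2 = None

# Both DB-API drivers expose a base exception class, so handlers can name
# them explicitly instead of using a bare "except:".
_DB_ERRORS = (sqlite3.Error,) if psycopg2 is None else (sqlite3.Error, psycopg2.Error)


def sketch_execute(cursor, statement: str) -> bool:
    try:
        cursor.execute(statement)
    except _DB_ERRORS:
        return False
    return True
```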
@@ -1279,7 +1281,7 @@ def rollback(self): if self.connected: self.connection.rollback() - def connect(self, dbstring=None): + def connect(self, dbstring: str | None = None) -> None: """Connect to the DBMI to execute SQL statements Supported backends are sqlite3 and postgresql @@ -1328,7 +1330,7 @@ def connect(self, dbstring=None): ) ) - def close(self): + def close(self) -> None: """Close the DBMI connection TODO: There may be several temporal databases in a location, hence @@ -1442,7 +1444,7 @@ def mogrify_sql_statement(self, content): return statement - def check_table(self, table_name): + def check_table(self, table_name: str): """Check if a table exists in the temporal database :param table_name: The name of the table to be checked for existence @@ -1499,7 +1501,7 @@ def execute(self, statement, args=None): self.cursor.execute(statement, args) else: self.cursor.execute(statement) - except: + except (sqlite3.Error, psycopg2.Error): if connected: self.close() self.msgr.error(_("Unable to execute :\n %(sql)s") % {"sql": statement}) @@ -1542,7 +1544,7 @@ def execute_transaction(self, statement, mapset=None): else: self.cursor.execute(statement) self.connection.commit() - except: + except (sqlite3.Error, psycopg2.Error): if connected: self.close() self.msgr.error( diff --git a/python/grass/temporal/datetime_math.py b/python/grass/temporal/datetime_math.py index b986637162a..9104ab4b689 100644 --- a/python/grass/temporal/datetime_math.py +++ b/python/grass/temporal/datetime_math.py @@ -9,6 +9,8 @@ :authors: Soeren Gebbert """ +from __future__ import annotations + import copy from datetime import datetime, timedelta @@ -25,53 +27,29 @@ DAY_IN_SECONDS = 86400 SECOND_AS_DAY = 1.1574074074074073e-05 -############################################################################### - - -def relative_time_to_time_delta(value): - """Convert the double value representing days - into a timedelta object. - """ +def relative_time_to_time_delta(value: float) -> timedelta: + """Convert the double value representing days into a timedelta object.""" days = int(value) seconds = value % 1 seconds = round(seconds * DAY_IN_SECONDS) - return timedelta(days, seconds) -############################################################################### - - -def time_delta_to_relative_time(delta): - """Convert the time delta into a - double value, representing days. - """ - +def time_delta_to_relative_time(delta: timedelta) -> float: + """Convert the time delta into a double value, representing days.""" return float(delta.days) + float(delta.seconds * SECOND_AS_DAY) -############################################################################### - - -def relative_time_to_time_delta_seconds(value): - """Convert the double value representing seconds - into a timedelta object. - """ - +def relative_time_to_time_delta_seconds(value: float) -> timedelta: + """Convert the double value representing seconds into a timedelta object.""" days = value / 86400 seconds = int(value % 86400) - return timedelta(days, seconds) -############################################################################### - - -def time_delta_to_relative_time_seconds(delta): - """Convert the time delta into a - double value, representing seconds. 
- """ +def time_delta_to_relative_time_seconds(delta: timedelta) -> float: + """Convert the time delta into a double value, representing seconds.""" return float(delta.days * DAY_IN_SECONDS) + float(delta.seconds) @@ -79,7 +57,9 @@ def time_delta_to_relative_time_seconds(delta): ############################################################################### -def decrement_datetime_by_string(mydate, increment, mult=1): +def decrement_datetime_by_string( + mydate: datetime, increment: str, mult=1 +) -> datetime | None: """Return a new datetime object decremented with the provided relative dates specified as string. Additional a multiplier can be specified to multiply the increment @@ -144,10 +124,9 @@ def decrement_datetime_by_string(mydate, increment, mult=1): return modify_datetime_by_string(mydate, increment, mult, sign=-1) -############################################################################### - - -def increment_datetime_by_string(mydate, increment, mult=1): +def increment_datetime_by_string( + mydate: datetime, increment: str, mult=1 +) -> datetime | None: """Return a new datetime object incremented with the provided relative dates specified as string. Additional a multiplier can be specified to multiply the increment @@ -219,10 +198,9 @@ def increment_datetime_by_string(mydate, increment, mult=1): return modify_datetime_by_string(mydate, increment, mult, sign=1) -############################################################################### - - -def modify_datetime_by_string(mydate, increment, mult=1, sign=1): +def modify_datetime_by_string( + mydate: datetime, increment: str, mult=1, sign: int = 1 +) -> datetime | None: """Return a new datetime object incremented with the provided relative dates specified as string. Additional a multiplier can be specified to multiply the increment @@ -294,8 +272,8 @@ def modify_datetime_by_string(mydate, increment, mult=1, sign=1): def modify_datetime( - mydate, years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0 -): + mydate: datetime, years=0, months=0, weeks=0, days=0, hours=0, minutes=0, seconds=0 +) -> datetime: """Return a new datetime object incremented with the provided relative dates and times""" @@ -373,7 +351,7 @@ def modify_datetime( ############################################################################### -def adjust_datetime_to_granularity(mydate, granularity): +def adjust_datetime_to_granularity(mydate: datetime, granularity): """Modify the datetime object to fit the given granularity - Years will start at the first of January @@ -722,13 +700,10 @@ def compute_datetime_delta(start, end): return comp -############################################################################### - - -def check_datetime_string(time_string, use_dateutil=True): +def check_datetime_string(time_string: str, use_dateutil: bool = True): """Check if a string can be converted into a datetime object and return the object - In case datutil is not installed the supported ISO string formats are: + In case dateutil is not installed the supported ISO string formats are: - YYYY-mm-dd - YYYY-mm-dd HH:MM:SS @@ -828,10 +803,7 @@ def check_datetime_string(time_string, use_dateutil=True): return _("Unable to parse time string: %s") % time_string -############################################################################### - - -def string_to_datetime(time_string): +def string_to_datetime(time_string: str) -> datetime | None: """Convert a string into a datetime object In case datutil is not installed the supported ISO string formats are: @@ 
-864,10 +836,7 @@ def string_to_datetime(time_string): return time_object -############################################################################### - - -def datetime_to_grass_datetime_string(dt): +def datetime_to_grass_datetime_string(dt: datetime | None) -> str: """Convert a python datetime object into a GRASS datetime string .. code-block:: python @@ -906,7 +875,8 @@ def datetime_to_grass_datetime_string(dt): ] if dt is None: - raise Exception("Empty datetime object in datetime_to_grass_datetime_string") + msg = "Empty datetime object in datetime_to_grass_datetime_string" + raise Exception(msg) # Check for time zone info in the datetime object if dt.tzinfo is not None: @@ -936,6 +906,8 @@ def datetime_to_grass_datetime_string(dt): ############################################################################### + + suffix_units = { "years": "%Y", "year": "%Y", @@ -952,7 +924,7 @@ def datetime_to_grass_datetime_string(dt): } -def create_suffix_from_datetime(start_time, granularity): +def create_suffix_from_datetime(start_time: datetime, granularity) -> str: """Create a datetime string based on a datetime object and a provided granularity that can be used as suffix for map names. @@ -966,7 +938,7 @@ def create_suffix_from_datetime(start_time, granularity): return start_time.strftime(suffix_units[granularity.split(" ")[1]]) -def create_time_suffix(mapp, end=False): +def create_time_suffix(mapp, end: bool = False): """Create a datetime string based on a map datetime object :param mapp: a temporal map dataset @@ -981,7 +953,7 @@ def create_time_suffix(mapp, end=False): return sstring -def create_numeric_suffix(base, count, zeros): +def create_numeric_suffix(base, count: int, zeros: str) -> str: """Create a string based on count and number of zeros decided by zeros :param base: the basename for new map diff --git a/python/grass/temporal/extract.py b/python/grass/temporal/extract.py index 94a4fd0f1c9..aab18c5dd76 100644 --- a/python/grass/temporal/extract.py +++ b/python/grass/temporal/extract.py @@ -39,11 +39,11 @@ def extract_dataset( expression, base, time_suffix, - nprocs=1, - register_null=False, - layer=1, + nprocs: int = 1, + register_null: bool = False, + layer: int = 1, vtype="point,line,boundary,centroid,area,face", -): +) -> None: """Extract a subset of a space time raster, raster3d or vector dataset A mapcalc expression can be provided to process the temporal extracted @@ -318,7 +318,7 @@ def extract_dataset( ############################################################################### -def run_mapcalc2d(expr): +def run_mapcalc2d(expr) -> None: """Helper function to run r.mapcalc in parallel""" try: gs.run_command( @@ -328,7 +328,7 @@ def run_mapcalc2d(expr): sys.exit(1) -def run_mapcalc3d(expr): +def run_mapcalc3d(expr) -> None: """Helper function to run r3.mapcalc in parallel""" try: gs.run_command( @@ -338,7 +338,7 @@ def run_mapcalc3d(expr): sys.exit(1) -def run_vector_extraction(input, output, layer, type, where): +def run_vector_extraction(input, output, layer, type, where) -> None: """Helper function to run r.mapcalc in parallel""" try: gs.run_command( diff --git a/python/grass/temporal/gui_support.py b/python/grass/temporal/gui_support.py index d2cbca13d68..e7c52d6788d 100644 --- a/python/grass/temporal/gui_support.py +++ b/python/grass/temporal/gui_support.py @@ -11,6 +11,7 @@ """ import grass.script as gs +from grass.exceptions import ScriptError from .core import get_available_temporal_mapsets, init_dbif from .factory import dataset_factory @@ -18,7 +19,7 @@ 
############################################################################### -def tlist_grouped(type, group_type=False, dbif=None): +def tlist_grouped(type, group_type: bool = False, dbif=None): """List of temporal elements grouped by mapsets. Returns a dictionary where the keys are mapset @@ -27,7 +28,7 @@ def tlist_grouped(type, group_type=False, dbif=None): .. code-block:: python - >>> import grass.temporalas tgis + >>> import grass.temporal as tgis >>> tgis.tlist_grouped('strds')['PERMANENT'] ['precipitation', 'temperature'] @@ -37,15 +38,15 @@ def tlist_grouped(type, group_type=False, dbif=None): :return: directory of mapsets/elements """ result = {} - _type = type + type_ = type dbif, connection_state_changed = init_dbif(dbif) mapset = None - types = ["strds", "str3ds", "stvds"] if _type == "stds" else [_type] - for _type in types: + types = ["strds", "str3ds", "stvds"] if type_ == "stds" else [type_] + for type_ in types: try: - tlist_result = tlist(type=_type, dbif=dbif) - except gs.ScriptError as e: + tlist_result = tlist(type=type_, dbif=dbif) + except ScriptError as e: gs.warning(e) continue @@ -63,10 +64,10 @@ def tlist_grouped(type, group_type=False, dbif=None): result[mapset] = [] if group_type: - if _type in result[mapset]: - result[mapset][_type].append(name) + if type_ in result[mapset]: + result[mapset][type_].append(name) else: - result[mapset][_type] = [ + result[mapset][type_] = [ name, ] else: @@ -88,20 +89,20 @@ def tlist(type, dbif=None): :return: a list of space time dataset ids """ - _type = type + type_ = type id = None - sp = dataset_factory(_type, id) + sp = dataset_factory(type_, id) dbif, connection_state_changed = init_dbif(dbif) mapsets = get_available_temporal_mapsets() output = [] temporal_type = ["absolute", "relative"] - for _type in temporal_type: + for type_ in temporal_type: # For each available mapset for mapset in mapsets.keys(): # Table name - if _type == "absolute": + if type_ == "absolute": table = sp.get_type() + "_view_abs_time" else: table = sp.get_type() + "_view_rel_time" diff --git a/python/grass/temporal/list_stds.py b/python/grass/temporal/list_stds.py index 22a2708123d..9a62a420930 100644 --- a/python/grass/temporal/list_stds.py +++ b/python/grass/temporal/list_stds.py @@ -150,7 +150,7 @@ def _open_output_file(file, encoding="utf-8", **kwargs): yield stream -def _write_line(items, separator, file): +def _write_line(items, separator, file) -> None: if not separator: separator = "," output = separator.join([f"{item}" for item in items]) @@ -158,8 +158,8 @@ def _write_line(items, separator, file): print(f"{output}", file=stream) -def _write_plain(rows, header, separator, file): - def write_plain_row(items, separator, file): +def _write_plain(rows, header, separator, file) -> None: + def write_plain_row(items, separator, file) -> None: output = separator.join([f"{item}" for item in items]) print(f"{output}", file=file) @@ -171,7 +171,7 @@ def write_plain_row(items, separator, file): write_plain_row(items=row, separator=separator, file=stream) -def _write_json(rows, column_names, file): +def _write_json(rows, column_names, file) -> None: # Lazy import output format-specific dependencies. 
# pylint: disable=import-outside-toplevel import datetime @@ -188,16 +188,14 @@ def default(self, o): dict_rows = [] for row in rows: - new_row = {} - for key, value in zip(column_names, row): - new_row[key] = value + new_row = dict(zip(column_names, row)) dict_rows.append(new_row) meta = {"column_names": column_names} with _open_output_file(file) as stream: json.dump({"data": dict_rows, "metadata": meta}, stream, cls=ResultsEncoder) -def _write_yaml(rows, column_names, file=sys.stdout): +def _write_yaml(rows, column_names, file=sys.stdout) -> None: # Lazy import output format-specific dependencies. # pylint: disable=import-outside-toplevel import yaml @@ -213,17 +211,15 @@ class NoAliasIndentListSafeDumper(yaml.SafeDumper): when https://github.com/yaml/pyyaml/issues/234 is resolved. """ - def ignore_aliases(self, data): + def ignore_aliases(self, data) -> bool: return True - def increase_indent(self, flow=False, indentless=False): + def increase_indent(self, flow: bool = False, indentless: bool = False): return super().increase_indent(flow=flow, indentless=False) dict_rows = [] for row in rows: - new_row = {} - for key, value in zip(column_names, row): - new_row[key] = value + new_row = dict(zip(column_names, row)) dict_rows.append(new_row) meta = {"column_names": column_names} with _open_output_file(file) as stream: @@ -238,7 +234,7 @@ def increase_indent(self, flow=False, indentless=False): ) -def _write_csv(rows, column_names, separator, file=sys.stdout): +def _write_csv(rows, column_names, separator, file=sys.stdout) -> None: # Lazy import output format-specific dependencies. # pylint: disable=import-outside-toplevel import csv @@ -275,7 +271,8 @@ def _write_table(rows, column_names, output_format, separator, file): separator = "," _write_csv(rows=rows, column_names=column_names, separator=separator, file=file) else: - raise ValueError(f"Unknown value '{output_format}' for output_format") + msg = f"Unknown value '{output_format}' for output_format" + raise ValueError(msg) def _get_get_registered_maps_as_objects_with_method(dataset, where, method, gran, dbif): @@ -287,15 +284,15 @@ def _get_get_registered_maps_as_objects_with_method(dataset, where, method, gran ) if method == "gran": if where: - raise ValueError( - f"The where parameter is not supported with method={method}" - ) + msg = f"The where parameter is not supported with method={method}" + raise ValueError(msg) if gran is not None and gran != "": return dataset.get_registered_maps_as_objects_by_granularity( gran=gran, dbif=dbif ) return dataset.get_registered_maps_as_objects_by_granularity(dbif=dbif) - raise ValueError(f"Invalid method '{method}'") + msg = f"Invalid method '{method}'" + raise ValueError(msg) def _get_get_registered_maps_as_objects_delta_gran( @@ -363,7 +360,8 @@ def _get_list_of_maps_delta_gran(dataset, columns, where, method, gran, dbif, ms elif column == "distance_from_begin": row.append(delta_first) else: - raise ValueError(f"Unsupported column '{column}'") + msg = f"Unsupported column '{column}'" + raise ValueError(msg) rows.append(row) return rows @@ -386,14 +384,14 @@ def _get_list_of_maps_stds( def check_columns(column_names, output_format, element_type): if element_type != "stvds" and "layer" in columns: - raise ValueError( - f"Column 'layer' is not allowed with temporal type '{element_type}'" - ) + msg = f"Column 'layer' is not allowed with temporal type '{element_type}'" + raise ValueError(msg) if output_format == "line" and len(column_names) > 1: - raise ValueError( + msg = ( f"'{output_format}' 
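# Small sketch combining two refactors visible above: building a row dict with
# dict(zip(...)) instead of a manual key-by-key loop, and assigning an error
# message to a variable before raising, which keeps the raise statement short
# and satisfies linters that flag long messages inside raise. All names here
# are illustrative, not part of the GRASS API.
from __future__ import annotations


def rows_to_dicts(column_names: list[str], rows: list[tuple]) -> list[dict]:
    return [dict(zip(column_names, row)) for row in rows]


def check_format(output_format: str) -> None:
    if output_format not in {"plain", "line", "json", "yaml", "csv"}:
        msg = f"Unknown value '{output_format}' for output_format"
        raise ValueError(msg)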
output_format can have only 1 column, " f"not {len(column_names)}" ) + raise ValueError(msg) # This method expects a list of objects for gap detection if method in {"delta", "deltagaps", "gran"}: @@ -481,12 +479,12 @@ def list_maps_of_stds( where, separator, method, - no_header=False, + no_header: bool = False, gran=None, dbif=None, outpath=None, output_format=None, -): +) -> None: """List the maps of a space time dataset using different methods :param type: The type of the maps raster, raster3d or vector diff --git a/python/grass/temporal/mapcalc.py b/python/grass/temporal/mapcalc.py index 8761e86160f..68b2918170c 100644 --- a/python/grass/temporal/mapcalc.py +++ b/python/grass/temporal/mapcalc.py @@ -25,6 +25,9 @@ from .datetime_math import time_delta_to_relative_time from .open_stds import check_new_stds, open_new_stds, open_old_stds +_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME = ( + "The temporal operators <%s> support only absolute time." +) ############################################################################ @@ -35,9 +38,9 @@ def dataset_mapcalculator( expression, base, method, - nprocs=1, - register_null=False, - spatial=False, + nprocs: int = 1, + register_null: bool = False, + spatial: bool = False, ): """Perform map-calculations of maps from different space time raster/raster3d datasets, using a specific sampling method @@ -408,7 +411,7 @@ def dataset_mapcalculator( ############################################################################### -def _run_mapcalc2d(expr): +def _run_mapcalc2d(expr) -> None: """Helper function to run r.mapcalc in parallel""" try: gs.run_command( @@ -421,7 +424,7 @@ def _run_mapcalc2d(expr): ############################################################################### -def _run_mapcalc3d(expr): +def _run_mapcalc3d(expr) -> None: """Helper function to run r3.mapcalc in parallel""" try: gs.run_command( @@ -487,7 +490,7 @@ def _operator_parser(expr, first, current): ############################################################################### -def _parse_start_operators(expr, is_time_absolute, current): +def _parse_start_operators(expr, is_time_absolute: bool, current): """ Supported operators for absolute time: - start_doy() - Day of year (doy) from the start time [1 - 366] @@ -507,74 +510,47 @@ def _parse_start_operators(expr, is_time_absolute, current): if expr.find("start_year()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_year()", str(start.year)) if expr.find("start_month()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_month()", str(start.month)) if expr.find("start_week()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_week()", str(start.isocalendar()[1])) if expr.find("start_day()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_day()", str(start.day)) if expr.find("start_hour()") >= 0: if not 
is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_hour()", str(start.hour)) if expr.find("start_minute()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_minute()", str(start.minute)) if expr.find("start_second()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_second()", str(start.second)) if expr.find("start_dow()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) expr = expr.replace("start_dow()", str(start.isoweekday())) if expr.find("start_doy()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") - % ("start_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("start_*")) year = datetime(start.year, 1, 1) delta = start - year @@ -586,7 +562,7 @@ def _parse_start_operators(expr, is_time_absolute, current): ############################################################################### -def _parse_end_operators(expr, is_time_absolute, current): +def _parse_end_operators(expr, is_time_absolute: bool, current): """ Supported operators for absolute time: - end_doy() - Day of year (doy) from the end time [1 - 366] @@ -609,9 +585,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_year()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_year()", "null()") else: @@ -619,9 +593,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_month()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_month()", "null()") else: @@ -629,9 +601,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_week()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_week()", "null()") else: @@ -639,9 +609,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_day()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_day()", "null()") else: @@ -649,9 +617,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_hour()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + 
msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_hour()", "null()") else: @@ -659,9 +625,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_minute()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_minute()", "null()") else: @@ -669,9 +633,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_second()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_second()", "null()") else: @@ -679,9 +641,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_dow()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_dow()", "null()") else: @@ -689,9 +649,7 @@ def _parse_end_operators(expr, is_time_absolute, current): if expr.find("end_doy()") >= 0: if not is_time_absolute: - msgr.fatal( - _("The temporal operators <%s> support only absolute time.") % ("end_*") - ) + msgr.fatal(_(_TEMPORAL_OPERATOR_SUPPORTS_ONLY_ABSOLUTE_TIME) % ("end_*")) if not end: expr = expr.replace("end_doy()", "null()") else: @@ -706,7 +664,7 @@ def _parse_end_operators(expr, is_time_absolute, current): ############################################################################### -def _parse_td_operator(expr, is_time_absolute, first, current): +def _parse_td_operator(expr, is_time_absolute: bool, first, current): """Parse the time delta operator td(). This operator represents the size of the current sample time interval in days and fraction of days for absolute time, @@ -732,7 +690,7 @@ def _parse_td_operator(expr, is_time_absolute, first, current): ############################################################################### -def _parse_start_time_operator(expr, is_time_absolute, first, current): +def _parse_start_time_operator(expr, is_time_absolute: bool, first, current): """Parse the start_time() operator. This operator represent the time difference between the start time of the sample space time raster dataset and the start time of the current sample interval or @@ -755,7 +713,7 @@ def _parse_start_time_operator(expr, is_time_absolute, first, current): ############################################################################### -def _parse_end_time_operator(expr, is_time_absolute, first, current): +def _parse_end_time_operator(expr, is_time_absolute: bool, first, current): """Parse the end_time() operator. This operator represent the time difference between the start time of the sample space time raster dataset and the end time of the current sample interval. 
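# Illustrative sketch of the substitution performed by the start_*() operator
# parsing above: occurrences of an operator such as start_year() in a mapcalc
# expression are replaced by the corresponding value taken from the start time
# of the current sample interval. Only a few operators are shown, and the
# absolute-time check with msgr.fatal() is omitted; this is a sketch, not the
# module's implementation.
from datetime import datetime


def substitute_start_operators(expr: str, start: datetime) -> str:
    replacements = {
        "start_year()": str(start.year),
        "start_month()": str(start.month),
        "start_doy()": str((start - datetime(start.year, 1, 1)).days + 1),
    }
    for operator, value in replacements.items():
        if operator in expr:
            expr = expr.replace(operator, value)
    return expr


# Example:
# substitute_start_operators("if(start_month() == 6, map, null())", datetime(2021, 6, 15))
# returns "if(6 == 6, map, null())"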
The time diff --git a/python/grass/temporal/metadata.py b/python/grass/temporal/metadata.py index 4be4c568a76..8c85141e6aa 100644 --- a/python/grass/temporal/metadata.py +++ b/python/grass/temporal/metadata.py @@ -97,7 +97,7 @@ def __init__( ewres=None, min=None, max=None, - ): + ) -> None: SQLDatabaseInterface.__init__(self, table, ident) self.set_id(ident) @@ -110,58 +110,58 @@ def __init__( self.set_min(min) self.set_max(max) - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key)""" self.ident = ident self.D["id"] = ident - def set_datatype(self, datatype): + def set_datatype(self, datatype) -> None: """Set the datatype""" self.D["datatype"] = datatype - def set_cols(self, cols): + def set_cols(self, cols) -> None: """Set the number of cols""" if cols is not None: self.D["cols"] = int(cols) else: self.D["cols"] = None - def set_rows(self, rows): + def set_rows(self, rows) -> None: """Set the number of rows""" if rows is not None: self.D["rows"] = int(rows) else: self.D["rows"] = None - def set_number_of_cells(self, number_of_cells): + def set_number_of_cells(self, number_of_cells) -> None: """Set the number of cells""" if number_of_cells is not None: self.D["number_of_cells"] = int(number_of_cells) else: self.D["number_of_cells"] = None - def set_nsres(self, nsres): + def set_nsres(self, nsres) -> None: """Set the north-south resolution""" if nsres is not None: self.D["nsres"] = float(nsres) else: self.D["nsres"] = None - def set_ewres(self, ewres): + def set_ewres(self, ewres) -> None: """Set the east-west resolution""" if ewres is not None: self.D["ewres"] = float(ewres) else: self.D["ewres"] = None - def set_min(self, min): + def set_min(self, min) -> None: """Set the minimum raster value""" if min is not None: self.D["min"] = float(min) else: self.D["min"] = None - def set_max(self, max): + def set_max(self, max) -> None: """Set the maximum raster value""" if max is not None: self.D["max"] = float(max) @@ -242,15 +242,15 @@ def get_max(self): min = property(fget=get_min, fset=set_min) max = property(fget=get_max, fset=set_max) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" self._print_info_body(shell=False) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_body(shell=True) - def _print_info_head(self, shell=False): + def _print_info_head(self, shell: bool = False) -> None: """Print information about this class (head part). No header printed in shell style mode. @@ -262,7 +262,7 @@ def _print_info_head(self, shell=False): " +-------------------- Metadata information ----------------------------------+" # noqa: E501 ) - def _print_info_body(self, shell=False): + def _print_info_body(self, shell: bool = False) -> None: """Print information about this class (body part). :param bool shell: True for human readable style otherwise shell style @@ -298,7 +298,7 @@ class RasterMetadata(RasterMetadataBase): The metadata includes the datatype, number of cols, rows and cells and the north-south and east west resolution of the map. Additionally the - minimum and maximum valuesare stored. + minimum and maximum values are stored. 
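# The metadata setters above repeat one pattern: cast to int or float when a
# value is given, otherwise store None so the column stays NULL in the
# temporal database. A generic helper like the one below is one possible way
# to express that pattern; it is a sketch, not part of the GRASS API.
from typing import Callable, Optional, TypeVar

T = TypeVar("T")


def cast_or_none(value, cast: Callable[..., T]) -> Optional[T]:
    return cast(value) if value is not None else None


# Usage, mirroring set_rows() / set_nsres():
# self.D["rows"] = cast_or_none(rows, int)
# self.D["nsres"] = cast_or_none(nsres, float)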
Usage: @@ -368,7 +368,7 @@ def __init__( min=None, max=None, semantic_label=None, - ): + ) -> None: RasterMetadataBase.__init__( self, "raster_metadata", @@ -383,7 +383,7 @@ def __init__( max, ) - def set_semantic_label(self, semantic_label): + def set_semantic_label(self, semantic_label) -> None: """Set the semantic label identifier""" self.D["semantic_label"] = semantic_label @@ -396,14 +396,14 @@ def get_semantic_label(self): semantic_label = property(fget=get_semantic_label, fset=set_semantic_label) - def print_info(self): + def print_info(self) -> None: """Print information about this class.""" self._print_info_head(shell=False) self._print_info_body(shell=False) # semantic label section (raster specific only) print(" | Semantic label:............. " + str(self.get_semantic_label())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_head(shell=True) self._print_info_body(shell=True) @@ -502,7 +502,7 @@ def __init__( tbres=None, min=None, max=None, - ): + ) -> None: RasterMetadataBase.__init__( self, "raster3d_metadata", @@ -520,14 +520,14 @@ def __init__( self.set_tbres(tbres) self.set_depths(depths) - def set_depths(self, depths): + def set_depths(self, depths) -> None: """Set the number of depths""" if depths is not None: self.D["depths"] = int(depths) else: self.D["depths"] = None - def set_tbres(self, tbres): + def set_tbres(self, tbres) -> None: """Set the top-bottom resolution""" if tbres is not None: self.D["tbres"] = float(tbres) @@ -551,14 +551,14 @@ def get_tbres(self): depths = property(fget=get_depths, fset=set_depths) tbres = property(fget=get_tbres, fset=set_tbres) - def print_info(self): + def print_info(self) -> None: """Print information about this class.""" self._print_info_head(shell=False) self._print_info_body(shell=False) print(" | Number of depths:........... " + str(self.get_depths())) print(" | Top-Bottom resolution:...... 
" + str(self.get_tbres())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_head(shell=True) self._print_info_body(shell=True) @@ -660,7 +660,7 @@ class VectorMetadata(SQLDatabaseInterface): def __init__( self, ident=None, - is_3d=False, + is_3d: bool = False, number_of_points=None, number_of_lines=None, number_of_boundaries=None, @@ -673,7 +673,7 @@ def __init__( number_of_islands=None, number_of_holes=None, number_of_volumes=None, - ): + ) -> None: SQLDatabaseInterface.__init__(self, "vector_metadata", ident) self.set_id(ident) @@ -691,60 +691,60 @@ def __init__( self.set_number_of_holes(number_of_holes) self.set_number_of_volumes(number_of_volumes) - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key)""" self.ident = ident self.D["id"] = ident - def set_3d_info(self, is_3d): + def set_3d_info(self, is_3d) -> None: """Set True if the vector map is three dimensional""" self.D["is_3d"] = is_3d - def set_number_of_points(self, number_of_points): + def set_number_of_points(self, number_of_points) -> None: """Set the number of points of the vector map""" self.D["points"] = number_of_points - def set_number_of_lines(self, number_of_lines): + def set_number_of_lines(self, number_of_lines) -> None: """Set the number of lines of the vector map""" self.D["lines"] = number_of_lines - def set_number_of_boundaries(self, number_of_boundaries): + def set_number_of_boundaries(self, number_of_boundaries) -> None: """Set the number of boundaries of the vector map""" self.D["boundaries"] = number_of_boundaries - def set_number_of_centroids(self, number_of_centroids): + def set_number_of_centroids(self, number_of_centroids) -> None: """Set the number of centroids of the vector map""" self.D["centroids"] = number_of_centroids - def set_number_of_faces(self, number_of_faces): + def set_number_of_faces(self, number_of_faces) -> None: """Set the number of faces of the vector map""" self.D["faces"] = number_of_faces - def set_number_of_kernels(self, number_of_kernels): + def set_number_of_kernels(self, number_of_kernels) -> None: """Set the number of kernels of the vector map""" self.D["kernels"] = number_of_kernels - def set_number_of_primitives(self, number_of_primitives): + def set_number_of_primitives(self, number_of_primitives) -> None: """Set the number of primitives of the vector map""" self.D["primitives"] = number_of_primitives - def set_number_of_nodes(self, number_of_nodes): + def set_number_of_nodes(self, number_of_nodes) -> None: """Set the number of nodes of the vector map""" self.D["nodes"] = number_of_nodes - def set_number_of_areas(self, number_of_areas): + def set_number_of_areas(self, number_of_areas) -> None: """Set the number of areas of the vector map""" self.D["areas"] = number_of_areas - def set_number_of_islands(self, number_of_islands): + def set_number_of_islands(self, number_of_islands) -> None: """Set the number of islands of the vector map""" self.D["islands"] = number_of_islands - def set_number_of_holes(self, number_of_holes): + def set_number_of_holes(self, number_of_holes) -> None: """Set the number of holes of the vector map""" self.D["holes"] = number_of_holes - def set_number_of_volumes(self, number_of_volumes): + def set_number_of_volumes(self, number_of_volumes) -> None: """Set the number of volumes of the vector map""" self.D["volumes"] = number_of_volumes @@ -869,7 +869,7 @@ def get_number_of_volumes(self): 
number_of_holes = property(fget=get_number_of_holes, fset=set_number_of_holes) number_of_volumes = property(fget=get_number_of_volumes, fset=set_number_of_volumes) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" print( " +-------------------- Metadata information ----------------------------------+" # noqa: E501 @@ -888,7 +888,7 @@ def print_info(self): print(" | Number of holes ............ " + str(self.get_number_of_holes())) print(" | Number of volumes .......... " + str(self.get_number_of_volumes())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" print("is_3d=" + str(self.get_3d_info())) print("points=" + str(self.get_number_of_points())) @@ -943,7 +943,7 @@ class STDSMetadataBase(SQLDatabaseInterface): def __init__( self, table=None, ident=None, title=None, description=None, command=None - ): + ) -> None: SQLDatabaseInterface.__init__(self, table, ident) self.set_id(ident) @@ -953,20 +953,20 @@ def __init__( # No setter for this self.D["number_of_maps"] = None - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key)""" self.ident = ident self.D["id"] = ident - def set_title(self, title): + def set_title(self, title) -> None: """Set the title""" self.D["title"] = title - def set_description(self, description): + def set_description(self, description) -> None: """Set the number of cols""" self.D["description"] = description - def set_command(self, command): + def set_command(self, command) -> None: """Set the number of cols""" self.D["command"] = command @@ -1013,17 +1013,17 @@ def get_number_of_maps(self): description = property(fget=get_description, fset=set_description) number_of_maps = property(fget=get_number_of_maps) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" self._print_info_body(shell=False) self._print_info_tail(shell=False) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_body(shell=True) self._print_info_tail(shell=True) - def _print_info_head(self, shell=False): + def _print_info_head(self, shell: bool = False) -> None: """Print information about this class (head part). No header printed in shell style mode. @@ -1035,13 +1035,13 @@ def _print_info_head(self, shell=False): " +-------------------- Metadata information ----------------------------------+" # noqa: E501 ) - def _print_info_body(self, shell=False): + def _print_info_body(self, shell: bool = False) -> None: """Print information about this class (body part). :param bool shell: True for human readable style otherwise shell style """ - def _print_info_tail(self, shell=False): + def _print_info_tail(self, shell: bool = False) -> None: """Print information about this class (tail part). 
:param bool shell: True for human readable style otherwise shell style @@ -1061,7 +1061,7 @@ def _print_info_tail(self, shell=False): for token in command.split("\n"): print(" | " + str(token)) - def print_history(self): + def print_history(self) -> None: """Print history information about this class in human readable shell style """ @@ -1165,7 +1165,7 @@ def __init__( title=None, description=None, aggregation_type=None, - ): + ) -> None: STDSMetadataBase.__init__(self, table, ident, title, description) # Initialize the dict to select all values from the db @@ -1179,7 +1179,7 @@ def __init__( self.D["ewres_max"] = None self.D["aggregation_type"] = aggregation_type - def set_aggregation_type(self, aggregation_type): + def set_aggregation_type(self, aggregation_type) -> None: """Set the aggregation type of the dataset (mean, min, max, ...)""" self.D["aggregation_type"] = aggregation_type @@ -1273,7 +1273,7 @@ def get_ewres_max(self): max_max = property(fget=get_max_max) aggregation_type = property(fset=set_aggregation_type, fget=get_aggregation_type) - def _print_info_body(self, shell=False): + def _print_info_body(self, shell: bool = False) -> None: """Print information about this class (body part). :param bool shell: True for human readable style otherwise shell style @@ -1376,7 +1376,9 @@ class STRDSMetadata(STDSRasterMetadataBase): """ - def __init__(self, ident=None, raster_register=None, title=None, description=None): + def __init__( + self, ident=None, raster_register=None, title=None, description=None + ) -> None: STDSRasterMetadataBase.__init__( self, "strds_metadata", ident, title, description ) @@ -1386,7 +1388,7 @@ def __init__(self, ident=None, raster_register=None, title=None, description=Non self.set_raster_register(raster_register) - def set_raster_register(self, raster_register): + def set_raster_register(self, raster_register) -> None: """Set the raster map register table name""" self.D["raster_register"] = raster_register @@ -1448,17 +1450,17 @@ def get_semantic_labels(self): number_of_semantic_labels = property(fget=get_number_of_semantic_labels) semantic_labels = property(fget=get_semantic_labels) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" self._print_info_head(shell=False) super().print_info() - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_head(shell=True) super().print_shell_info() - def _print_info_body(self, shell=False): + def _print_info_body(self, shell: bool = False) -> None: """Print information about this class (body part). 
:param bool shell: True for human readable style otherwise shell style @@ -1561,7 +1563,7 @@ class STR3DSMetadata(STDSRasterMetadataBase): def __init__( self, ident=None, raster3d_register=None, title=None, description=None - ): + ) -> None: STDSRasterMetadataBase.__init__( self, "str3ds_metadata", ident, title, description ) @@ -1570,7 +1572,7 @@ def __init__( self.D["tbres_min"] = None self.D["tbres_max"] = None - def set_raster3d_register(self, raster3d_register): + def set_raster3d_register(self, raster3d_register) -> None: """Set the raster map register table name""" self.D["raster3d_register"] = raster3d_register @@ -1603,17 +1605,17 @@ def get_tbres_max(self): tbres_min = property(fget=get_tbres_min) tbres_max = property(fget=get_tbres_max) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" self._print_info_head(shell=False) super().print_info() - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_head(shell=True) super().print_shell_info() - def _print_info_body(self, shell=False): + def _print_info_body(self, shell: bool = False) -> None: """Print information about this class (body part). :param bool shell: True for human readable style otherwise shell style @@ -1714,7 +1716,9 @@ class STVDSMetadata(STDSMetadataBase): """ - def __init__(self, ident=None, vector_register=None, title=None, description=None): + def __init__( + self, ident=None, vector_register=None, title=None, description=None + ) -> None: STDSMetadataBase.__init__(self, "stvds_metadata", ident, title, description) self.set_vector_register(vector_register) @@ -1731,7 +1735,7 @@ def __init__(self, ident=None, vector_register=None, title=None, description=Non self.D["holes"] = None self.D["volumes"] = None - def set_vector_register(self, vector_register): + def set_vector_register(self, vector_register) -> None: """Set the vector map register table name""" self.D["vector_register"] = vector_register @@ -1865,17 +1869,17 @@ def get_number_of_volumes(self): number_of_holes = property(fget=get_number_of_holes) number_of_volumes = property(fget=get_number_of_volumes) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" self._print_info_head(shell=False) super().print_info() - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" self._print_info_head(shell=True) super().print_shell_info() - def _print_info_body(self, shell=False): + def _print_info_body(self, shell: bool = False) -> None: """Print information about this class (body part). 
:param bool shell: True for human readable style otherwise shell style diff --git a/python/grass/temporal/open_stds.py b/python/grass/temporal/open_stds.py index 3b3a28ea99a..d05212410a8 100644 --- a/python/grass/temporal/open_stds.py +++ b/python/grass/temporal/open_stds.py @@ -35,7 +35,7 @@ def open_old_stds(name, type, dbif=None): :param name: The name of the space time dataset, if the name does not contain the mapset (name@mapset) then the current mapset - will be used to identifiy the space time dataset + will be used to identify the space time dataset :param type: The type of the space time dataset (strd, str3ds, stvds, raster, vector, raster3d) :param dbif: The optional database interface to be used @@ -88,7 +88,7 @@ def open_old_stds(name, type, dbif=None): ############################################################################### -def check_new_stds(name, type, dbif=None, overwrite=False): +def check_new_stds(name, type, dbif=None, overwrite: bool = False): """Check if a new space time dataset of a specific type can be created :param name: The name of the new space time dataset @@ -153,7 +153,7 @@ def check_new_stds(name, type, dbif=None, overwrite=False): def open_new_stds( - name, type, temporaltype, title, descr, semantic, dbif=None, overwrite=False + name, type, temporaltype, title, descr, semantic, dbif=None, overwrite: bool = False ): """Create a new space time dataset of a specific type @@ -210,7 +210,9 @@ def open_new_stds( ############################################################################ -def check_new_map_dataset(name, layer=None, type="raster", overwrite=False, dbif=None): +def check_new_map_dataset( + name, layer=None, type="raster", overwrite: bool = False, dbif=None +): """Check if a new map dataset of a specific type can be created in the temporal database @@ -254,7 +256,12 @@ def check_new_map_dataset(name, layer=None, type="raster", overwrite=False, dbif def open_new_map_dataset( - name, layer=None, type="raster", temporal_extent=None, overwrite=False, dbif=None + name, + layer=None, + type="raster", + temporal_extent=None, + overwrite: bool = False, + dbif=None, ): """Create a new map dataset object of a specific type that can be registered in the temporal database diff --git a/python/grass/temporal/register.py b/python/grass/temporal/register.py index d8705765d02..b00d6631619 100644 --- a/python/grass/temporal/register.py +++ b/python/grass/temporal/register.py @@ -44,10 +44,10 @@ def register_maps_in_space_time_dataset( unit=None, increment=None, dbif=None, - interval=False, - fs="|", - update_cmd_list=True, -): + interval: bool = False, + fs: str = "|", + update_cmd_list: bool = True, +) -> None: """Use this method to register maps in space time datasets. Additionally a start time string and an increment string can be @@ -126,8 +126,7 @@ def register_maps_in_space_time_dataset( # create new stds only in the current mapset # remove all connections to any other mapsets # ugly hack ! 
- currcon = {} - currcon[mapset] = dbif.connections[mapset] + currcon = {mapset: dbif.connections[mapset]} dbif.connections = currcon # The name of the space time dataset is optional @@ -464,8 +463,8 @@ def register_maps_in_space_time_dataset( def assign_valid_time_to_map( - ttype, map_object, start, end, unit, increment=None, mult=1, interval=False -): + ttype, map_object, start, end, unit, increment=None, mult=1, interval: bool = False +) -> None: """Assign the valid time to a map dataset :param ttype: The temporal type which should be assigned @@ -590,8 +589,8 @@ def assign_valid_time_to_map( def register_map_object_list( - type, map_list, output_stds, delete_empty=False, unit=None, dbif=None -): + type, map_list, output_stds, delete_empty: bool = False, unit=None, dbif=None +) -> None: """Register a list of AbstractMapDataset objects in the temporal database and optional in a space time dataset. diff --git a/python/grass/temporal/sampling.py b/python/grass/temporal/sampling.py index 5209ce6a80c..5730901590a 100644 --- a/python/grass/temporal/sampling.py +++ b/python/grass/temporal/sampling.py @@ -34,8 +34,8 @@ def sample_stds_by_stds_topology( header, separator, method, - spatial=False, - print_only=True, + spatial: bool = False, + print_only: bool = True, ): """Sample the input space time datasets with a sample space time dataset, return the created map matrix and optionally diff --git a/python/grass/temporal/space_time_datasets.py b/python/grass/temporal/space_time_datasets.py index 92721277c9e..97678e5ce23 100644 --- a/python/grass/temporal/space_time_datasets.py +++ b/python/grass/temporal/space_time_datasets.py @@ -9,8 +9,11 @@ :authors: Soeren Gebbert """ +from __future__ import annotations + import getpass from datetime import datetime +from typing import Literal import grass.script.array as garray @@ -177,18 +180,18 @@ class RasterDataset(AbstractMapDataset): """ - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractMapDataset.__init__(self) self.reset(ident) - def is_stds(self): + def is_stds(self) -> Literal[False]: """Return True if this class is a space time dataset :return: True if this class is a space time dataset, False otherwise """ return False - def get_type(self): + def get_type(self) -> Literal["raster"]: return "raster" def get_new_instance(self, ident): @@ -253,7 +256,7 @@ def get_np_array(self): return garray.array(self.get_map_id()) return garray.array() - def reset(self, ident): + def reset(self, ident) -> None: """Reset the internal structure and set the identifier""" self.base = RasterBase(ident=ident) self.absolute_time = RasterAbsoluteTime(ident=ident) @@ -269,7 +272,7 @@ def has_grass_timestamp(self): """ return self.ciface.has_raster_timestamp(self.get_name(), self.get_mapset()) - def read_timestamp_from_grass(self): + def read_timestamp_from_grass(self) -> bool: """Read the timestamp of this map from the map metadata in the grass file system based spatial database and set the internal time stamp that should be inserted/updated @@ -300,7 +303,7 @@ def read_timestamp_from_grass(self): return True - def write_timestamp_to_grass(self): + def write_timestamp_to_grass(self) -> bool: """Write the timestamp of this map into the map metadata in the grass file system based spatial database. 
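# The register.py hunk above replaces an empty dict plus a single assignment
# with a dict literal. A tiny runnable illustration with hypothetical names;
# dbif and mapset stand in for the real objects used in
# register_maps_in_space_time_dataset().
class _FakeDBInterface:
    def __init__(self) -> None:
        self.connections = {"PERMANENT": "conn0", "user1": "conn1"}


dbif = _FakeDBInterface()
mapset = "user1"

# Before: currcon = {}; currcon[mapset] = dbif.connections[mapset]
# After, as in the diff, one expression with the same result:
currcon = {mapset: dbif.connections[mapset]}
assert currcon == {"user1": "conn1"}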
@@ -332,7 +335,7 @@ def write_timestamp_to_grass(self): return True - def remove_timestamp_from_grass(self): + def remove_timestamp_from_grass(self) -> bool: """Remove the timestamp from the grass file system based spatial database @@ -350,7 +353,7 @@ def remove_timestamp_from_grass(self): return True - def read_semantic_label_from_grass(self): + def read_semantic_label_from_grass(self) -> bool: """Read the semantic label of this map from the map metadata in the GRASS file system based spatial database and set the internal semantic label that should be inserted/updated @@ -371,7 +374,7 @@ def read_semantic_label_from_grass(self): return True - def write_semantic_label_to_grass(self): + def write_semantic_label_to_grass(self) -> bool: """Write the semantic label of this map into the map metadata in the GRASS file system based spatial database. @@ -398,7 +401,7 @@ def map_exists(self): """ return self.ciface.raster_map_exists(self.get_name(), self.get_mapset()) - def load(self): + def load(self) -> bool: """Load all info from an existing raster map into the internal structure This method checks first if the map exists, in case it exists @@ -462,7 +465,7 @@ def load(self): return False - def set_semantic_label(self, semantic_label): + def set_semantic_label(self, semantic_label) -> None: """Set semantic label identifier Metadata is updated in order to propagate semantic label into @@ -606,18 +609,18 @@ class Raster3DDataset(AbstractMapDataset): """ - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractMapDataset.__init__(self) self.reset(ident) - def is_stds(self): + def is_stds(self) -> Literal[False]: """Return True if this class is a space time dataset :return: True if this class is a space time dataset, False otherwise """ return False - def get_type(self): + def get_type(self) -> Literal["raster3d"]: return "raster3d" def get_new_instance(self, ident): @@ -693,7 +696,7 @@ def get_np_array(self): return garray.array3d(self.get_map_id()) return garray.array3d() - def reset(self, ident): + def reset(self, ident) -> None: """Reset the internal structure and set the identifier""" self.base = Raster3DBase(ident=ident) self.absolute_time = Raster3DAbsoluteTime(ident=ident) @@ -709,7 +712,7 @@ def has_grass_timestamp(self): """ return self.ciface.has_raster3d_timestamp(self.get_name(), self.get_mapset()) - def read_timestamp_from_grass(self): + def read_timestamp_from_grass(self) -> bool: """Read the timestamp of this map from the map metadata in the grass file system based spatial database and set the internal time stamp that should be inserted/updated @@ -740,7 +743,7 @@ def read_timestamp_from_grass(self): return True - def write_timestamp_to_grass(self): + def write_timestamp_to_grass(self) -> bool: """Write the timestamp of this map into the map metadata in the grass file system based spatial database. 
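# The space_time_datasets.py annotations above use typing.Literal to state
# that is_stds() always returns False for map datasets and that get_type()
# returns one fixed string per class. A minimal sketch of the same idea with
# a hypothetical class name:
from typing import Literal


class ExampleMapDataset:
    def is_stds(self) -> Literal[False]:
        """Map datasets are not space time datasets."""
        return False

    def get_type(self) -> Literal["raster"]:
        """Return the fixed dataset type string of this class."""
        return "raster"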
@@ -772,7 +775,7 @@ def write_timestamp_to_grass(self): return True - def remove_timestamp_from_grass(self): + def remove_timestamp_from_grass(self) -> bool: """Remove the timestamp from the grass file system based spatial database :return: True if success, False on error @@ -796,7 +799,7 @@ def map_exists(self): """ return self.ciface.raster3d_map_exists(self.get_name(), self.get_mapset()) - def load(self): + def load(self) -> bool: """Load all info from an existing 3d raster map into the internal structure This method checks first if the map exists, in case it exists @@ -974,18 +977,18 @@ class VectorDataset(AbstractMapDataset): """ - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractMapDataset.__init__(self) self.reset(ident) - def is_stds(self): + def is_stds(self) -> Literal[False]: """Return True if this class is a space time dataset :return: True if this class is a space time dataset, False otherwise """ return False - def get_type(self): + def get_type(self) -> Literal["vector"]: return "vector" def get_new_instance(self, ident): @@ -1037,7 +1040,7 @@ def spatial_disjoint_union(self, dataset): """ return self.spatial_extent.disjoint_union_2d(dataset.spatial_extent) - def reset(self, ident): + def reset(self, ident) -> None: """Reset the internal structure and set the identifier""" self.base = VectorBase(ident=ident) self.absolute_time = VectorAbsoluteTime(ident=ident) @@ -1052,7 +1055,7 @@ def has_grass_timestamp(self): self.get_name(), self.get_mapset(), self.get_layer() ) - def read_timestamp_from_grass(self): + def read_timestamp_from_grass(self) -> bool: """Read the timestamp of this map from the map metadata in the grass file system based spatial database and set the internal time stamp that should be inserted/updated @@ -1081,7 +1084,7 @@ def read_timestamp_from_grass(self): return True - def write_timestamp_to_grass(self): + def write_timestamp_to_grass(self) -> bool: """Write the timestamp of this map into the map metadata in the grass file system based spatial database. @@ -1110,7 +1113,7 @@ def write_timestamp_to_grass(self): return True - def remove_timestamp_from_grass(self): + def remove_timestamp_from_grass(self) -> bool: """Remove the timestamp from the grass file system based spatial database @@ -1135,7 +1138,7 @@ def map_exists(self): """ return self.ciface.vector_map_exists(self.get_name(), self.get_mapset()) - def load(self): + def load(self) -> bool: """Load all info from an existing vector map into the internal structure This method checks first if the map exists, in case it exists @@ -1230,24 +1233,24 @@ class SpaceTimeRasterDataset(AbstractSpaceTimeDataset): ... """ - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractSpaceTimeDataset.__init__(self, ident) - def set_semantic_label(self, semantic_label): + def set_semantic_label(self, semantic_label) -> None: """Set semantic label :param str semantic_label: semantic label (eg. 
S2_1) """ self.semantic_label = semantic_label - def is_stds(self): + def is_stds(self) -> Literal[True]: """Return True if this class is a space time dataset :return: True if this class is a space time dataset, False otherwise """ return True - def get_type(self): + def get_type(self) -> Literal["strds"]: return "strds" def get_new_instance(self, ident): @@ -1263,7 +1266,7 @@ def get_map_register(self): """Return the name of the map register table""" return self.metadata.get_raster_register() - def set_map_register(self, name): + def set_map_register(self, name) -> None: """Set the name of the map register table""" self.metadata.set_raster_register(name) @@ -1301,7 +1304,7 @@ def spatial_disjoint_union(self, dataset): """ return self.spatial_extent.disjoint_union_2d(dataset.spatial_extent) - def reset(self, ident): + def reset(self, ident) -> None: """Reset the internal structure and set the identifier""" self.base = STRDSBase(ident=ident) self.base.set_creator(str(getpass.getuser())) @@ -1346,17 +1349,17 @@ class SpaceTimeRaster3DDataset(AbstractSpaceTimeDataset): ... """ - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractSpaceTimeDataset.__init__(self, ident) - def is_stds(self): + def is_stds(self) -> Literal[True]: """Return True if this class is a space time dataset :return: True if this class is a space time dataset, False otherwise """ return True - def get_type(self): + def get_type(self) -> Literal["str3ds"]: return "str3ds" def get_new_instance(self, ident): @@ -1372,7 +1375,7 @@ def get_map_register(self): """Return the name of the map register table""" return self.metadata.get_raster3d_register() - def set_map_register(self, name): + def set_map_register(self, name) -> None: """Set the name of the map register table""" self.metadata.set_raster3d_register(name) @@ -1422,7 +1425,7 @@ def spatial_disjoint_union(self, dataset): return self.spatial_extent.disjoint_union(dataset.spatial_extent) return self.spatial_extent.disjoint_union_2d(dataset.spatial_extent) - def reset(self, ident): + def reset(self, ident) -> None: """Reset the internal structure and set the identifier""" self.base = STR3DSBase(ident=ident) self.base.set_creator(str(getpass.getuser())) @@ -1467,17 +1470,17 @@ class SpaceTimeVectorDataset(AbstractSpaceTimeDataset): ... 
""" - def __init__(self, ident): + def __init__(self, ident) -> None: AbstractSpaceTimeDataset.__init__(self, ident) - def is_stds(self): + def is_stds(self) -> Literal[True]: """Return True if this class is a space time dataset :return: True if this class is a space time dataset, False otherwise """ return True - def get_type(self): + def get_type(self) -> Literal["stvds"]: return "stvds" def get_new_instance(self, ident): @@ -1493,7 +1496,7 @@ def get_map_register(self): """Return the name of the map register table""" return self.metadata.get_vector_register() - def set_map_register(self, name): + def set_map_register(self, name) -> None: """Set the name of the map register table""" self.metadata.set_vector_register(name) @@ -1531,7 +1534,7 @@ def spatial_disjoint_union(self, dataset): """ return self.spatial_extent.disjoint_union_2d(dataset.spatial_extent) - def reset(self, ident): + def reset(self, ident) -> None: """Reset the internal structure and set the identifier""" self.base = STVDSBase(ident=ident) self.base.set_creator(str(getpass.getuser())) diff --git a/python/grass/temporal/spatial_extent.py b/python/grass/temporal/spatial_extent.py index 7bec3bc35e7..8154a02805b 100644 --- a/python/grass/temporal/spatial_extent.py +++ b/python/grass/temporal/spatial_extent.py @@ -70,6 +70,8 @@ :authors: Soeren Gebbert """ +from typing import Literal + from .base import SQLDatabaseInterface @@ -137,7 +139,7 @@ def __init__( top=None, bottom=None, proj="XY", - ): + ) -> None: SQLDatabaseInterface.__init__(self, table, ident) self.set_id(ident) self.set_spatial_extent_from_values(north, south, east, west, top, bottom) @@ -866,13 +868,13 @@ def cover_2d(self, extent) -> bool: # We check that at least one edge of extent is located in self edge_count = 0 - if eW > W and eW < E: + if W < eW < E: edge_count += 1 - if eE < E and eE > W: + if W < eE < E: edge_count += 1 - if eN < N and eN > S: + if S < eN < N: edge_count += 1 - if eS > S and eS < N: + if S < eS < N: edge_count += 1 return edge_count != 0 @@ -939,21 +941,21 @@ def cover(self, extent) -> bool: # We check that at least one edge of extent is located in self edge_count = 0 - if eW > W and eW < E: + if W < eW < E: edge_count += 1 - if eE < E and eE > W: + if W < eE < E: edge_count += 1 - if eN < N and eN > S: + if S < eN < N: edge_count += 1 - if eS > S and eS < N: + if S < eS < N: edge_count += 1 - if eN < N and eN > S: + if S < eN < N: edge_count += 1 - if eS > S and eS < N: + if S < eS < N: edge_count += 1 - if eT < T and eT > B: + if B < eT < T: edge_count += 1 - if eB > B and eB < T: + if B < eB < T: edge_count += 1 return edge_count != 0 @@ -1110,7 +1112,7 @@ def overlap(self, extent) -> bool: or self.get_bottom() >= T ) - def meet_2d(self, extent): + def meet_2d(self, extent) -> bool: """Return True if this extent (A) meets with the provided spatial extent (B) in two dimensions. @@ -1303,7 +1305,7 @@ def disjoint_2d(self, extent) -> bool: or self.meet_2d(extent) ) - def disjoint(self, extent): + def disjoint(self, extent) -> bool: """Return True if this extent is disjoint with the provided spatial extent in three dimensions. @@ -1321,7 +1323,17 @@ def disjoint(self, extent): or self.meet(extent) ) - def spatial_relation_2d(self, extent): + def spatial_relation_2d(self, extent) -> Literal[ + "equivalent", + "contain", + "in", + "cover", + "covered", + "overlap", + "meet", + "disjoint", + "unknown", + ]: """Returns the two dimensional spatial relation between this extent and the provided spatial extent in two dimensions. 
@@ -1358,7 +1370,17 @@ def spatial_relation_2d(self, extent): return "unknown" - def spatial_relation(self, extent): + def spatial_relation(self, extent) -> Literal[ + "equivalent", + "contain", + "in", + "cover", + "covered", + "overlap", + "meet", + "disjoint", + "unknown", + ]: """Returns the two dimensional spatial relation between this extent and the provided spatial extent in three dimensions. @@ -1667,7 +1689,9 @@ def spatial_relation(self, extent): return "unknown" - def set_spatial_extent_from_values(self, north, south, east, west, top, bottom): + def set_spatial_extent_from_values( + self, north, south, east, west, top, bottom + ) -> None: """Set the three dimensional spatial extent :param north: The northern edge @@ -1685,7 +1709,7 @@ def set_spatial_extent_from_values(self, north, south, east, west, top, bottom): self.set_top(top) self.set_bottom(bottom) - def set_spatial_extent(self, spatial_extent): + def set_spatial_extent(self, spatial_extent) -> None: """Set the three dimensional spatial extent :param spatial_extent: An object of type SpatialExtent or its @@ -1699,7 +1723,7 @@ def set_spatial_extent(self, spatial_extent): self.set_top(spatial_extent.get_top()) self.set_bottom(spatial_extent.get_bottom()) - def set_projection(self, proj): + def set_projection(self, proj) -> None: """Set the projection of the spatial extent it should be XY or LL. As default the projection is XY """ @@ -1708,7 +1732,7 @@ def set_projection(self, proj): else: self.D["proj"] = proj - def set_spatial_extent_from_values_2d(self, north, south, east, west): + def set_spatial_extent_from_values_2d(self, north, south, east, west) -> None: """Set the two dimensional spatial extent from values :param north: The northern edge @@ -1722,7 +1746,7 @@ def set_spatial_extent_from_values_2d(self, north, south, east, west): self.set_east(east) self.set_west(west) - def set_spatial_extent_2d(self, spatial_extent): + def set_spatial_extent_2d(self, spatial_extent) -> None: """Set the three dimensional spatial extent :param spatial_extent: An object of type SpatialExtent or its @@ -1734,47 +1758,47 @@ def set_spatial_extent_2d(self, spatial_extent): self.set_east(spatial_extent.east) self.set_west(spatial_extent.west) - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key)""" self.ident = ident self.D["id"] = ident - def set_north(self, north): + def set_north(self, north) -> None: """Set the northern edge of the map""" if north is not None: self.D["north"] = float(north) else: self.D["north"] = None - def set_south(self, south): + def set_south(self, south) -> None: """Set the southern edge of the map""" if south is not None: self.D["south"] = float(south) else: self.D["south"] = None - def set_west(self, west): + def set_west(self, west) -> None: """Set the western edge of the map""" if west is not None: self.D["west"] = float(west) else: self.D["west"] = None - def set_east(self, east): + def set_east(self, east) -> None: """Set the eastern edge of the map""" if east is not None: self.D["east"] = float(east) else: self.D["east"] = None - def set_top(self, top): + def set_top(self, top) -> None: """Set the top edge of the map""" if top is not None: self.D["top"] = float(top) else: self.D["top"] = None - def set_bottom(self, bottom): + def set_bottom(self, bottom) -> None: """Set the bottom edge of the map""" if bottom is not None: self.D["bottom"] = float(bottom) @@ -1884,7 +1908,7 @@ def get_bottom(self): top = property(fget=get_top, 
fset=set_top) bottom = property(fget=get_bottom, fset=set_bottom) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" # 0123456789012345678901234567890 print( @@ -1897,7 +1921,7 @@ def print_info(self): print(" | Top:........................ " + str(self.get_top())) print(" | Bottom:..................... " + str(self.get_bottom())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" print("north=" + str(self.get_north())) print("south=" + str(self.get_south())) @@ -1920,7 +1944,7 @@ def __init__( west=None, top=None, bottom=None, - ): + ) -> None: SpatialExtent.__init__( self, "raster_spatial_extent", ident, north, south, east, west, top, bottom ) @@ -1936,7 +1960,7 @@ def __init__( west=None, top=None, bottom=None, - ): + ) -> None: SpatialExtent.__init__( self, "raster3d_spatial_extent", @@ -1960,7 +1984,7 @@ def __init__( west=None, top=None, bottom=None, - ): + ) -> None: SpatialExtent.__init__( self, "vector_spatial_extent", ident, north, south, east, west, top, bottom ) @@ -1976,7 +2000,7 @@ def __init__( west=None, top=None, bottom=None, - ): + ) -> None: SpatialExtent.__init__( self, "strds_spatial_extent", ident, north, south, east, west, top, bottom ) @@ -1992,7 +2016,7 @@ def __init__( west=None, top=None, bottom=None, - ): + ) -> None: SpatialExtent.__init__( self, "str3ds_spatial_extent", ident, north, south, east, west, top, bottom ) @@ -2008,7 +2032,7 @@ def __init__( west=None, top=None, bottom=None, - ): + ) -> None: SpatialExtent.__init__( self, "stvds_spatial_extent", ident, north, south, east, west, top, bottom ) diff --git a/python/grass/temporal/spatial_topology_dataset_connector.py b/python/grass/temporal/spatial_topology_dataset_connector.py index 93fe3270c43..7bd8d836f7b 100644 --- a/python/grass/temporal/spatial_topology_dataset_connector.py +++ b/python/grass/temporal/spatial_topology_dataset_connector.py @@ -74,10 +74,10 @@ class SpatialTopologyDatasetConnector: """ - def __init__(self): + def __init__(self) -> None: self.reset_spatial_topology() - def reset_spatial_topology(self): + def reset_spatial_topology(self) -> None: """Reset any information about temporal topology""" self._spatial_topology = {} self._has_spatial_topology = False @@ -147,11 +147,11 @@ def get_number_of_spatial_relations(self): return relations - def set_spatial_topology_build_true(self): + def set_spatial_topology_build_true(self) -> None: """Same as name""" self._has_spatial_topology = True - def set_spatial_topology_build_false(self): + def set_spatial_topology_build_false(self) -> None: """Same as name""" self._has_spatial_topology = False @@ -159,7 +159,7 @@ def is_spatial_topology_build(self): """Check if the temporal topology was build""" return self._has_spatial_topology - def append_equivalent(self, map): + def append_equivalent(self, map) -> None: """Append a map with equivalent spatial extent as this map :param map: This object should be of type AbstractMapDataset @@ -178,7 +178,7 @@ def get_equivalent(self): return None return self._spatial_topology["EQUIVALENT"] - def append_overlap(self, map): + def append_overlap(self, map) -> None: """Append a map that this spatial overlap with this map :param map: This object should be of type AbstractMapDataset @@ -197,7 +197,7 @@ def get_overlap(self): return None return self._spatial_topology["OVERLAP"] - def append_in(self, map): + def append_in(self, map) -> None: """Append a map that this is 
spatial in this map :param map: This object should be of type AbstractMapDataset @@ -216,7 +216,7 @@ def get_in(self): return None return self._spatial_topology["IN"] - def append_contain(self, map): + def append_contain(self, map) -> None: """Append a map that this map spatially contains :param map: This object should be of type AbstractMapDataset @@ -235,7 +235,7 @@ def get_contain(self): return None return self._spatial_topology["CONTAIN"] - def append_meet(self, map): + def append_meet(self, map) -> None: """Append a map that spatially meet with this map :param map: This object should be of type AbstractMapDataset @@ -254,7 +254,7 @@ def get_meet(self): return None return self._spatial_topology["MEET"] - def append_cover(self, map): + def append_cover(self, map) -> None: """Append a map that spatially cover this map :param map: This object should be of type AbstractMapDataset @@ -273,7 +273,7 @@ def get_cover(self): return None return self._spatial_topology["COVER"] - def append_covered(self, map): + def append_covered(self, map) -> None: """Append a map that is spatially covered by this map :param map: This object should be of type AbstractMapDataset @@ -292,11 +292,11 @@ def get_covered(self): return None return self._spatial_topology["COVERED"] - def _generate_map_list_string(self, map_list, line_wrap=True): + def _generate_map_list_string(self, map_list, line_wrap: bool = True): count = 0 string = "" for map_ in map_list: - if line_wrap and count > 0 and count % 3 == 0: + if line_wrap and count > 0 and (count % 3 == 0): string += "\n | ............................ " count = 0 if count == 0: @@ -316,7 +316,7 @@ def _generate_map_list_string(self, map_list, line_wrap=True): contain = property(fget=get_contain, fset=append_contain) meet = property(fget=get_meet, fset=append_meet) - def print_spatial_topology_info(self): + def print_spatial_topology_info(self) -> None: """Print information about this class in human readable style""" print( @@ -359,7 +359,7 @@ def print_spatial_topology_info(self): + self._generate_map_list_string(self.meet) ) - def print_spatial_topology_shell_info(self): + def print_spatial_topology_shell_info(self) -> None: """Print information about this class in shell style""" if self.equivalent is not None: diff --git a/python/grass/temporal/spatio_temporal_relationships.py b/python/grass/temporal/spatio_temporal_relationships.py index d22bc2bb0da..25b666e1968 100644 --- a/python/grass/temporal/spatio_temporal_relationships.py +++ b/python/grass/temporal/spatio_temporal_relationships.py @@ -377,28 +377,28 @@ class SpatioTemporalTopologyBuilder: """ # noqa: E501 - def __init__(self): + def __init__(self) -> None: self._reset() # 0001-01-01 00:00:00 self._timeref = datetime(1, 1, 1) - def _reset(self): + def _reset(self) -> None: self._store = {} self._first = None self._iteratable = False - def _set_first(self, first): + def _set_first(self, first) -> None: self._first = first self._insert(first) - def _detect_first(self): + def _detect_first(self) -> None: if len(self) > 0: prev_ = list(self._store.values())[0] while prev_ is not None: self._first = prev_ prev_ = prev_.prev() - def _insert(self, t): + def _insert(self, t) -> None: self._store[t.get_id()] = t def get_first(self): @@ -408,7 +408,7 @@ def get_first(self): """ return self._first - def _build_internal_iteratable(self, maps, spatial): + def _build_internal_iteratable(self, maps, spatial) -> None: """Build an iteratable temporal topology structure for all maps in the list and store the maps internally @@ 
-428,7 +428,7 @@ def _build_internal_iteratable(self, maps, spatial): # Detect the first map self._detect_first() - def _build_iteratable(self, maps, spatial): + def _build_iteratable(self, maps, spatial) -> None: """Build an iteratable temporal topology structure for all maps in the list @@ -529,7 +529,7 @@ def _build_rtree(self, maps, spatial=None): return tree - def build(self, mapsA, mapsB=None, spatial=None): + def build(self, mapsA, mapsB=None, spatial=None) -> None: """Build the spatio-temporal topology structure between one or two unordered lists of abstract dataset objects @@ -586,7 +586,7 @@ def build(self, mapsA, mapsB=None, spatial=None): A = mapsA[i] B = mapsB[j] - set_temoral_relationship(A, B, relation) + set_temporal_relationship(A, B, relation) if spatial is not None: relation = mapsB[j].spatial_relation(mapsA[i]) @@ -609,17 +609,17 @@ def __iter__(self): def __getitem__(self, index): return self._store[index.get_id()] - def __len__(self): + def __len__(self) -> int: return len(self._store) - def __contains__(self, _map): - return _map in self._store.values() + def __contains__(self, map_) -> bool: + return map_ in self._store.values() ############################################################################### -def set_temoral_relationship(A, B, relation): +def set_temporal_relationship(A, B, relation) -> None: if relation in {"equal", "equals"}: if A != B: if not B.get_equal() or (B.get_equal() and A not in B.get_equal()): @@ -685,7 +685,7 @@ def set_temoral_relationship(A, B, relation): ############################################################################### -def set_spatial_relationship(A, B, relation): +def set_spatial_relationship(A, B, relation) -> None: if relation == "equivalent": if A != B: if not B.get_equivalent() or ( @@ -731,7 +731,7 @@ def set_spatial_relationship(A, B, relation): ############################################################################### -def print_temporal_topology_relationships(maps1, maps2=None, dbif=None): +def print_temporal_topology_relationships(maps1, maps2=None, dbif=None) -> None: """Print the temporal relationships of the map lists maps1 and maps2 to stdout. @@ -761,7 +761,7 @@ def print_temporal_topology_relationships(maps1, maps2=None, dbif=None): def print_spatio_temporal_topology_relationships( maps1, maps2=None, spatial="2D", dbif=None -): +) -> None: """Print the temporal relationships of the map lists maps1 and maps2 to stdout. 
@@ -832,13 +832,13 @@ def count_temporal_topology_relationships(maps1, maps2=None, dbif=None): def create_temporal_relation_sql_where_statement( start, end, - use_start=True, - use_during=False, - use_overlap=False, - use_contain=False, - use_equal=False, - use_follows=False, - use_precedes=False, + use_start: bool = True, + use_during: bool = False, + use_overlap: bool = False, + use_contain: bool = False, + use_equal: bool = False, + use_follows: bool = False, + use_precedes: bool = False, ): """Create a SQL WHERE statement for temporal relation selection of maps in space time datasets diff --git a/python/grass/temporal/stds_export.py b/python/grass/temporal/stds_export.py index a01b94e1071..c5f546aa3a4 100644 --- a/python/grass/temporal/stds_export.py +++ b/python/grass/temporal/stds_export.py @@ -53,7 +53,7 @@ def _export_raster_maps_as_gdal( rows, tar, list_file, new_cwd, fs, format_, type_, **kwargs -): +) -> None: kwargs = {key: value for key, value in kwargs.items() if value is not None} for row in rows: name = row["name"] @@ -148,7 +148,7 @@ def _export_raster_maps_as_gdal( ############################################################################ -def _export_raster_maps(rows, tar, list_file, new_cwd, fs): +def _export_raster_maps(rows, tar, list_file, new_cwd, fs) -> None: for row in rows: name = row["name"] start = row["start_time"] @@ -173,7 +173,7 @@ def _export_raster_maps(rows, tar, list_file, new_cwd, fs): ############################################################################ -def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs): +def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs) -> None: for row in rows: name = row["name"] start = row["start_time"] @@ -207,7 +207,7 @@ def _export_vector_maps_as_gml(rows, tar, list_file, new_cwd, fs): ############################################################################ -def _export_vector_maps_as_gpkg(rows, tar, list_file, new_cwd, fs): +def _export_vector_maps_as_gpkg(rows, tar, list_file, new_cwd, fs) -> None: for row in rows: name = row["name"] start = row["start_time"] @@ -242,7 +242,7 @@ def _export_vector_maps_as_gpkg(rows, tar, list_file, new_cwd, fs): ############################################################################ -def _export_vector_maps(rows, tar, list_file, new_cwd, fs): +def _export_vector_maps(rows, tar, list_file, new_cwd, fs) -> None: for row in rows: name = row["name"] start = row["start_time"] @@ -276,7 +276,7 @@ def _export_vector_maps(rows, tar, list_file, new_cwd, fs): ############################################################################ -def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs): +def _export_raster3d_maps(rows, tar, list_file, new_cwd, fs) -> None: for row in rows: name = row["name"] start = row["start_time"] @@ -310,7 +310,7 @@ def export_stds( type_="strds", datatype=None, **kwargs, -): +) -> None: """Export space time datasets as tar archive with optional compression This method should be used to export space time datasets diff --git a/python/grass/temporal/stds_import.py b/python/grass/temporal/stds_import.py index 006ee6a387e..6cf64ac6012 100644 --- a/python/grass/temporal/stds_import.py +++ b/python/grass/temporal/stds_import.py @@ -54,8 +54,15 @@ def _import_raster_maps_from_gdal( - maplist, overr, exp, location, link, format_, set_current_region=False, memory=300 -): + maplist, + overr, + exp, + location, + link, + format_, + set_current_region: bool = False, + memory=300, +) -> None: impflags = "" if overr: impflags += "o" @@ 
-113,7 +120,7 @@ def _import_raster_maps_from_gdal( ############################################################################ -def _import_raster_maps(maplist, set_current_region=False): +def _import_raster_maps(maplist, set_current_region: bool = False) -> None: # We need to disable the projection check because of its # simple implementation impflags = "o" @@ -143,7 +150,7 @@ def _import_raster_maps(maplist, set_current_region=False): ############################################################################ -def _import_vector_maps_from_gml(maplist, overr, exp, location, link): +def _import_vector_maps_from_gml(maplist, overr, exp, location, link) -> None: impflags = "o" if exp or location: impflags += "e" @@ -169,7 +176,7 @@ def _import_vector_maps_from_gml(maplist, overr, exp, location, link): ############################################################################ -def _import_vector_maps(maplist): +def _import_vector_maps(maplist) -> None: # We need to disable the projection check because of its # simple implementation impflags = "o" @@ -208,15 +215,15 @@ def import_stds( title=None, descr=None, location=None, - link=False, - exp=False, - overr=False, - create=False, + link: bool = False, + exp: bool = False, + overr: bool = False, + create: bool = False, stds_type="strds", base=None, - set_current_region=False, + set_current_region: bool = False, memory=300, -): +) -> None: """Import space time datasets of type raster and vector :param input: Name of the input archive file @@ -240,7 +247,7 @@ def import_stds( :param memory: Cache size for raster rows, used in r.in.gdal """ - old_state = gs.raise_on_error + old_state = gs.get_raise_on_error() gs.set_raise_on_error(True) # Check if input file and extraction directory exits @@ -404,13 +411,14 @@ def import_stds( mapname = filename mapid = mapname + "@" + mapset - row = {} - row["filename"] = filename - row["name"] = mapname - row["id"] = mapid - row["start"] = line_list[1].strip() - row["end"] = line_list[2].strip() - row["semantic_label"] = line_list[3].strip() if len(line_list) == 4 else "" + row = { + "filename": filename, + "name": mapname, + "id": mapid, + "start": line_list[1].strip(), + "end": line_list[2].strip(), + "semantic_label": line_list[3].strip() if len(line_list) == 4 else "", + } new_list_file.write( f"{mapname}{fs}{row['start']}{fs}{row['end']}" diff --git a/python/grass/temporal/temporal_algebra.py b/python/grass/temporal/temporal_algebra.py index a27fa91cd1d..6c110205d3c 100644 --- a/python/grass/temporal/temporal_algebra.py +++ b/python/grass/temporal/temporal_algebra.py @@ -439,6 +439,10 @@ """ +from __future__ import annotations + +from typing import Literal + try: from ply import lex, yacc except: @@ -655,7 +659,7 @@ def t_LIST(self, t): # pass # Track line numbers. - def t_newline(self, t): + def t_newline(self, t) -> None: r"\n+" t.lineno += len(t.value) @@ -685,13 +689,13 @@ def t_error(self, t): ) # Build the lexer - def build(self, **kwargs): + def build(self, **kwargs) -> None: self.lexer = lex.lex( module=self, optimize=False, nowarn=True, debug=0, **kwargs ) # Just for testing - def test(self, data): + def test(self, data) -> None: self.name_list = {} print(data) self.lexer.input(data) @@ -711,7 +715,7 @@ class GlobalTemporalVar: if-statements can be stored in this class. 
""" - def __init__(self): + def __init__(self) -> None: self.tfunc = None self.compop = None self.value = None @@ -720,7 +724,7 @@ def __init__(self): self.topology = [] self.td = None - def get_type(self): + def get_type(self) -> Literal["global", "boolean", "operator", "timediff"] | None: if ( self.tfunc is not None and self.compop is not None @@ -749,15 +753,15 @@ def get_type_value(self): return valuelist - def __str__(self): + def __str__(self) -> str: return str(self.tfunc) + str(self.compop) + str(self.value) class FatalError(Exception): - def __init__(self, msg): + def __init__(self, msg) -> None: self.value = msg - def __str__(self): + def __str__(self) -> str: return self.value @@ -782,15 +786,15 @@ class TemporalAlgebraParser: def __init__( self, - pid=None, - run=True, - debug=False, - spatial=False, - register_null=False, - dry_run=False, - nprocs=1, + pid: int | None = None, + run: bool = True, + debug: bool = False, + spatial: bool = False, + register_null: bool = False, + dry_run: bool = False, + nprocs: int = 1, time_suffix=None, - ): + ) -> None: self.run = run # Compute the processes and output but Do not start the processes self.dry_run = dry_run @@ -846,11 +850,13 @@ def __init__( "MEET", ] - def __del__(self): + def __del__(self) -> None: if self.dbif.connected: self.dbif.close() - def setup_common_granularity(self, expression, stdstype="strds", lexer=None): + def setup_common_granularity( + self, expression, stdstype="strds", lexer=None + ) -> bool: """Configure the temporal algebra to use the common granularity of all space time datasets in the expression to generate the map lists. @@ -958,7 +964,7 @@ def parse( maptype="rast", mapclass=RasterDataset, basename=None, - overwrite=False, + overwrite: bool = False, ): """Parse the algebra expression and run the computation @@ -998,7 +1004,12 @@ def generate_map_name(self): return name def generate_new_map( - self, base_map, bool_op="and", copy=True, rename=True, remove=False + self, + base_map, + bool_op="and", + copy: bool = True, + rename: bool = True, + remove: bool = False, ): """Generate a new map using the spatio-temporal extent of the base map @@ -1030,7 +1041,9 @@ def generate_new_map( map_new.uid = name return map_new - def overlay_map_extent(self, mapA, mapB, bool_op=None, temp_op="l", copy=False): + def overlay_map_extent( + self, mapA, mapB, bool_op=None, temp_op="l", copy: bool = False + ): """Compute the spatio-temporal extent of two topological related maps :param mapA: The first map @@ -1174,7 +1187,7 @@ def set_temporal_extent_list(self, maplist, topolist=["EQUAL"], temporal="l"): resultlist = resultdict.values() return sorted(resultlist, key=AbstractDatasetComparisonKeyStartTime) - def remove_maps(self): + def remove_maps(self) -> None: """Removes empty or intermediate maps of different type.""" map_names = {} @@ -1191,7 +1204,7 @@ def remove_maps(self): self.msgr.message(_("Removing un-needed or empty %s maps") % (key)) self._remove_maps(value, key) - def _remove_maps(self, namelist, map_type): + def _remove_maps(self, namelist, map_type) -> None: """Remove maps of specific type :param namelist: List of map names to be removed @@ -1213,7 +1226,9 @@ def _remove_maps(self, namelist, map_type): if self.dry_run is False: m.run() - def check_stds(self, input, clear=False, stds_type=None, check_type=True): + def check_stds( + self, input, clear: bool = False, stds_type=None, check_type: bool = True + ): """Check if input space time dataset exist in database and return its map list. 
:param input: Name of space time data set as string or list of maps. @@ -1389,9 +1404,9 @@ def build_spatio_temporal_topology_list( maplistA, maplistB=None, topolist=["EQUAL"], - assign_val=False, - count_map=False, - compare_bool=False, + assign_val: bool = False, + count_map: bool = False, + compare_bool: bool = False, compop=None, aggregate=None, ): @@ -1756,7 +1771,12 @@ def eval_toperator(self, operator, optype="relation"): return (p.relations, p.temporal, p.function, p.aggregate) def perform_temporal_selection( - self, maplistA, maplistB, topolist=["EQUAL"], inverse=False, assign_val=False + self, + maplistA, + maplistB, + topolist=["EQUAL"], + inverse: bool = False, + assign_val: bool = False, ): """This function performs temporal selection operation. @@ -2252,7 +2272,7 @@ def build_condition_list(self, tvarexpr, thenlist, topolist=["EQUAL"]): # Sort resulting list of maps chronological. return sorted(resultlist, key=AbstractDatasetComparisonKeyStartTime) - def eval_condition_list(self, maplist, inverse=False): + def eval_condition_list(self, maplist, inverse: bool = False): """This function evaluates conditional values of a map list. A recursive function is used to evaluate comparison statements from left to right in the given conditional list. @@ -2318,7 +2338,7 @@ def recurse_compare(conditionlist): return inverselist return resultlist - def p_statement_assign(self, t): + def p_statement_assign(self, t) -> None: # The expression should always return a list of maps # This function starts all the work and is the last one that is called from the # parser @@ -2551,24 +2571,24 @@ def p_statement_assign(self, t): if self.debug: print(t[1], "=", t[3]) - def p_stds_1(self, t): + def p_stds_1(self, t) -> None: # Definition of a space time dataset """ stds : NAME """ t[0] = t[1] - def p_paren_expr(self, t): + def p_paren_expr(self, t) -> None: """expr : LPAREN expr RPAREN""" t[0] = t[2] - def p_number(self, t): + def p_number(self, t) -> None: """number : INT | FLOAT """ t[0] = t[1] - def p_expr_strds_function(self, t): + def p_expr_strds_function(self, t) -> None: # Explicitly specify a space time raster dataset # R = A : strds(B) """ @@ -2581,7 +2601,7 @@ def p_expr_strds_function(self, t): if self.debug: print("Opening STRDS: ", t[0]) - def p_expr_str3ds_function(self, t): + def p_expr_str3ds_function(self, t) -> None: # Explicitly specify a space time raster dataset # R = A : str3ds(B) """ @@ -2594,7 +2614,7 @@ def p_expr_str3ds_function(self, t): if self.debug: print("Opening STR3DS: ", t[0]) - def p_expr_stvds_function(self, t): + def p_expr_stvds_function(self, t) -> None: # Explicitly specify a space time vector dataset # R = A : stvds(B) """ @@ -2709,7 +2729,7 @@ def p_expr_tmerge_function(self, t): if self.debug: print("merge(", t[3], ",", t[5], ")") - def p_t_hash(self, t): + def p_t_hash(self, t) -> None: """ t_hash_var : stds HASH stds | expr HASH stds @@ -2725,7 +2745,7 @@ def p_t_hash(self, t): ) t[0] = resultlist - def p_t_hash2(self, t): + def p_t_hash2(self, t) -> None: """ t_hash_var : stds T_HASH_OPERATOR stds | stds T_HASH_OPERATOR expr @@ -2742,13 +2762,13 @@ def p_t_hash2(self, t): ) t[0] = resultlist - def p_t_hash_paren(self, t): + def p_t_hash_paren(self, t) -> None: """ t_hash_var : LPAREN t_hash_var RPAREN """ t[0] = t[2] - def p_t_td_var(self, t): + def p_t_td_var(self, t) -> None: """ t_td_var : TD LPAREN stds RPAREN | TD LPAREN expr RPAREN @@ -2778,7 +2798,7 @@ def p_t_td_var(self, t): if self.debug: print("td(" + str(t[3]) + ")") - def p_t_time_var(self, t): 
+ def p_t_time_var(self, t) -> None: # Temporal variables that return a double or integer value """ t_var : START_DOY @@ -2803,7 +2823,7 @@ def p_t_time_var(self, t): t[0] = t[1] - def p_compare_op(self, t): + def p_compare_op(self, t) -> None: # Compare operators that are supported for temporal expressions """ comp_op : CEQUALS @@ -2815,7 +2835,7 @@ def p_compare_op(self, t): """ t[0] = t[1] - def p_t_var_expr_td_hash(self, t): + def p_t_var_expr_td_hash(self, t) -> None: # Examples: # A # B == 2 # td(A) < 31 @@ -2848,7 +2868,7 @@ def p_t_var_expr_td_hash(self, t): if self.debug: print(t[1], t[2], t[3]) - def p_t_var_expr_number(self, t): + def p_t_var_expr_number(self, t) -> None: # Examples: # start_month(A) > 2 # start_day(B) < 14 @@ -2877,7 +2897,7 @@ def p_t_var_expr_number(self, t): if self.debug: print(t[1], t[3], t[5], t[6]) - def p_t_var_expr_time(self, t): + def p_t_var_expr_time(self, t) -> None: # Examples: # start_time(A) == "12:30:00" # start_date(B) <= "2001-01-01" @@ -2914,7 +2934,7 @@ def p_t_var_expr_time(self, t): if self.debug: print(t[1], t[3], t[5], t[6]) - def p_t_var_expr_comp(self, t): + def p_t_var_expr_comp(self, t) -> None: """ t_var_expr : t_var_expr AND AND t_var_expr | t_var_expr OR OR t_var_expr @@ -2946,7 +2966,7 @@ def p_t_var_expr_comp(self, t): if self.debug: print(t[1], t[2] + t[3], t[4]) - def p_t_var_expr_comp_op(self, t): + def p_t_var_expr_comp_op(self, t) -> None: """ t_var_expr : t_var_expr T_COMP_OPERATOR t_var_expr """ @@ -2976,7 +2996,7 @@ def p_t_var_expr_comp_op(self, t): if self.debug: print(t[1], t[2], t[3]) - def p_expr_t_select(self, t): + def p_expr_t_select(self, t) -> None: # Temporal equal selection # The temporal topology relation equals is implicit # Examples: @@ -3003,7 +3023,7 @@ def p_expr_t_select(self, t): if self.debug: print(str(t[1]), "* = ", t[1], t[2], t[3]) - def p_expr_t_not_select(self, t): + def p_expr_t_not_select(self, t) -> None: # Temporal equal selection # The temporal topology relation equals is implicit # Examples: @@ -3030,7 +3050,7 @@ def p_expr_t_not_select(self, t): if self.debug: print(t[1] + "* = ", t[1], t[2], t[3]) - def p_expr_t_select_operator(self, t): + def p_expr_t_select_operator(self, t) -> None: # Temporal equal selection # The temporal topology relation equals is implicit # Examples: @@ -3072,7 +3092,7 @@ def p_expr_t_select_operator(self, t): if self.debug: print(t[1] + "* = ", t[1], t[2], t[3]) - def p_expr_condition_if(self, t): + def p_expr_condition_if(self, t) -> None: # Examples # if( start_date() < "2005-06-01", A:B) """ @@ -3096,7 +3116,7 @@ def p_expr_condition_if(self, t): if self.debug: print(str(t[5]) + "* = ", "if condition", str(t[3]), " then ", str(t[5])) - def p_expr_condition_if_relation(self, t): + def p_expr_condition_if_relation(self, t) -> None: # Examples # if({equal} start_date() < "2005-06-01", A:B) """ @@ -3129,7 +3149,7 @@ def p_expr_condition_if_relation(self, t): str(t[7]), ) - def p_expr_condition_elif(self, t): + def p_expr_condition_elif(self, t) -> None: # Examples # if( start_date() < "2005-06-01", if(start_time() < "12:30:00", A:B), A!:B) """ @@ -3170,7 +3190,7 @@ def p_expr_condition_elif(self, t): str(t[7]), ) - def p_expr_condition_elif_relation(self, t): + def p_expr_condition_elif_relation(self, t) -> None: # Examples # if({equal}, start_date() < "2005-06-01", # if(start_time() < "12:30:00", A:B), A!:B) @@ -3230,7 +3250,7 @@ def p_expr_condition_elif_relation(self, t): str(t[9]), ) - def p_expr_t_buff(self, t): + def p_expr_t_buff(self, t) -> None: # 
Examples # buff_t(A : B, "10 minutes") # Select the part of A that is temporally # equal to B and create a buffer of 10 minutes @@ -3271,7 +3291,7 @@ def p_expr_t_buff(self, t): elif len(t) == 7: print(str(t[3]) + "* = buff_t(", str(t[3]), ",", str(t[5]), ")") - def p_expr_t_snap(self, t): + def p_expr_t_snap(self, t) -> None: # Examples # tsnap(A : B) # Snap the maps of A temporally. """ @@ -3290,7 +3310,7 @@ def p_expr_t_snap(self, t): if self.debug: print(str(t[3]) + "* = tsnap(", str(t[3]), ")") - def p_expr_t_shift(self, t): + def p_expr_t_shift(self, t) -> None: # Examples # tshift(A : B, "10 minutes") # Shift the selection from A temporally # by 10 minutes. @@ -3329,12 +3349,11 @@ def p_expr_t_shift(self, t): elif len(t) == 7: print(str(t[3]) + "* = tshift(", str(t[3]), ",", str(t[5]), ")") - def p_expr_time_const(self, t): + def p_expr_time_const(self, t) -> None: # Examples # start_doy(A, -1) # Get the start DOY from the preceding map # of the time series as a numerical constant # for the mapcalculator expression - """ expr : t_var LPAREN NAME COMMA INT RPAREN """ @@ -3351,7 +3370,7 @@ def p_expr_time_const(self, t): # Get map index and temporal extent. map_index = map_list.index(map_i) new_index = map_index + t_neighbour - if new_index < max_index and new_index >= 0: + if 0 <= new_index < max_index: # Get neighbouring map and set temporal extent. map_n = map_list[new_index] map_i_t_extent = map_i.get_temporal_extent() @@ -3378,7 +3397,8 @@ def p_error(self, t): "syntax error on line %d, position %i token %s near '%s' expression " "'%s'" % (t.lineno, t.lexpos, t.type, t.value, self.expression) ) - raise SyntaxError("Unexpected syntax error") + msg = "Unexpected syntax error" + raise SyntaxError(msg) if __name__ == "__main__": diff --git a/python/grass/temporal/temporal_extent.py b/python/grass/temporal/temporal_extent.py index 29594ea8b72..692632db08f 100644 --- a/python/grass/temporal/temporal_extent.py +++ b/python/grass/temporal/temporal_extent.py @@ -81,7 +81,7 @@ class TemporalExtent(SQLDatabaseInterface): """ - def __init__(self, table=None, ident=None, start_time=None, end_time=None): + def __init__(self, table=None, ident=None, start_time=None, end_time=None) -> None: SQLDatabaseInterface.__init__(self, table, ident) self.set_id(ident) @@ -861,7 +861,7 @@ def overlaps(self, extent) -> bool: ) def overlapped(self, extent) -> bool: - """Return True if this temporal extent (A) overlapps the provided + """Return True if this temporal extent (A) overlaps the provided temporal extent (B) :: @@ -967,16 +967,16 @@ def temporal_relation(self, extent): return "precedes" return None - def set_id(self, ident): + def set_id(self, ident) -> None: """Convenient method to set the unique identifier (primary key)""" self.ident = ident self.D["id"] = ident - def set_start_time(self, start_time): + def set_start_time(self, start_time) -> None: """Set the valid start time of the extent""" self.D["start_time"] = start_time - def set_end_time(self, end_time): + def set_end_time(self, end_time) -> None: """Set the valid end time of the extent""" self.D["end_time"] = end_time @@ -1007,13 +1007,13 @@ def get_end_time(self): start_time = property(fget=get_start_time, fset=set_start_time) end_time = property(fget=get_end_time, fset=set_end_time) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" # 0123456789012345678901234567890 print(" | Start time:................. 
" + str(self.get_start_time())) print(" | End time:................... " + str(self.get_end_time())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" print("start_time='{}'".format(str(self.get_start_time()))) print("end_time='{}'".format(str(self.get_end_time()))) @@ -1028,10 +1028,10 @@ class AbsoluteTemporalExtent(TemporalExtent): start_time and end_time must be of type datetime """ - def __init__(self, table=None, ident=None, start_time=None, end_time=None): + def __init__(self, table=None, ident=None, start_time=None, end_time=None) -> None: TemporalExtent.__init__(self, table, ident, start_time, end_time) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" # 0123456789012345678901234567890 print( @@ -1039,7 +1039,7 @@ def print_info(self): ) TemporalExtent.print_info(self) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" TemporalExtent.print_shell_info(self) @@ -1048,21 +1048,21 @@ def print_shell_info(self): class RasterAbsoluteTime(AbsoluteTemporalExtent): - def __init__(self, ident=None, start_time=None, end_time=None): + def __init__(self, ident=None, start_time=None, end_time=None) -> None: AbsoluteTemporalExtent.__init__( self, "raster_absolute_time", ident, start_time, end_time ) class Raster3DAbsoluteTime(AbsoluteTemporalExtent): - def __init__(self, ident=None, start_time=None, end_time=None): + def __init__(self, ident=None, start_time=None, end_time=None) -> None: AbsoluteTemporalExtent.__init__( self, "raster3d_absolute_time", ident, start_time, end_time ) class VectorAbsoluteTime(AbsoluteTemporalExtent): - def __init__(self, ident=None, start_time=None, end_time=None): + def __init__(self, ident=None, start_time=None, end_time=None) -> None: AbsoluteTemporalExtent.__init__( self, "vector_absolute_time", ident, start_time, end_time ) @@ -1122,17 +1122,17 @@ def __init__( end_time=None, granularity=None, map_time=None, - ): + ) -> None: AbsoluteTemporalExtent.__init__(self, table, ident, start_time, end_time) self.set_granularity(granularity) self.set_map_time(map_time) - def set_granularity(self, granularity): + def set_granularity(self, granularity) -> None: """Set the granularity of the space time dataset""" self.D["granularity"] = granularity - def set_map_time(self, map_time): + def set_map_time(self, map_time) -> None: """Set the type of the map time Registered maps may have different types of time: @@ -1171,14 +1171,14 @@ def get_map_time(self): granularity = property(fget=get_granularity, fset=set_granularity) map_time = property(fget=get_map_time, fset=set_map_time) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" AbsoluteTemporalExtent.print_info(self) # 0123456789012345678901234567890 print(" | Granularity:................ " + str(self.get_granularity())) print(" | Temporal type of maps:...... 
" + str(self.get_map_time())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" AbsoluteTemporalExtent.print_shell_info(self) print("granularity='{}'".format(str(self.get_granularity()))) @@ -1189,21 +1189,27 @@ def print_shell_info(self): class STRDSAbsoluteTime(STDSAbsoluteTime): - def __init__(self, ident=None, start_time=None, end_time=None, granularity=None): + def __init__( + self, ident=None, start_time=None, end_time=None, granularity=None + ) -> None: STDSAbsoluteTime.__init__( self, "strds_absolute_time", ident, start_time, end_time, granularity ) class STR3DSAbsoluteTime(STDSAbsoluteTime): - def __init__(self, ident=None, start_time=None, end_time=None, granularity=None): + def __init__( + self, ident=None, start_time=None, end_time=None, granularity=None + ) -> None: STDSAbsoluteTime.__init__( self, "str3ds_absolute_time", ident, start_time, end_time, granularity ) class STVDSAbsoluteTime(STDSAbsoluteTime): - def __init__(self, ident=None, start_time=None, end_time=None, granularity=None): + def __init__( + self, ident=None, start_time=None, end_time=None, granularity=None + ) -> None: STDSAbsoluteTime.__init__( self, "stvds_absolute_time", ident, start_time, end_time, granularity ) @@ -1251,11 +1257,11 @@ class RelativeTemporalExtent(TemporalExtent): def __init__( self, table=None, ident=None, start_time=None, end_time=None, unit=None - ): + ) -> None: TemporalExtent.__init__(self, table, ident, start_time, end_time) self.set_unit(unit) - def set_unit(self, unit): + def set_unit(self, unit) -> None: """Set the unit of the relative time. Valid units are: - years @@ -1295,7 +1301,7 @@ def temporal_relation(self, map): # Properties unit = property(fget=get_unit, fset=set_unit) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" # 0123456789012345678901234567890 print( @@ -1304,7 +1310,7 @@ def print_info(self): TemporalExtent.print_info(self) print(" | Relative time unit:......... 
" + str(self.get_unit())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" TemporalExtent.print_shell_info(self) print("unit=" + str(self.get_unit())) @@ -1314,21 +1320,21 @@ def print_shell_info(self): class RasterRelativeTime(RelativeTemporalExtent): - def __init__(self, ident=None, start_time=None, end_time=None, unit=None): + def __init__(self, ident=None, start_time=None, end_time=None, unit=None) -> None: RelativeTemporalExtent.__init__( self, "raster_relative_time", ident, start_time, end_time, unit ) class Raster3DRelativeTime(RelativeTemporalExtent): - def __init__(self, ident=None, start_time=None, end_time=None, unit=None): + def __init__(self, ident=None, start_time=None, end_time=None, unit=None) -> None: RelativeTemporalExtent.__init__( self, "raster3d_relative_time", ident, start_time, end_time, unit ) class VectorRelativeTime(RelativeTemporalExtent): - def __init__(self, ident=None, start_time=None, end_time=None, unit=None): + def __init__(self, ident=None, start_time=None, end_time=None, unit=None) -> None: RelativeTemporalExtent.__init__( self, "vector_relative_time", ident, start_time, end_time, unit ) @@ -1393,17 +1399,17 @@ def __init__( unit=None, granularity=None, map_time=None, - ): + ) -> None: RelativeTemporalExtent.__init__(self, table, ident, start_time, end_time, unit) self.set_granularity(granularity) self.set_map_time(map_time) - def set_granularity(self, granularity): + def set_granularity(self, granularity) -> None: """Set the granularity of the space time dataset""" self.D["granularity"] = granularity - def set_map_time(self, map_time): + def set_map_time(self, map_time) -> None: """Set the type of the map time Registered maps may have different types of time: @@ -1442,14 +1448,14 @@ def get_map_time(self): granularity = property(fget=get_granularity, fset=set_granularity) map_time = property(fget=get_map_time, fset=set_map_time) - def print_info(self): + def print_info(self) -> None: """Print information about this class in human readable style""" RelativeTemporalExtent.print_info(self) # 0123456789012345678901234567890 print(" | Granularity:................ " + str(self.get_granularity())) print(" | Temporal type of maps:...... 
" + str(self.get_map_time())) - def print_shell_info(self): + def print_shell_info(self) -> None: """Print information about this class in shell style""" RelativeTemporalExtent.print_shell_info(self) print("granularity=" + str(self.get_granularity())) @@ -1468,7 +1474,7 @@ def __init__( unit=None, granularity=None, map_time=None, - ): + ) -> None: STDSRelativeTime.__init__( self, "strds_relative_time", @@ -1490,7 +1496,7 @@ def __init__( unit=None, granularity=None, map_time=None, - ): + ) -> None: STDSRelativeTime.__init__( self, "str3ds_relative_time", @@ -1512,7 +1518,7 @@ def __init__( unit=None, granularity=None, map_time=None, - ): + ) -> None: STDSRelativeTime.__init__( self, "stvds_relative_time", diff --git a/python/grass/temporal/temporal_granularity.py b/python/grass/temporal/temporal_granularity.py index 173e4e715ea..537868eee31 100644 --- a/python/grass/temporal/temporal_granularity.py +++ b/python/grass/temporal/temporal_granularity.py @@ -37,7 +37,7 @@ ############################################################################### -def check_granularity_string(granularity, temporal_type): +def check_granularity_string(granularity, temporal_type) -> bool: """Check if the granularity string is valid :param granularity: The granularity string @@ -1176,7 +1176,7 @@ def gran_plural_unit(gran): ######################################################################## -def gran_to_gran(from_gran, to_gran="days", shell=False): +def gran_to_gran(from_gran, to_gran="days", shell: bool = False): """Converts the computed absolute granularity of a STDS to a smaller granularity based on the Gregorian calendar hierarchy that 1 year equals 12 months or 365.2425 days or 24 * 365.2425 hours or 86400 * diff --git a/python/grass/temporal/temporal_operator.py b/python/grass/temporal/temporal_operator.py index d99244c7c49..34f872508c1 100644 --- a/python/grass/temporal/temporal_operator.py +++ b/python/grass/temporal/temporal_operator.py @@ -228,7 +228,7 @@ class TemporalOperatorLexer: t_ignore = " \t\n" # Track line numbers. - def t_newline(self, t): + def t_newline(self, t) -> None: r"\n+" t.lineno += len(t.value) @@ -268,13 +268,13 @@ def t_error(self, t): ) # Build the lexer - def build(self, **kwargs): + def build(self, **kwargs) -> None: self.lexer = lex.lex( module=self, optimize=False, nowarn=True, debug=0, **kwargs ) # Just for testing - def test(self, data): + def test(self, data) -> None: self.name_list = {} print(data) self.lexer.input(data) @@ -291,7 +291,7 @@ def test(self, data): class TemporalOperatorParser: """The temporal operator class""" - def __init__(self): + def __init__(self) -> None: self.lexer = TemporalOperatorLexer() self.lexer.build() self.parser = yacc.yacc(module=self, debug=0) @@ -350,7 +350,7 @@ def p_relation_operator(self, t): | CLPAREN relationlist CRPAREN """ # Check for correct type. - if not self.optype == "relation": + if self.optype != "relation": raise SyntaxError('Wrong optype "%s" must be "relation"' % self.optype) # Set three operator components. @@ -372,7 +372,7 @@ def p_relation_bool_operator(self, t): | CLPAREN OR OR COMMA relationlist CRPAREN | CLPAREN AND AND COMMA relationlist CRPAREN """ - if not self.optype == "boolean": + if self.optype != "boolean": raise SyntaxError('Wrong optype "%s" must be "boolean"' % self.optype) # Set three operator components. 
@@ -399,7 +399,7 @@ def p_relation_bool_combi_operator(self, t): | CLPAREN AND AND COMMA relationlist COMMA OR CRPAREN | CLPAREN AND AND COMMA relationlist COMMA AND CRPAREN """ - if not self.optype == "boolean": + if self.optype != "boolean": raise SyntaxError('Wrong optype "%s" must be "boolean"' % self.optype) # Set three operator components. @@ -422,7 +422,7 @@ def p_relation_bool_combi_operator2(self, t): | CLPAREN OR OR COMMA relationlist COMMA temporal CRPAREN | CLPAREN AND AND COMMA relationlist COMMA temporal CRPAREN """ - if not self.optype == "boolean": + if self.optype != "boolean": raise SyntaxError('Wrong optype "%s" must be "boolean"' % self.optype) # Set three operator components. @@ -449,7 +449,7 @@ def p_relation_bool_combi_operator3(self, t): | CLPAREN AND AND COMMA relationlist COMMA OR COMMA temporal CRPAREN | CLPAREN AND AND COMMA relationlist COMMA AND COMMA temporal CRPAREN """ - if not self.optype == "boolean": + if self.optype != "boolean": raise SyntaxError('Wrong optype "%s" must be "relation"' % self.optype) # Set three operator components. @@ -475,7 +475,7 @@ def p_select_relation_operator(self, t): | CLPAREN select COMMA relation COMMA temporal CRPAREN | CLPAREN select COMMA relationlist COMMA temporal CRPAREN """ - if not self.optype == "select": + if self.optype != "select": raise SyntaxError('Wrong optype "%s" must be "select"' % self.optype) if len(t) == 4: @@ -512,7 +512,7 @@ def p_hash_relation_operator(self, t): | CLPAREN HASH COMMA relation COMMA temporal CRPAREN | CLPAREN HASH COMMA relationlist COMMA temporal CRPAREN """ - if not self.optype == "hash": + if self.optype != "hash": raise SyntaxError('Wrong optype "%s" must be "hash"' % self.optype) if len(t) == 4: @@ -549,7 +549,7 @@ def p_raster_relation_operator(self, t): | CLPAREN arithmetic COMMA relation COMMA temporal CRPAREN | CLPAREN arithmetic COMMA relationlist COMMA temporal CRPAREN """ - if not self.optype == "raster": + if self.optype != "raster": raise SyntaxError('Wrong optype "%s" must be "raster"' % self.optype) if len(t) == 4: @@ -586,7 +586,7 @@ def p_overlay_relation_operator(self, t): | CLPAREN overlay COMMA relation COMMA temporal CRPAREN | CLPAREN overlay COMMA relationlist COMMA temporal CRPAREN """ - if not self.optype == "overlay": + if self.optype != "overlay": raise SyntaxError('Wrong optype "%s" must be "overlay"' % self.optype) if len(t) == 4: @@ -611,7 +611,7 @@ def p_overlay_relation_operator(self, t): t[0] = t[2] - def p_relation(self, t): + def p_relation(self, t) -> None: # The list of relations. Temporal and spatial relations are supported """ relation : EQUAL @@ -634,7 +634,7 @@ def p_relation(self, t): """ t[0] = t[1] - def p_over(self, t): + def p_over(self, t) -> None: # The the over keyword """ relation : OVER @@ -642,7 +642,7 @@ def p_over(self, t): over_list = ["overlaps", "overlapped"] t[0] = over_list - def p_relationlist(self, t): + def p_relationlist(self, t) -> None: # The list of relations. """ relationlist : relation OR relation @@ -656,7 +656,7 @@ def p_relationlist(self, t): rel_list.append(t[3]) t[0] = rel_list - def p_temporal_operator(self, t): + def p_temporal_operator(self, t) -> None: # The list of relations. """ temporal : LEFTREF @@ -667,7 +667,7 @@ def p_temporal_operator(self, t): """ t[0] = t[1] - def p_select_operator(self, t): + def p_select_operator(self, t) -> None: # The list of relations. 
""" select : T_SELECT @@ -675,7 +675,7 @@ def p_select_operator(self, t): """ t[0] = t[1] - def p_arithmetic_operator(self, t): + def p_arithmetic_operator(self, t) -> None: # The list of relations. """ arithmetic : MOD @@ -686,7 +686,7 @@ def p_arithmetic_operator(self, t): """ t[0] = t[1] - def p_overlay_operator(self, t): + def p_overlay_operator(self, t) -> None: # The list of relations. """ overlay : AND diff --git a/python/grass/temporal/temporal_raster3d_algebra.py b/python/grass/temporal/temporal_raster3d_algebra.py index cd26f8cb220..fdf80661429 100644 --- a/python/grass/temporal/temporal_raster3d_algebra.py +++ b/python/grass/temporal/temporal_raster3d_algebra.py @@ -11,6 +11,8 @@ """ +from __future__ import annotations + try: from ply import yacc except ImportError: @@ -30,14 +32,14 @@ class TemporalRaster3DAlgebraParser(TemporalRasterBaseAlgebraParser): def __init__( self, - pid=None, - run=False, - debug=True, - spatial=False, - register_null=False, - dry_run=False, - nprocs=1, - ): + pid: int | None = None, + run: bool = False, + debug: bool = True, + spatial: bool = False, + register_null: bool = False, + dry_run: bool = False, + nprocs: int = 1, + ) -> None: TemporalRasterBaseAlgebraParser.__init__( self, pid=pid, @@ -52,7 +54,7 @@ def __init__( self.m_mapcalc = pymod.Module("r3.mapcalc") self.m_mremove = pymod.Module("g.remove") - def parse(self, expression, basename=None, overwrite=False): + def parse(self, expression, basename=None, overwrite: bool = False): # Check for space time dataset type definitions from temporal algebra lx = TemporalRasterAlgebraLexer() lx.build() @@ -81,14 +83,14 @@ def parse(self, expression, basename=None, overwrite=False): return self.process_chain_dict - def p_statement_assign(self, t): + def p_statement_assign(self, t) -> None: # The expression should always return a list of maps. """ statement : stds EQUALS expr """ TemporalRasterBaseAlgebraParser.p_statement_assign(self, t) - def p_ts_neighbor_operation(self, t): + def p_ts_neighbor_operation(self, t) -> None: # Examples: # A[1,0,-1] # B[-2] @@ -122,7 +124,7 @@ def p_ts_neighbor_operation(self, t): # Get map index and temporal extent. map_index = maplist.index(map_i) new_index = map_index + t_neighbor - if new_index < max_index and new_index >= 0: + if 0 <= new_index < max_index: map_i_t_extent = map_i.get_temporal_extent() # Get neighboring map and set temporal extent. 
map_n = maplist[new_index] diff --git a/python/grass/temporal/temporal_raster_algebra.py b/python/grass/temporal/temporal_raster_algebra.py index 1785183cc50..053c768ae73 100644 --- a/python/grass/temporal/temporal_raster_algebra.py +++ b/python/grass/temporal/temporal_raster_algebra.py @@ -52,6 +52,8 @@ """ +from __future__ import annotations + try: from ply import yacc except ImportError: @@ -71,15 +73,15 @@ class TemporalRasterAlgebraParser(TemporalRasterBaseAlgebraParser): def __init__( self, - pid=None, - run=False, - debug=True, - spatial=False, - register_null=False, - dry_run=False, - nprocs=1, + pid: int | None = None, + run: bool = False, + debug: bool = True, + spatial: bool = False, + register_null: bool = False, + dry_run: bool = False, + nprocs: int = 1, time_suffix=None, - ): + ) -> None: TemporalRasterBaseAlgebraParser.__init__( self, pid=pid, @@ -98,7 +100,7 @@ def __init__( self.m_mapcalc = pymod.Module("r.mapcalc") self.m_mremove = pymod.Module("g.remove") - def parse(self, expression, basename=None, overwrite=False): + def parse(self, expression, basename=None, overwrite: bool = False): # Check for space time dataset type definitions from temporal algebra lx = TemporalRasterAlgebraLexer() lx.build() @@ -127,14 +129,14 @@ def parse(self, expression, basename=None, overwrite=False): return self.process_chain_dict - def p_statement_assign(self, t): + def p_statement_assign(self, t) -> None: # The expression should always return a list of maps. """ statement : stds EQUALS expr """ TemporalRasterBaseAlgebraParser.p_statement_assign(self, t) - def p_ts_neighbour_operation(self, t): + def p_ts_neighbour_operation(self, t) -> None: # Spatial and temporal neighbour operations via indexing # Examples: # A[1,0] @@ -166,7 +168,7 @@ def p_ts_neighbour_operation(self, t): # Get map index and temporal extent. map_index = maplist.index(map_i) new_index = map_index + t_neighbour - if new_index < max_index and new_index >= 0: + if 0 <= new_index < max_index: map_i_t_extent = map_i.get_temporal_extent() # Get neighbouring map and set temporal extent. map_n = maplist[new_index] diff --git a/python/grass/temporal/temporal_raster_base_algebra.py b/python/grass/temporal/temporal_raster_base_algebra.py index bb58c8405f7..b533d96e6f6 100644 --- a/python/grass/temporal/temporal_raster_base_algebra.py +++ b/python/grass/temporal/temporal_raster_base_algebra.py @@ -41,6 +41,8 @@ """ +from __future__ import annotations + import copy import grass.pygrass.modules as pymod @@ -68,7 +70,7 @@ class TemporalRasterAlgebraLexer(TemporalAlgebraLexer): """Lexical analyzer for the GRASS GIS temporal algebra""" - def __init__(self): + def __init__(self) -> None: TemporalAlgebraLexer.__init__(self) # Supported r.mapcalc functions. 
@@ -170,15 +172,15 @@ class TemporalRasterBaseAlgebraParser(TemporalAlgebraParser): def __init__( self, - pid=None, - run=True, - debug=False, - spatial=False, - register_null=False, - dry_run=False, - nprocs=1, + pid: int | None = None, + run: bool = True, + debug: bool = False, + spatial: bool = False, + register_null: bool = False, + dry_run: bool = False, + nprocs: int = 1, time_suffix=None, - ): + ) -> None: TemporalAlgebraParser.__init__( self, pid=pid, @@ -203,15 +205,15 @@ def build_spatio_temporal_topology_list( maplistA, maplistB=None, topolist=["EQUAL"], - assign_val=False, - count_map=False, - compare_bool=False, - compare_cmd=False, + assign_val: bool = False, + count_map: bool = False, + compare_bool: bool = False, + compare_cmd: bool = False, compop=None, aggregate=None, - new=False, - convert=False, - operator_cmd=False, + new: bool = False, + convert: bool = False, + operator_cmd: bool = False, ): """Build temporal topology for two space time data sets, copy map objects for given relation into map list. @@ -387,7 +389,7 @@ def sub_cmdstring(map_i): else: try: map_sub = map_i.get_id() - except: + except AttributeError: map_sub = map_i return map_sub @@ -420,7 +422,7 @@ def compare_cmd_value( aggregate, temporal_topo_list=["EQUAL"], spatial_topo_list=[], - convert=False, + convert: bool = False, ): """Function to evaluate two map lists with boolean values by boolean comparison operator. @@ -550,7 +552,7 @@ def set_temporal_extent_list( maplist, topolist=["EQUAL"], temporal="l", - cmd_bool=False, + cmd_bool: bool = False, cmd_type=None, operator=None, ): @@ -643,7 +645,7 @@ def build_condition_cmd_list( condition_topolist=["EQUAL"], conclusion_topolist=["EQUAL"], temporal="l", - null=False, + null: bool = False, ): """This function build the r.mapcalc command strings for spatial conditionals. 
For Example: 'if(a1 == 1, b1, c2)' @@ -742,7 +744,7 @@ def build_condition_cmd_list( cmd_type="condition", ) - def p_statement_assign(self, t): + def p_statement_assign(self, t) -> None: # This function executes the processing of raster/raster3d algebra # that was build based on the expression """ @@ -982,7 +984,7 @@ def p_expr_spmap_function(self, t): if self.debug: print("map(" + t[3] + ")") - def p_arith1_operation(self, t): + def p_arith1_operation(self, t) -> None: # A % B # A / B # A * B @@ -1057,7 +1059,7 @@ def p_arith1_operation(self, t): for map in resultlist: print(map.cmd_list) - def p_arith1_operation_numeric1(self, t): + def p_arith1_operation_numeric1(self, t) -> None: # A % 1 # A / 4 # A * 5 @@ -1107,7 +1109,7 @@ def p_arith1_operation_numeric1(self, t): for map in resultlist: print(map.cmd_list) - def p_arith1_operation_numeric2(self, t): + def p_arith1_operation_numeric2(self, t) -> None: # 1 % A # 4 / A # 5 * A @@ -1157,7 +1159,7 @@ def p_arith1_operation_numeric2(self, t): for map in resultlist: print(map.cmd_list) - def p_arith2_operation(self, t): + def p_arith2_operation(self, t) -> None: # A + B # A - B # A + td(B) @@ -1226,7 +1228,7 @@ def p_arith2_operation(self, t): for map in resultlist: print(map.cmd_list) - def p_arith2_operation_numeric1(self, t): + def p_arith2_operation_numeric1(self, t) -> None: # A + 2 # A - 3 # A + map(b4) @@ -1268,7 +1270,7 @@ def p_arith2_operation_numeric1(self, t): for map in resultlist: print(map.cmd_list) - def p_arith2_operation_numeric2(self, t): + def p_arith2_operation_numeric2(self, t) -> None: # 2 + A # 3 - A # map(b2) + A @@ -1310,7 +1312,7 @@ def p_arith2_operation_numeric2(self, t): for map in resultlist: print(map.cmd_list) - def p_arith1_operation_relation(self, t): + def p_arith1_operation_relation(self, t) -> None: # A {*, equal, l} B # A {*, equal, l} td(B) # A {*, equal, l} B {/, during, r} C @@ -1349,7 +1351,7 @@ def p_arith1_operation_relation(self, t): for map in resultlist: print(map.cmd_list) - def p_arith2_operation_relation(self, t): + def p_arith2_operation_relation(self, t) -> None: # A {+, equal, l} B # A {+, equal, l} td(b) # A {+, equal, l} B {-, during, r} C @@ -1388,7 +1390,7 @@ def p_arith2_operation_relation(self, t): for map in resultlist: print(map.cmd_list) - def p_arith_operation_numeric_string(self, t): + def p_arith_operation_numeric_string(self, t) -> None: # 1 + 1 # 1 - 1 # 1 * 1 @@ -1408,7 +1410,7 @@ def p_arith_operation_numeric_string(self, t): if self.debug: print(numstring) - def p_mapcalc_function(self, t): + def p_mapcalc_function(self, t) -> None: # Supported mapcalc functions. 
""" mapcalc_arith : ABS @@ -1429,7 +1431,7 @@ def p_mapcalc_function(self, t): if self.debug: print(t[1]) - def p_mapcalc_operation1(self, t): + def p_mapcalc_operation1(self, t) -> None: # sin(A) # log(B) """ @@ -1458,7 +1460,7 @@ def p_mapcalc_operation1(self, t): for map in resultlist: print(map.cmd_list) - def p_mapexpr_operation(self, t): + def p_mapexpr_operation(self, t) -> None: # sin(map(a)) """ mapexpr : mapcalc_arith LPAREN mapexpr RPAREN @@ -1474,7 +1476,7 @@ def p_mapexpr_operation(self, t): if self.debug: print(mapstring) - def p_s_var_expr_1(self, t): + def p_s_var_expr_1(self, t) -> None: # isnull(A) """ s_var_expr : ISNULL LPAREN stds RPAREN @@ -1502,7 +1504,7 @@ def p_s_var_expr_1(self, t): for map in resultlist: print(map.cmd_list) - def p_s_var_expr_2(self, t): + def p_s_var_expr_2(self, t) -> None: # isntnull(A) """ s_var_expr : ISNTNULL LPAREN stds RPAREN @@ -1530,7 +1532,7 @@ def p_s_var_expr_2(self, t): for map in resultlist: print(map.cmd_list) - def p_s_var_expr_3(self, t): + def p_s_var_expr_3(self, t) -> None: # A <= 2 """ s_var_expr : stds comp_op number @@ -1558,7 +1560,7 @@ def p_s_var_expr_3(self, t): for map in resultlist: print(map.cmd_list) - def p_s_var_expr_4(self, t): + def p_s_var_expr_4(self, t) -> None: # exist(B) """ s_var_expr : EXIST LPAREN stds RPAREN @@ -1586,7 +1588,7 @@ def p_s_var_expr_4(self, t): for map in resultlist: print(map.cmd_list) - def p_s_var_expr_comp(self, t): + def p_s_var_expr_comp(self, t) -> None: # A <= 2 || B == 10 # A < 3 && A > 1 """ @@ -1621,7 +1623,7 @@ def p_s_var_expr_comp(self, t): for map in resultlist: print(map.cmd_list) - def p_s_var_expr_comp_op(self, t): + def p_s_var_expr_comp_op(self, t) -> None: # A <= 2 {||} B == 10 # A < 3 {&&, equal} A > 1 """ @@ -1655,7 +1657,7 @@ def p_s_var_expr_comp_op(self, t): for map in resultlist: print(map.cmd_list) - def p_s_expr_condition_if(self, t): + def p_s_expr_condition_if(self, t) -> None: # if(s_var_expr, B) # if(A == 1, B) """ @@ -1681,7 +1683,7 @@ def p_s_expr_condition_if(self, t): for map in resultlist: print(map.cmd_list) - def p_s_numeric_condition_if(self, t): + def p_s_numeric_condition_if(self, t) -> None: # if(s_var_expr, 1) # if(A == 5, 10) """ @@ -1712,7 +1714,7 @@ def p_s_numeric_condition_if(self, t): for map in resultlist: print(map.cmd_list) - def p_s_expr_condition_if_relation(self, t): + def p_s_expr_condition_if_relation(self, t) -> None: # if({equal||during}, s_var_expr, A) """ expr : IF LPAREN T_REL_OPERATOR COMMA s_var_expr COMMA stds RPAREN @@ -1740,7 +1742,7 @@ def p_s_expr_condition_if_relation(self, t): for map in resultlist: print(map.cmd_list) - def p_s_expr_condition_elif(self, t): + def p_s_expr_condition_elif(self, t) -> None: # if(s_var_expr, A, B) """ expr : IF LPAREN s_var_expr COMMA stds COMMA stds RPAREN @@ -1773,7 +1775,7 @@ def p_s_expr_condition_elif(self, t): for map in resultlist: print(map.cmd_list) - def p_s_numeric_condition_elif(self, t): + def p_s_numeric_condition_elif(self, t) -> None: # if(s_var_expr, 1, 2) # if(A == 5, 10, 0) """ @@ -1821,7 +1823,7 @@ def p_s_numeric_condition_elif(self, t): for map in resultlist: print(map.cmd_list) - def p_s_numeric_expr_condition_elif(self, t): + def p_s_numeric_expr_condition_elif(self, t) -> None: # if(s_var_expr, 1, A) # if(A == 5 && C > 5, A, null()) """ @@ -1876,7 +1878,7 @@ def p_s_numeric_expr_condition_elif(self, t): for map in resultlist: print(map.cmd_list) - def p_s_numeric_expr_condition_elif_relation(self, t): + def p_s_numeric_expr_condition_elif_relation(self, t) -> 
None: # if({during},s_var_expr, 1, A) # if({during}, A == 5, A, null()) """ @@ -1934,7 +1936,7 @@ def p_s_numeric_expr_condition_elif_relation(self, t): for map in resultlist: print(map.cmd_list) - def p_s_expr_condition_elif_relation(self, t): + def p_s_expr_condition_elif_relation(self, t) -> None: # if({equal||during}, s_var_expr, A, B) """ expr : IF LPAREN T_REL_OPERATOR COMMA s_var_expr COMMA stds COMMA stds RPAREN @@ -1970,7 +1972,7 @@ def p_s_expr_condition_elif_relation(self, t): for map in resultlist: print(map.cmd_list) - def p_ts_var_expr1(self, t): + def p_ts_var_expr1(self, t) -> None: # Combination of spatial and temporal conditional expressions. # Examples: # A <= 2 || start_date <= 2013-01-01 @@ -2016,7 +2018,7 @@ def p_ts_var_expr1(self, t): t[0] = resultlist - def p_hash_operation(self, t): + def p_hash_operation(self, t) -> None: # Calculate the number of maps within an interval of another map from a # second space time dataset. # A # B diff --git a/python/grass/temporal/temporal_topology_dataset_connector.py b/python/grass/temporal/temporal_topology_dataset_connector.py index a296d9d4d78..284271cf031 100644 --- a/python/grass/temporal/temporal_topology_dataset_connector.py +++ b/python/grass/temporal/temporal_topology_dataset_connector.py @@ -112,10 +112,10 @@ class TemporalTopologyDatasetConnector: """ - def __init__(self): + def __init__(self) -> None: self.reset_temporal_topology() - def reset_temporal_topology(self): + def reset_temporal_topology(self) -> None: """Reset any information about temporal topology""" self._temporal_topology = {} self._has_temporal_topology = False @@ -205,11 +205,11 @@ def get_number_of_temporal_relations(self): return relations - def set_temporal_topology_build_true(self): + def set_temporal_topology_build_true(self) -> None: """Same as name""" self._has_temporal_topology = True - def set_temporal_topology_build_false(self): + def set_temporal_topology_build_false(self) -> None: """Same as name""" self._has_temporal_topology = False @@ -217,7 +217,7 @@ def is_temporal_topology_build(self): """Check if the temporal topology was build""" return self._has_temporal_topology - def set_next(self, map): + def set_next(self, map) -> None: """Set the map that is temporally as closest located after this map. Temporally located means that the start time of the "next" map is @@ -229,7 +229,7 @@ def set_next(self, map): """ self._temporal_topology["NEXT"] = map - def set_prev(self, map): + def set_prev(self, map) -> None: """Set the map that is temporally as closest located before this map. 
Temporally located means that the start time of the "previous" map @@ -261,7 +261,7 @@ def prev(self): return None return self._temporal_topology["PREV"] - def append_equal(self, map): + def append_equal(self, map) -> None: """Append a map with equivalent temporal extent as this map :param map: This object should be of type AbstractMapDataset @@ -281,7 +281,7 @@ def get_equal(self): return None return self._temporal_topology["EQUAL"] - def append_starts(self, map): + def append_starts(self, map) -> None: """Append a map that this map temporally starts with :param map: This object should be of type AbstractMapDataset @@ -300,7 +300,7 @@ def get_starts(self): return None return self._temporal_topology["STARTS"] - def append_started(self, map): + def append_started(self, map) -> None: """Append a map that this map temporally started with :param map: This object should be of type AbstractMapDataset @@ -319,7 +319,7 @@ def get_started(self): return None return self._temporal_topology["STARTED"] - def append_finishes(self, map): + def append_finishes(self, map) -> None: """Append a map that this map temporally finishes with :param map: This object should be of type AbstractMapDataset @@ -338,7 +338,7 @@ def get_finishes(self): return None return self._temporal_topology["FINISHES"] - def append_finished(self, map): + def append_finished(self, map) -> None: """Append a map that this map temporally finished with :param map: This object should be of type AbstractMapDataset @@ -357,7 +357,7 @@ def get_finished(self): return None return self._temporal_topology["FINISHED"] - def append_overlaps(self, map): + def append_overlaps(self, map) -> None: """Append a map that this map temporally overlaps :param map: This object should be of type AbstractMapDataset @@ -376,7 +376,7 @@ def get_overlaps(self): return None return self._temporal_topology["OVERLAPS"] - def append_overlapped(self, map): + def append_overlapped(self, map) -> None: """Append a map that this map temporally overlapped :param map: This object should be of type AbstractMapDataset @@ -395,7 +395,7 @@ def get_overlapped(self): return None return self._temporal_topology["OVERLAPPED"] - def append_follows(self, map): + def append_follows(self, map) -> None: """Append a map that this map temporally follows :param map: This object should be of type AbstractMapDataset @@ -414,7 +414,7 @@ def get_follows(self): return None return self._temporal_topology["FOLLOWS"] - def append_precedes(self, map): + def append_precedes(self, map) -> None: """Append a map that this map temporally precedes :param map: This object should be of type AbstractMapDataset @@ -433,7 +433,7 @@ def get_precedes(self): return None return self._temporal_topology["PRECEDES"] - def append_during(self, map): + def append_during(self, map) -> None: """Append a map that this map is temporally located during This includes temporal relationships starts and finishes @@ -454,7 +454,7 @@ def get_during(self): return None return self._temporal_topology["DURING"] - def append_contains(self, map): + def append_contains(self, map) -> None: """Append a map that this map temporally contains This includes temporal relationships started and finished @@ -475,7 +475,7 @@ def get_contains(self): return None return self._temporal_topology["CONTAINS"] - def _generate_map_list_string(self, map_list, line_wrap=True): + def _generate_map_list_string(self, map_list, line_wrap: bool = True): count = 0 string = "" for map_ in map_list: @@ -503,7 +503,7 @@ def _generate_map_list_string(self, map_list, 
line_wrap=True): finishes = property(fget=get_finishes, fset=append_finishes) finished = property(fget=get_finished, fset=append_finished) - def print_temporal_topology_info(self): + def print_temporal_topology_info(self) -> None: """Print information about this class in human readable style""" print( @@ -570,7 +570,7 @@ def print_temporal_topology_info(self): + self._generate_map_list_string(self.finished) ) - def print_temporal_topology_shell_info(self): + def print_temporal_topology_shell_info(self) -> None: """Print information about this class in shell style""" if self.next() is not None: diff --git a/python/grass/temporal/temporal_vector_algebra.py b/python/grass/temporal/temporal_vector_algebra.py index 2f28da0c7e5..1cf981324f2 100644 --- a/python/grass/temporal/temporal_vector_algebra.py +++ b/python/grass/temporal/temporal_vector_algebra.py @@ -42,6 +42,8 @@ """ +from __future__ import annotations + try: from ply import yacc except ImportError: @@ -66,7 +68,7 @@ class TemporalVectorAlgebraLexer(TemporalAlgebraLexer): """Lexical analyzer for the GRASS GIS temporal vector algebra""" - def __init__(self): + def __init__(self) -> None: TemporalAlgebraLexer.__init__(self) # Buffer functions from v.buffer @@ -142,7 +144,13 @@ class TemporalVectorAlgebraParser(TemporalAlgebraParser): ), # 2 ) - def __init__(self, pid=None, run=False, debug=True, spatial=False): + def __init__( + self, + pid: int | None = None, + run: bool = False, + debug: bool = True, + spatial: bool = False, + ) -> None: TemporalAlgebraParser.__init__(self, pid, run, debug, spatial) self.m_overlay = pygrass.Module("v.overlay", quiet=True, run_=False) @@ -151,7 +159,7 @@ def __init__(self, pid=None, run=False, debug=True, spatial=False): self.m_mremove = pygrass.Module("g.remove", quiet=True, run_=False) self.m_buffer = pygrass.Module("v.buffer", quiet=True, run_=False) - def parse(self, expression, basename=None, overwrite=False): + def parse(self, expression, basename: str | None = None, overwrite: bool = False): # Check for space time dataset type definitions from temporal algebra lx = TemporalVectorAlgebraLexer() lx.build() @@ -183,15 +191,15 @@ def build_spatio_temporal_topology_list( maplistA, maplistB=None, topolist=["EQUAL"], - assign_val=False, - count_map=False, - compare_bool=False, - compare_cmd=False, + assign_val: bool = False, + count_map: bool = False, + compare_bool: bool = False, + compare_cmd: bool = False, compop=None, aggregate=None, - new=False, - convert=False, - overlay_cmd=False, + new: bool = False, + convert: bool = False, + overlay_cmd: bool = False, ): """Build temporal topology for two space time data sets, copy map objects for given relation into map list. @@ -205,9 +213,9 @@ def build_spatio_temporal_topology_list( :param count_map: Boolean if the number of topological related maps should be returned. :param compare_bool: Boolean for comparing boolean map values based on - related map list and compariosn operator. + related map list and comparison operator. :param compare_cmd: Boolean for comparing command list values based on - related map list and compariosn operator. + related map list and comparison operator. :param compop: Comparison operator, && or ||. :param aggregate: Aggregation operator for relation map list, & or |. :param new: Boolean if new temporary maps should be created. @@ -539,9 +547,9 @@ def p_statement_assign(self, t): ) for map_i in register_list: # Check if modules should be executed from command list. 
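The new "from __future__ import annotations" line is paired with the PEP 604 unions (pid: int | None) introduced in the signatures below, presumably so the module keeps importing on Python versions older than 3.10: with postponed evaluation the annotations are stored as strings and never evaluated at import time. A minimal sketch of that mechanism (start_parser is a hypothetical helper, not part of grass.temporal):

from __future__ import annotations


def start_parser(pid: int | None = None, run: bool = False) -> None:
    # The `int | None` union lives only in the annotation, which is not
    # evaluated at runtime thanks to the __future__ import above.
    print(f"pid={pid}, run={run}")


start_parser()          # pid=None, run=False
start_parser(pid=42)    # pid=42, run=False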
+ map_i.load() if hasattr(map_i, "cmd_list") or hasattr(map_i, "is_new"): # Get meta data from grass database. - map_i.load() if map_i.is_in_db(dbif=dbif) and self.overwrite: # Update map in temporal database. map_i.update_all(dbif=dbif) @@ -560,7 +568,7 @@ def p_statement_assign(self, t): map_i.insert(dbif=dbif) else: # Map is original from an input STVDS - map_i.load() + pass # Register map in result space time dataset. if self.debug: print(map_i.get_temporal_extent_as_tuple()) @@ -573,7 +581,7 @@ def p_statement_assign(self, t): dbif.close() t[0] = t[3] - def p_overlay_operation(self, t): + def p_overlay_operation(self, t) -> None: """ expr : stds AND stds | expr AND stds @@ -620,7 +628,7 @@ def p_overlay_operation(self, t): if self.debug: print(str(t[1]) + t[2] + str(t[3])) - def p_overlay_operation_relation(self, t): + def p_overlay_operation_relation(self, t) -> None: """ expr : stds T_OVERLAY_OPERATOR stds | expr T_OVERLAY_OPERATOR stds @@ -651,7 +659,7 @@ def p_overlay_operation_relation(self, t): if self.debug: print(str(t[1]) + t[2] + str(t[3])) - def p_buffer_operation(self, t): + def p_buffer_operation(self, t) -> None: """ expr : buff_function LPAREN stds COMMA number RPAREN | buff_function LPAREN expr COMMA number RPAREN @@ -694,7 +702,7 @@ def p_buffer_operation(self, t): t[0] = resultlist - def p_buff_function(self, t): + def p_buff_function(self, t) -> None: """buff_function : BUFF_POINT | BUFF_LINE | BUFF_AREA diff --git a/python/grass/temporal/testsuite/test_register_function.py b/python/grass/temporal/testsuite/test_register_function.py index a58ec7d739a..ca2fec9d80f 100644 --- a/python/grass/temporal/testsuite/test_register_function.py +++ b/python/grass/temporal/testsuite/test_register_function.py @@ -13,14 +13,15 @@ import os import grass.script as gs -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestRasterRegisterFunctions(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" os.putenv("GRASS_OVERWRITE", "1") # Use always the current mapset as temporal database @@ -30,11 +31,11 @@ def setUpClass(cls): cls.runModule("g.region", n=80.0, s=0.0, e=120.0, w=0.0, t=1.0, b=0.0, res=10.0) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.del_temp_region() - def setUp(self): + def setUp(self) -> None: """Create the test maps and the space time raster datasets""" self.runModule( "r.mapcalc", overwrite=True, quiet=True, expression="register_map_1 = 1" @@ -68,7 +69,7 @@ def setUp(self): overwrite=True, ) - def tearDown(self): + def tearDown(self) -> None: """Remove maps from temporal database""" self.runModule( "t.unregister", @@ -86,7 +87,7 @@ def tearDown(self): self.strds_abs.delete() self.strds_rel.delete() - def test_absolute_time_strds_1(self): + def test_absolute_time_strds_1(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset """ @@ -116,7 +117,7 @@ def test_absolute_time_strds_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 3)) - def test_absolute_time_strds_2(self): + def test_absolute_time_strds_2(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset. 
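The one behavioural edit in this hunk hoists map_i.load() above the hasattr() check, so the metadata is read from the GRASS database exactly once for every registered map and the else branch collapses to pass; the net effect is unchanged, since both branches previously called load() themselves. In generic form (load, cmd_list and is_new are stand-ins for the map API, not a definitive implementation):

# Before: each branch had to load the metadata itself.
def register_before(maps):
    for m in maps:
        if hasattr(m, "cmd_list") or hasattr(m, "is_new"):
            m.load()      # get metadata from the database
            ...           # update or insert the map
        else:
            m.load()      # map comes unchanged from an input dataset


# After: load once, up front; the else branch has nothing left to do.
def register_after(maps):
    for m in maps:
        m.load()
        if hasattr(m, "cmd_list") or hasattr(m, "is_new"):
            ...           # update or insert the map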
The timestamps are set using the C-Interface beforehand, @@ -155,7 +156,7 @@ def test_absolute_time_strds_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 3)) - def test_absolute_time_strds_3(self): + def test_absolute_time_strds_3(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset. The timestamps are set via method arguments and with the c-interface. The timestamps of the @@ -188,7 +189,7 @@ def test_absolute_time_strds_3(self): self.assertEqual(start, datetime.datetime(2001, 2, 1)) self.assertEqual(end, datetime.datetime(2001, 2, 2)) - def test_absolute_time_strds_4(self): + def test_absolute_time_strds_4(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset. The timestamps are set via method arguments and with the c-interface. The timestamps of the method @@ -222,7 +223,7 @@ def test_absolute_time_strds_4(self): self.assertEqual(start, datetime.datetime(2001, 2, 1)) self.assertEqual(end, datetime.datetime(2001, 2, 2)) - def test_absolute_time_1(self): + def test_absolute_time_1(self) -> None: """Test the registration of maps with absolute time using register_maps_in_space_time_dataset() and register_map_object_list() """ @@ -260,7 +261,7 @@ def test_absolute_time_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 3)) - def test_absolute_time_2(self): + def test_absolute_time_2(self) -> None: """Test the registration of maps with absolute time using register_maps_in_space_time_dataset() and register_map_object_list() with empty map deletion @@ -305,7 +306,7 @@ def test_absolute_time_2(self): map_3 = tgis.VectorDataset("register_map_null@" + tgis.get_current_mapset()) self.assertEqual(map_3.map_exists(), False) - def test_history_raster(self): + def test_history_raster(self) -> None: """Test that raster maps are registered with the history (creator and creation time) of the raster map itself (and from a different mapset (PERMANENT) @@ -329,7 +330,7 @@ def test_history_raster(self): # Test that registered creator of the map is not the current user self.assertEqual(map_1.base.get_creator(), "helena") - def test_history_vector(self): + def test_history_vector(self) -> None: """Test that vector maps are registered with the history (creator and creation time) of the vector map itself (and from a different mapset (PERMANENT) @@ -353,7 +354,7 @@ def test_history_vector(self): # Test that registered creator of the map is not the current user self.assertTrue(map_1.base.get_creator(), "helena") - def test_absolute_time_3(self): + def test_absolute_time_3(self) -> None: """Test the registration of maps with absolute time. The timestamps are set using the C-Interface beforehand, so that the register function needs @@ -382,7 +383,7 @@ def test_absolute_time_3(self): start, end = map.get_absolute_time() self.assertEqual(start, datetime.datetime(2001, 1, 1, 18, 30, 1)) - def test_relative_time_strds_1(self): + def test_relative_time_strds_1(self) -> None: """Test the registration of maps with relative time in a space time raster dataset """ @@ -417,7 +418,7 @@ def test_relative_time_strds_1(self): self.assertEqual(end, 2) self.assertEqual(unit, "day") - def test_relative_time_strds_2(self): + def test_relative_time_strds_2(self) -> None: """Test the registration of maps with relative time in a space time raster dataset. 
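The assertions in these register tests share one shape: build a map dataset object from "name@mapset", load its metadata, and compare timestamps or existence. A condensed sketch of that verification step, assuming an initialized temporal database with register_map_1 already registered at 2001-01-01 and the empty map removed; select() on a map dataset is taken from the wider test suite and is an assumption here:

import datetime

import grass.temporal as tgis

tgis.init()
mapset = tgis.get_current_mapset()

map_1 = tgis.RasterDataset("register_map_1@" + mapset)
map_1.select()                            # read metadata from the temporal DB
start, end = map_1.get_absolute_time()
assert start == datetime.datetime(2001, 1, 1)

map_null = tgis.VectorDataset("register_map_null@" + mapset)
print(map_null.map_exists())              # False once the empty map was deleted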
The timestamps are set for the maps using the C-interface before registration. @@ -460,7 +461,7 @@ def test_relative_time_strds_2(self): self.assertEqual(end, 2000000) self.assertEqual(unit, "seconds") - def test_relative_time_1(self): + def test_relative_time_1(self) -> None: """Test the registration of maps with relative time""" tgis.register_maps_in_space_time_dataset( type="raster", @@ -486,7 +487,7 @@ def test_relative_time_1(self): self.assertEqual(end, 2) self.assertEqual(unit, "day") - def test_relative_time_2(self): + def test_relative_time_2(self) -> None: """Test the registration of maps with relative time""" tgis.register_maps_in_space_time_dataset( type="raster", @@ -512,7 +513,7 @@ def test_relative_time_2(self): self.assertEqual(end, 2000000) self.assertEqual(unit, "seconds") - def test_relative_time_3(self): + def test_relative_time_3(self) -> None: """Test the registration of maps with relative time. The timestamps are set beforehand using the C-interface. """ @@ -549,7 +550,7 @@ def test_relative_time_3(self): class TestVectorRegisterFunctions(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" os.putenv("GRASS_OVERWRITE", "1") # Use always the current mapset as temporal database @@ -559,11 +560,11 @@ def setUpClass(cls): cls.runModule("g.region", n=80.0, s=0.0, e=120.0, w=0.0, t=1.0, b=0.0, res=10.0) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.del_temp_region() - def setUp(self): + def setUp(self) -> None: """Create the test maps and the space time raster datasets""" self.runModule( "v.random", @@ -615,7 +616,7 @@ def setUp(self): overwrite=True, ) - def tearDown(self): + def tearDown(self) -> None: """Remove maps from temporal database""" self.runModule( "t.unregister", @@ -640,7 +641,7 @@ def tearDown(self): self.stvds_abs.delete() self.stvds_rel.delete() - def test_absolute_time_stvds_1(self): + def test_absolute_time_stvds_1(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset """ @@ -670,7 +671,7 @@ def test_absolute_time_stvds_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 3)) - def test_absolute_time_stvds_2(self): + def test_absolute_time_stvds_2(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset. The timestamps are set using the C-Interface beforehand, @@ -709,7 +710,7 @@ def test_absolute_time_stvds_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 3)) - def test_absolute_time_stvds_3(self): + def test_absolute_time_stvds_3(self) -> None: """Test the registration of maps with absolute time in a space time raster dataset. The timestamps are set via method arguments and with the C-interface. 
The timestamps of the method @@ -741,7 +742,7 @@ def test_absolute_time_stvds_3(self): self.assertEqual(start, datetime.datetime(2001, 2, 1)) self.assertEqual(end, datetime.datetime(2001, 2, 2)) - def test_absolute_time_1(self): + def test_absolute_time_1(self) -> None: """Register vector maps in the temporal database and in addition in a stvds using the object method @@ -781,7 +782,7 @@ def test_absolute_time_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 3)) - def test_absolute_time_2(self): + def test_absolute_time_2(self) -> None: """Register vector maps in the temporal database and in addition in a stvds using the object method deleting empty maps @@ -832,17 +833,17 @@ def test_absolute_time_2(self): class TestRegisterFails(TestCase): - def test_error_handling_1(self): + def test_error_handling_1(self) -> None: # start option is missing self.assertModuleFail( "t.register", input="test", end="2001-01-01", maps=("a", "b") ) - def test_error_handling_2(self): + def test_error_handling_2(self) -> None: # No input definition self.assertModuleFail("t.register", input="test", start="2001-01-01") - def test_error_handling_3(self): + def test_error_handling_3(self) -> None: # File and maps are mutually exclusive self.assertModuleFail( "t.register", @@ -852,17 +853,17 @@ def test_error_handling_3(self): file="maps.txt", ) - def test_error_handling_4(self): + def test_error_handling_4(self) -> None: # Increment needs start self.assertModuleFail( "t.register", input="test", increment="1 day", maps=("a", "b") ) - def test_error_handling_5(self): + def test_error_handling_5(self) -> None: # Interval needs start self.assertModuleFail("t.register", flags="i", input="test", maps=("a", "b")) - def test_error_handling_6(self): + def test_error_handling_6(self) -> None: # Increment and end are mutually exclusive self.assertModuleFail( "t.register", @@ -873,7 +874,7 @@ def test_error_handling_6(self): maps=("a", "b"), ) - def test_error_handling_7(self): + def test_error_handling_7(self) -> None: # Interval and end are mutually exclusive self.assertModuleFail( "t.register", @@ -887,7 +888,7 @@ def test_error_handling_7(self): class TestRegisterMapsetAccess(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" os.putenv("GRASS_OVERWRITE", "1") tgis.init() @@ -910,7 +911,7 @@ def setUpClass(cls): cls.del_temp_region() - def setUp(self): + def setUp(self) -> None: """Create the space time raster dataset""" self.strds_abs = tgis.open_new_stds( name="register_test_abs", @@ -958,7 +959,7 @@ def setUp(self): tgis.init() self.assertNotEqual(self.currmapset, tgis.get_current_mapset()) - def tearDown(self): + def tearDown(self) -> None: """Remove raster maps from current mapset""" # switch to old mapset @@ -986,7 +987,7 @@ def tearDown(self): ) gs.try_rmdir(mapset_path) - def test_mapset_access_1(self): + def test_mapset_access_1(self) -> None: """Test the registration of maps from a different mapset.""" self.strds_abs_2 = tgis.open_new_stds( diff --git a/python/grass/temporal/testsuite/test_temporal_doctests.py b/python/grass/temporal/testsuite/test_temporal_doctests.py index a0b2f4d2d9c..608bf9fff38 100644 --- a/python/grass/temporal/testsuite/test_temporal_doctests.py +++ b/python/grass/temporal/testsuite/test_temporal_doctests.py @@ -8,7 +8,7 @@ import grass.gunittest.case import grass.gunittest.main import grass.gunittest.utils -import grass.temporal +import 
grass.temporal as tgis doctest.DocFileCase = type( "DocFileCase", (grass.gunittest.case.TestCase,), dict(doctest.DocFileCase.__dict__) @@ -22,35 +22,31 @@ def load_tests(loader, tests, ignore): grass.gunittest.utils.do_doctest_gettext_workaround() - tests.addTests(doctest.DocTestSuite(grass.temporal.abstract_dataset)) - tests.addTests(doctest.DocTestSuite(grass.temporal.abstract_map_dataset)) - tests.addTests(doctest.DocTestSuite(grass.temporal.abstract_space_time_dataset)) - tests.addTests(doctest.DocTestSuite(grass.temporal.base)) - tests.addTests(doctest.DocTestSuite(grass.temporal.core)) - tests.addTests(doctest.DocTestSuite(grass.temporal.datetime_math)) + tests.addTests(doctest.DocTestSuite(tgis.abstract_dataset)) + tests.addTests(doctest.DocTestSuite(tgis.abstract_map_dataset)) + tests.addTests(doctest.DocTestSuite(tgis.abstract_space_time_dataset)) + tests.addTests(doctest.DocTestSuite(tgis.base)) + tests.addTests(doctest.DocTestSuite(tgis.core)) + tests.addTests(doctest.DocTestSuite(tgis.datetime_math)) # Unexpected error here - # tests.addTests(doctest.DocTestSuite(grass.temporal.list_stds)) - tests.addTests(doctest.DocTestSuite(grass.temporal.metadata)) - tests.addTests(doctest.DocTestSuite(grass.temporal.register)) - tests.addTests(doctest.DocTestSuite(grass.temporal.space_time_datasets)) - tests.addTests(doctest.DocTestSuite(grass.temporal.spatial_extent)) - tests.addTests( - doctest.DocTestSuite(grass.temporal.spatial_topology_dataset_connector) - ) - tests.addTests(doctest.DocTestSuite(grass.temporal.spatio_temporal_relationships)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_extent)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_granularity)) - tests.addTests( - doctest.DocTestSuite(grass.temporal.temporal_topology_dataset_connector) - ) + # tests.addTests(doctest.DocTestSuite(tgis.list_stds)) + tests.addTests(doctest.DocTestSuite(tgis.metadata)) + tests.addTests(doctest.DocTestSuite(tgis.register)) + tests.addTests(doctest.DocTestSuite(tgis.space_time_datasets)) + tests.addTests(doctest.DocTestSuite(tgis.spatial_extent)) + tests.addTests(doctest.DocTestSuite(tgis.spatial_topology_dataset_connector)) + tests.addTests(doctest.DocTestSuite(tgis.spatio_temporal_relationships)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_extent)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_granularity)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_topology_dataset_connector)) # Algebra is still very experimental - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_algebra)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_raster3d_algebra)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_raster_algebra)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_raster_base_algebra)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_operator)) - tests.addTests(doctest.DocTestSuite(grass.temporal.temporal_vector_algebra)) - tests.addTests(doctest.DocTestSuite(grass.temporal.c_libraries_interface)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_algebra)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_raster3d_algebra)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_raster_algebra)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_raster_base_algebra)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_operator)) + tests.addTests(doctest.DocTestSuite(tgis.temporal_vector_algebra)) + tests.addTests(doctest.DocTestSuite(tgis.c_libraries_interface)) return tests diff --git 
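The rewritten doctest loader only swaps the long grass.temporal references for the tgis alias; the mechanism itself is unittest's load_tests protocol, which extends the discovered suite with doctest.DocTestSuite objects built from each module. A self-contained sketch of the same mechanism (double() and its doctest are illustrative only):

import doctest
import sys
import unittest


def double(x):
    """Return twice x.

    >>> double(21)
    42
    """
    return 2 * x


def load_tests(loader, tests, ignore):
    # load_tests protocol: add this module's doctests (here the one in
    # double()) to the test suite unittest has already collected.
    tests.addTests(doctest.DocTestSuite(sys.modules[__name__]))
    return tests


if __name__ == "__main__":
    unittest.main()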
a/python/grass/temporal/testsuite/unittests_temporal_algebra.py b/python/grass/temporal/testsuite/unittests_temporal_algebra.py index 99ae3eaac00..30011d30f95 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_algebra.py +++ b/python/grass/temporal/testsuite/unittests_temporal_algebra.py @@ -9,16 +9,17 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalAlgebra(TestCase): """Class for testing temporal algebra""" @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -115,16 +116,16 @@ def setUpClass(cls): end="2001-01-04", ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B,C,D", quiet=True) cls.del_temp_region() - def test_temporal_select1(self): + def test_temporal_select1(self) -> None: """Testing the temporal select operator with equal relations.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -143,7 +144,7 @@ def test_temporal_select1(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_select2(self): + def test_temporal_select2(self) -> None: """Testing the temporal select operator with equal relations.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -162,7 +163,7 @@ def test_temporal_select2(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_select3(self): + def test_temporal_select3(self) -> None: """Testing the temporal select operator with equal relations.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -181,7 +182,7 @@ def test_temporal_select3(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_select_operators1(self): + def test_temporal_select_operators1(self) -> None: """Testing the temporal select operator. Including temporal relations.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -203,7 +204,7 @@ def test_temporal_select_operators1(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_select_operators2(self): + def test_temporal_select_operators2(self) -> None: """Testing the temporal select operator. Including temporal relations.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -225,7 +226,7 @@ def test_temporal_select_operators2(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_select_operators3(self): + def test_temporal_select_operators3(self) -> None: """Testing the temporal select operator. 
Including temporal relations and negation operation.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) @@ -248,7 +249,7 @@ def test_temporal_select_operators3(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_select_operators4(self): + def test_temporal_select_operators4(self) -> None: """Testing the temporal select operator. Including temporal relations and temporal operators.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) @@ -276,7 +277,7 @@ def test_temporal_select_operators4(self): self.assertEqual(result_strds.check_temporal_topology(), False) self.assertEqual(result_strds.get_granularity(), "2 days") - def test_temporal_select_operators5(self): + def test_temporal_select_operators5(self) -> None: """Testing the temporal select operator. Including temporal relations and temporal operators.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) @@ -304,7 +305,7 @@ def test_temporal_select_operators5(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "2 days") - def test_temporal_extent1(self): + def test_temporal_extent1(self) -> None: """Testing the temporal extent operators.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -326,7 +327,7 @@ def test_temporal_extent1(self): self.assertEqual(result_strds.check_temporal_topology(), False) self.assertEqual(result_strds.get_granularity(), "2 days") - def test_temporal_extent2(self): + def test_temporal_extent2(self) -> None: """Testing the temporal extent operators.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -348,7 +349,7 @@ def test_temporal_extent2(self): self.assertEqual(result_strds.check_temporal_topology(), False) self.assertEqual(result_strds.get_granularity(), "2 days") - def test_temporal_extent3(self): + def test_temporal_extent3(self) -> None: """Testing the temporal extent operators.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -375,7 +376,7 @@ def test_temporal_extent3(self): self.assertEqual(result_strds.check_temporal_topology(), False) self.assertEqual(result_strds.get_granularity(), "2 days") - def test_temporal_hash1(self): + def test_temporal_hash1(self) -> None: """Testing the hash function in conditional statement.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -397,7 +398,7 @@ def test_temporal_hash1(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_hash_operator1(self): + def test_temporal_hash_operator1(self) -> None: """Testing the hash operator function in conditional statement.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -419,7 +420,7 @@ def test_temporal_hash_operator1(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_temporal_hash_operator2(self): + def test_temporal_hash_operator2(self) -> None: """Testing the hash operator function in conditional statement.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) 
temporal_algebra_parser.parse( @@ -441,7 +442,7 @@ def test_temporal_hash_operator2(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_tmap_function1(self): + def test_tmap_function1(self) -> None: """Testing the tmap function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -463,7 +464,7 @@ def test_tmap_function1(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_tmap_function2(self): + def test_tmap_function2(self) -> None: """Testing the tmap function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -485,7 +486,7 @@ def test_tmap_function2(self): self.assertEqual(result_strds.check_temporal_topology(), True) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_merge_function1(self): + def test_merge_function1(self) -> None: """Testing the merge function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -504,7 +505,7 @@ def test_merge_function1(self): self.assertEqual(result_strds.check_temporal_topology(), False) self.assertEqual(result_strds.get_granularity(), "1 day") - def test_merge_function2(self): + def test_merge_function2(self) -> None: """Testing the merge function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser(run=True, debug=True) temporal_algebra_parser.parse( @@ -531,7 +532,7 @@ class TestTemporalAlgebraDryRun(TestCase): """Class for testing dry runs of the temporal algebra""" @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -629,12 +630,12 @@ def setUpClass(cls): ) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B,C,D", quiet=True) cls.del_temp_region() - def test_merge_function1(self): + def test_merge_function1(self) -> None: """Testing the merge function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -650,7 +651,7 @@ def test_merge_function1(self): self.assertEqual(parser_content["STDS"]["name"], "R") self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_merge_function2(self): + def test_merge_function2(self) -> None: """Testing the merge function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -666,7 +667,7 @@ def test_merge_function2(self): self.assertEqual(parser_content["STDS"]["name"], "R") self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_merge_function3(self): + def test_merge_function3(self) -> None: """Testing the merge function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -682,7 +683,7 @@ def test_merge_function3(self): self.assertEqual(parser_content["STDS"]["name"], "R") self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_shift1(self): + def test_shift1(self) -> None: """Testing the shift function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -698,7 +699,7 @@ def test_shift1(self): self.assertEqual(parser_content["STDS"]["name"], "R") 
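The dry-run tests in this class construct the parser with dry_run=True, so parse() only assembles information about the would-be result instead of writing it, and the returned dictionary is then inspected via pc["STDS"]["name"] and pc["STDS"]["stdstype"]. A hedged sketch of that round trip, assuming an initialized temporal database with STRDS "A" and "B" registered:

import grass.temporal as tgis

tgis.init()

# Dry run: evaluate the expression but only report what would be produced.
ta = tgis.TemporalAlgebraParser(run=True, debug=False, dry_run=True)
pc = ta.parse(expression="R = A : B", basename="r", overwrite=True)

# The returned dictionary describes the prospective result dataset.
print(pc["STDS"]["name"])       # "R"
print(pc["STDS"]["stdstype"])   # "strds"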
self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_shift2(self): + def test_shift2(self) -> None: """Testing the shift function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -714,7 +715,7 @@ def test_shift2(self): self.assertEqual(parser_content["STDS"]["name"], "R") self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_buffer1(self): + def test_buffer1(self) -> None: """Testing the shift function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -730,7 +731,7 @@ def test_buffer1(self): self.assertEqual(parser_content["STDS"]["name"], "R") self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_buff2(self): + def test_buff2(self) -> None: """Testing the shift function.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True @@ -746,7 +747,7 @@ def test_buff2(self): self.assertEqual(parser_content["STDS"]["name"], "R") self.assertEqual(parser_content["STDS"]["stdstype"], "strds") - def test_time_constant(self): + def test_time_constant(self) -> None: """Testing the time constant functions.""" temporal_algebra_parser = tgis.TemporalAlgebraParser( run=True, debug=False, dry_run=True diff --git a/python/grass/temporal/testsuite/unittests_temporal_algebra_grs.py b/python/grass/temporal/testsuite/unittests_temporal_algebra_grs.py index d7a81bcb34d..e9ea1c3251c 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_algebra_grs.py +++ b/python/grass/temporal/testsuite/unittests_temporal_algebra_grs.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalAlgebraGranularity(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -131,17 +132,17 @@ def setUpClass(cls): end="2001-01-04", ) - def tearDown(self): + def tearDown(self) -> None: pass # self.runModule("t.remove", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" # cls.runModule("t.remove", flags="rf", inputs="A,B,C,D", quiet=True) cls.del_temp_region() - def test_common_granularity_1(self): + def test_common_granularity_1(self) -> None: """Testing the common granularity function.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = A : B" @@ -173,7 +174,7 @@ def test_common_granularity_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 month") - def test_common_granularity_2(self): + def test_common_granularity_2(self) -> None: """Testing the common granularity function year to month samping.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = A : C" @@ -192,7 +193,7 @@ def test_common_granularity_2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 month") - def test_common_granularity_3(self): + def test_common_granularity_3(self) -> None: """Testing the common granularity function with gaps.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = A : D" @@ -211,7 +212,7 @@ def test_common_granularity_3(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 month") - def 
test_common_granularity_4(self): + def test_common_granularity_4(self) -> None: """Testing the common granularity function year to month with gaps.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = C : D" @@ -230,7 +231,7 @@ def test_common_granularity_4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 month") - def test_common_granularity_4(self): + def test_common_granularity_4(self) -> None: """Testing the common granularity function year to month with gaps.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = C : D" @@ -249,7 +250,7 @@ def test_common_granularity_4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 month") - def test_common_granularity_5(self): + def test_common_granularity_5(self) -> None: """Testing the common granularity function year to month with gaps.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = A : C : D" @@ -268,7 +269,7 @@ def test_common_granularity_5(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 month") - def test_common_granularity_6(self): + def test_common_granularity_6(self) -> None: """Testing the common granularity function year to month with gaps.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) expr = "R = if(start_month(A) > 2, A : C : D)" diff --git a/python/grass/temporal/testsuite/unittests_temporal_algebra_mixed_stds.py b/python/grass/temporal/testsuite/unittests_temporal_algebra_mixed_stds.py index ce911839049..81ad3b47b64 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_algebra_mixed_stds.py +++ b/python/grass/temporal/testsuite/unittests_temporal_algebra_mixed_stds.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalAlgebraMixedDatasets(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -92,14 +93,14 @@ def setUpClass(cls): ) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", type="str3ds", inputs="A", quiet=True) cls.runModule("t.remove", flags="rf", type="strds", inputs="B", quiet=True) cls.runModule("t.remove", flags="rf", type="stvds", inputs="C", quiet=True) cls.del_temp_region() - def test_temporal_select_operators1(self): + def test_temporal_select_operators1(self) -> None: """Testing the temporal select operator. Including temporal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -134,7 +135,7 @@ def test_temporal_select_operators1(self): self.assertEqual(pc["STDS"]["name"], "R") self.assertEqual(pc["STDS"]["stdstype"], "str3ds") - def test_temporal_select_operators2(self): + def test_temporal_select_operators2(self) -> None: """Testing the temporal select operator. Including temporal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -169,7 +170,7 @@ def test_temporal_select_operators2(self): self.assertEqual(pc["STDS"]["name"], "R") self.assertEqual(pc["STDS"]["stdstype"], "str3ds") - def test_temporal_select_operators3(self): + def test_temporal_select_operators3(self) -> None: """Testing the temporal select operator. 
Including temporal relations and negation operation.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) @@ -191,7 +192,7 @@ def test_temporal_select_operators3(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_select_operators4(self): + def test_temporal_select_operators4(self) -> None: """Testing the temporal select operator. Including temporal relations and temporal operators.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) @@ -211,7 +212,7 @@ def test_temporal_select_operators4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_select_operators5(self): + def test_temporal_select_operators5(self) -> None: """Testing the temporal select operator. Including temporal relations and temporal operators.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) @@ -231,7 +232,7 @@ def test_temporal_select_operators5(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_hash_operator1(self): + def test_temporal_hash_operator1(self) -> None: """Testing the hash operator function in conditional statement.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -252,7 +253,7 @@ def test_temporal_hash_operator1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_hash_operator2(self): + def test_temporal_hash_operator2(self) -> None: """Testing the hash operator function in conditional statement.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -273,7 +274,7 @@ def test_temporal_hash_operator2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_different_stds_handling1(self): + def test_different_stds_handling1(self) -> None: """Testing the handling of different stds types as output.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -294,7 +295,7 @@ def test_different_stds_handling1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_different_stds_handling2(self): + def test_different_stds_handling2(self) -> None: """Testing the handling of different stds types as output.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True, dry_run=True) pc = ta.parse( diff --git a/python/grass/temporal/testsuite/unittests_temporal_conditionals.py b/python/grass/temporal/testsuite/unittests_temporal_conditionals.py index 800c7d30bf6..eacc31bd1f1 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_conditionals.py +++ b/python/grass/temporal/testsuite/unittests_temporal_conditionals.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalConditionals(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -134,16 +135,16 @@ def setUpClass(cls): end="2001-01-04", ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B,C,D,E", 
quiet=True) cls.del_temp_region() - def test_temporal_condition_1(self): + def test_temporal_condition_1(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -163,7 +164,7 @@ def test_temporal_condition_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_2(self): + def test_temporal_condition_2(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse(expression="R = if(td(A) == 1, A)", basename="r", overwrite=True) @@ -179,7 +180,7 @@ def test_temporal_condition_2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_3(self): + def test_temporal_condition_3(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -199,7 +200,7 @@ def test_temporal_condition_3(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_4(self): + def test_temporal_condition_4(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -219,7 +220,7 @@ def test_temporal_condition_4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_5(self): + def test_temporal_condition_5(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -237,7 +238,7 @@ def test_temporal_condition_5(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_6(self): + def test_temporal_condition_6(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -257,7 +258,7 @@ def test_temporal_condition_6(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_7(self): + def test_temporal_condition_7(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -277,7 +278,7 @@ def test_temporal_condition_7(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_condition_8(self): + def test_temporal_condition_8(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -297,7 +298,7 @@ def test_temporal_condition_8(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_condition_9(self): + def test_temporal_condition_9(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -317,7 +318,7 @@ def test_temporal_condition_9(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_condition_10(self): + def test_temporal_condition_10(self) -> None: """Testing the temporal 
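All of these suites sit on the same grass.gunittest scaffold: a TestCase subclass that saves a temporary region in setUpClass, drives modules through runModule, cleans up in tearDownClass, and asserts invalid module calls with assertModuleFail. A minimal, hypothetical suite in that style (class and test names are illustrative; the t.register call is the one used in TestRegisterFails above):

from grass.gunittest.case import TestCase
from grass.gunittest.main import test


class ExampleScaffold(TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        cls.use_temp_region()                # saved and restored around the suite
        cls.runModule("g.region", res=10.0)  # fails the suite if the module fails

    @classmethod
    def tearDownClass(cls) -> None:
        cls.del_temp_region()

    def test_missing_start_fails(self) -> None:
        # assertModuleFail expects the module to exit with an error.
        self.assertModuleFail(
            "t.register", input="test", end="2001-01-01", maps=("a", "b")
        )


if __name__ == "__main__":
    test()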
select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -337,7 +338,7 @@ def test_temporal_condition_10(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_condition_11(self): + def test_temporal_condition_11(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -357,7 +358,7 @@ def test_temporal_condition_11(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_condition_12(self): + def test_temporal_condition_12(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -377,7 +378,7 @@ def test_temporal_condition_12(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_conditional_13(self): + def test_temporal_conditional_13(self) -> None: """Testing the hash operator function in conditional statement.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -398,7 +399,7 @@ def test_temporal_conditional_13(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_else_1(self): + def test_temporal_condition_else_1(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -418,7 +419,7 @@ def test_temporal_condition_else_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_else_2(self): + def test_temporal_condition_else_2(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -438,7 +439,7 @@ def test_temporal_condition_else_2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_else_3(self): + def test_temporal_condition_else_3(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( @@ -458,7 +459,7 @@ def test_temporal_condition_else_3(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_condition_else_4(self): + def test_temporal_condition_else_4(self) -> None: """Testing the temporal select operator with equal relations.""" ta = tgis.TemporalAlgebraParser(run=True, debug=True) ta.parse( diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster3d_algebra.py b/python/grass/temporal/testsuite/unittests_temporal_raster3d_algebra.py index f2d8a215bdb..090d9638f22 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_raster3d_algebra.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster3d_algebra.py @@ -10,14 +10,15 @@ import datetime import grass.script as gs -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalRaster3dAlgebra(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) 
cls.use_temp_region() @@ -49,16 +50,16 @@ def setUpClass(cls): interval=True, ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", type="str3ds", flags="rf", inputs="D", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" # cls.runModule("t.remove", type="str3ds", flags="rf", inputs="A", quiet=True) cls.del_temp_region() - def test_temporal_neighbors_1(self): + def test_temporal_neighbors_1(self) -> None: """Simple temporal neighborhood computation test""" A = tgis.open_old_stds("A", type="str3ds") A.print_info() @@ -76,7 +77,7 @@ def test_temporal_neighbors_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 4)) - def test_temporal_neighbors_2(self): + def test_temporal_neighbors_2(self) -> None: """Simple temporal neighborhood computation test""" A = tgis.open_old_stds("A", type="str3ds") A.print_info() diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra.py b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra.py index 430c9c7c18a..6dcf9267632 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalRasterAlgebra(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -116,17 +117,17 @@ def setUpClass(cls): end="2001-01-04", ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", flags="rf", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B,C,D", quiet=True) cls.runModule("t.unregister", maps="singletmap", quiet=True) cls.del_temp_region() - def test_temporal_extent1(self): + def test_temporal_extent1(self) -> None: """Testing the temporal extent operators.""" ta = tgis.TemporalRasterAlgebraParser(run=True, debug=True) ta.parse(expression="R = A {:,during,r} C", basename="r", overwrite=True) @@ -152,7 +153,7 @@ def test_temporal_extent1(self): self.assertEqual(pc["STDS"]["name"], "R") self.assertEqual(pc["STDS"]["stdstype"], "strds") - def test_temporal_conditional_time_dimension_bug(self): + def test_temporal_conditional_time_dimension_bug(self) -> None: """Testing the conditional time dimension bug, that uses the time dimension of the conditional statement instead the time dimension of the then/else statement.""" @@ -174,7 +175,7 @@ def test_temporal_conditional_time_dimension_bug(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_simple_arith_hash_1(self): + def test_simple_arith_hash_1(self) -> None: """Simple arithmetic test including the hash operator using the granularity option for map name creation @@ -214,7 +215,7 @@ def test_simple_arith_hash_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_td_1(self): + def test_simple_arith_td_1(self) -> None: """Simple arithmetic test with time suffix option R = A + td(A) @@ -254,7 +255,7 @@ def 
test_simple_arith_td_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_td_2(self): + def test_simple_arith_td_2(self) -> None: """Simple arithmetic test R = A / td(A) @@ -280,7 +281,7 @@ def test_simple_arith_td_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_td_3(self): + def test_simple_arith_td_3(self) -> None: """Simple arithmetic test R = A {+,equal} td(A) @@ -307,7 +308,7 @@ def test_simple_arith_td_3(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_td_4(self): + def test_simple_arith_td_4(self) -> None: """Simple arithmetic test R = A {/, equal} td(A) @@ -334,7 +335,7 @@ def test_simple_arith_td_4(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_if_1(self): + def test_simple_arith_if_1(self) -> None: """Simple arithmetic test with if condition R = if({equal}, start_date(A) >= "2001-01-02", A + A) @@ -362,7 +363,7 @@ def test_simple_arith_if_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_if_2(self): + def test_simple_arith_if_2(self) -> None: """Simple arithmetic test with if condition R = if({equal}, A#A == 1, A - A) @@ -396,7 +397,7 @@ def test_simple_arith_if_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_complex_arith_if_1(self): + def test_complex_arith_if_1(self) -> None: """Complex arithmetic test with if condition R = if(start_date(A) < "2001-01-03" && A#A == 1, A{+, starts,l}C, A{+, finishes,l}C) @@ -426,7 +427,7 @@ def test_complex_arith_if_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 4)) - def test_simple_arith_1(self): + def test_simple_arith_1(self) -> None: """Simple arithmetic test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -442,7 +443,7 @@ def test_simple_arith_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_simple_arith_2(self): + def test_simple_arith_2(self) -> None: """Simple arithmetic test that creates an empty strds""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -452,7 +453,7 @@ def test_simple_arith_2(self): D.select() self.assertEqual(D.metadata.get_number_of_maps(), 0) - def test_simple_arith_3(self): + def test_simple_arith_3(self) -> None: """Simple arithmetic test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A / A + A*A/A", basename="r", overwrite=True) @@ -466,7 +467,7 @@ def test_simple_arith_3(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_temporal_intersection_1(self): + def test_temporal_intersection_1(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A {+,equal,i} B", basename="r", overwrite=True) @@ -474,7 +475,7 @@ def test_temporal_intersection_1(self): D.select() self.assertEqual(D.metadata.get_number_of_maps(), 0) - def test_temporal_intersection_2(self): + def 
test_temporal_intersection_2(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A {+,during,i} B", basename="r", overwrite=True) @@ -488,7 +489,7 @@ def test_temporal_intersection_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_temporal_intersection_3(self): + def test_temporal_intersection_3(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A {+,starts,i} B", basename="r", overwrite=True) @@ -502,7 +503,7 @@ def test_temporal_intersection_3(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 4)) - def test_temporal_intersection_4(self): + def test_temporal_intersection_4(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -518,7 +519,7 @@ def test_temporal_intersection_4(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_temporal_intersection_5(self): + def test_temporal_intersection_5(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -534,7 +535,7 @@ def test_temporal_intersection_5(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_temporal_intersection_6(self): + def test_temporal_intersection_6(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = B {+,overlaps,u} C", basename="r", overwrite=True) @@ -548,7 +549,7 @@ def test_temporal_intersection_6(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 4)) - def test_temporal_intersection_7(self): + def test_temporal_intersection_7(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = B {+,overlapped,u} C", basename="r", overwrite=True) @@ -562,7 +563,7 @@ def test_temporal_intersection_7(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_temporal_intersection_8(self): + def test_temporal_intersection_8(self) -> None: """Simple temporal intersection test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -580,7 +581,7 @@ def test_temporal_intersection_8(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 1, 5)) - def test_temporal_neighbors_1(self): + def test_temporal_neighbors_1(self) -> None: """Simple temporal neighborhood computation test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A[-1] + A[1]", basename="r", overwrite=True) @@ -594,7 +595,7 @@ def test_temporal_neighbors_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 4)) - def test_temporal_neighbors_2(self): + def test_temporal_neighbors_2(self) -> None: """Simple temporal neighborhood computation test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A[0,0,-1] + A[0,0,1]", basename="r", overwrite=True) @@ -608,7 +609,7 @@ def 
test_temporal_neighbors_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 1, 4)) - def test_tmap_function1(self): + def test_tmap_function1(self) -> None: """Testing the tmap function.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = tmap(singletmap)", basename="r", overwrite=True) @@ -625,7 +626,7 @@ def test_tmap_function1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_tmap_function2(self): + def test_tmap_function2(self) -> None: """Testing the tmap function.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = tmap(singletmap) + 1", basename="r", overwrite=True) @@ -642,7 +643,7 @@ def test_tmap_function2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_map_function1(self): + def test_map_function1(self) -> None: """Testing the map function.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = map(singlemap) + A", basename="r", overwrite=True) @@ -659,7 +660,7 @@ def test_map_function1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_map_function2(self): + def test_map_function2(self) -> None: """Testing the map function.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A * map(singlemap)", basename="r", overwrite=True) @@ -676,8 +677,8 @@ def test_map_function2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_select(self): - """Testing the temporal select operator.""" + def test_temporal_select_same_left_right(self) -> None: + """Testing the temporal select operator with the same map for left and right.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A : A", basename="r", overwrite=True) @@ -692,7 +693,7 @@ def test_temporal_select(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_select(self): + def test_temporal_select(self) -> None: """Testing the temporal select operator.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A : D", basename="r", overwrite=True) @@ -708,7 +709,7 @@ def test_temporal_select(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_select_operators1(self): + def test_temporal_select_operators1(self) -> None: """Testing the temporal select operator. Including temporal relations.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A : D", basename="r", overwrite=True) @@ -724,7 +725,7 @@ def test_temporal_select_operators1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_select_operators2(self): + def test_temporal_select_operators2(self) -> None: """Testing the temporal select operator. 
Including temporal relations.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A {!:,during} C", basename="r", overwrite=True) @@ -740,7 +741,7 @@ def test_temporal_select_operators2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_select_operators3(self): + def test_temporal_select_operators3(self) -> None: """Testing the temporal select operator. Including temporal relations and different temporal operators (lr|+&)""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -757,7 +758,7 @@ def test_temporal_select_operators3(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_select_operators4(self): + def test_temporal_select_operators4(self) -> None: """Testing the temporal select operator. Including temporal relations and different temporal operators (lr|+&)""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -779,7 +780,7 @@ def test_temporal_select_operators4(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_hash_operator1(self): + def test_temporal_hash_operator1(self) -> None: """Testing the temporal hash operator in the raster algebra.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = if(A # D == 1, A)", basename="r", overwrite=True) @@ -795,7 +796,7 @@ def test_temporal_hash_operator1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_hash_operator2(self): + def test_temporal_hash_operator2(self) -> None: """Testing the temporal hash operator in the raster algebra.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A # D", basename="r", overwrite=True) @@ -811,7 +812,7 @@ def test_temporal_hash_operator2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_hash_operator3(self): + def test_temporal_hash_operator3(self) -> None: """Testing the temporal hash operator in the raster algebra.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = C {#,contains} A", basename="r", overwrite=True) @@ -827,7 +828,7 @@ def test_temporal_hash_operator3(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_hash_operator4(self): + def test_temporal_hash_operator4(self) -> None: """Testing the temporal hash operator in the raster algebra.""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -847,7 +848,7 @@ def test_temporal_hash_operator4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_raster_arithmetic_relation_1(self): + def test_raster_arithmetic_relation_1(self) -> None: """Arithmetic test with temporal intersection""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = B {+,contains,l} A ", basename="r", overwrite=True) @@ -863,7 +864,7 @@ def test_raster_arithmetic_relation_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_raster_arithmetic_relation_2(self): + def test_raster_arithmetic_relation_2(self) -> None: """Arithmetic test with temporal intersection""" tra = 
tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = B {*,contains,l} A ", basename="r", overwrite=True) @@ -879,7 +880,7 @@ def test_raster_arithmetic_relation_2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_raster_arithmetic_relation_3(self): + def test_raster_arithmetic_relation_3(self) -> None: """Arithmetic test with temporal intersection""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = B {+,contains,l} A ", basename="r", overwrite=True) @@ -895,7 +896,7 @@ def test_raster_arithmetic_relation_3(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "2 days") - def test_raster_arithmetic_relation_4(self): + def test_raster_arithmetic_relation_4(self) -> None: """Arithmetic test with temporal intersection""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = B {+,contains,r} A ", basename="r", overwrite=True) @@ -911,7 +912,7 @@ def test_raster_arithmetic_relation_4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_raster_arithmetic_relation_5(self): + def test_raster_arithmetic_relation_5(self) -> None: """Complex arithmetic test with temporal intersection""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -931,7 +932,7 @@ def test_raster_arithmetic_relation_5(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_capacity_1(self): + def test_capacity_1(self) -> None: """Arithmetic test with temporal intersection""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = (((((((A + A) - A) * A) / A) % A) - td(A)) - (A # A))" diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_equal_ts.py b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_equal_ts.py index f96ed12899f..d9e15fc2174 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_equal_ts.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_equal_ts.py @@ -9,15 +9,16 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test from grass.gunittest.utils import xfail_windows +import grass.temporal as tgis + class TestTemporalRasterAlgebraImplicitAggregation(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -55,18 +56,18 @@ def setUpClass(cls): type="raster", name=None, maps="singletmap", start="2001-01-01" ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", flags="rf", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A", quiet=True) cls.runModule("t.unregister", maps="singletmap", quiet=True) cls.del_temp_region() @xfail_windows - def test_simple_operator(self): + def test_simple_operator(self) -> None: """Test implicit aggregation R = A + A @@ -93,7 +94,7 @@ def test_simple_operator(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), None) - def test_complex_operator(self): + def test_complex_operator(self) -> None: """Test implicit 
aggregation R = A {+,equal,l} A @@ -120,7 +121,7 @@ def test_complex_operator(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), None) - def test_single_map_complex_operator(self): + def test_single_map_complex_operator(self) -> None: """Test implicit aggregation R = singletmap {+,equal,l} A @@ -152,7 +153,7 @@ def test_single_map_complex_operator(self): self.assertEqual(D.get_granularity(), None) @xfail_windows - def test_single_map_simple_operator(self): + def test_single_map_simple_operator(self) -> None: """Test implicit aggregation R = singletmap + A @@ -179,7 +180,7 @@ def test_single_map_simple_operator(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), None) - def test_single_map_complex_operator_right_ts(self): + def test_single_map_complex_operator_right_ts(self) -> None: """Test implicit aggregation TODO: Is this the correct result? Implicit aggregation and full permutation? diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_grs.py b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_grs.py index 273176d14fa..4e575d97377 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_grs.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_grs.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalRasterAlgebra(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -141,20 +142,19 @@ def setUpClass(cls): end="2001-07-01", ) - def tearDown(self): + def tearDown(self) -> None: return self.runModule("t.remove", flags="rf", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: + """Remove the temporary region""" return - """Remove the temporary region - """ cls.runModule("t.remove", flags="rf", inputs="A,B,C,D", quiet=True) cls.runModule("t.unregister", maps="singletmap", quiet=True) cls.del_temp_region() - def test_1(self): + def test_1(self) -> None: """Simple arithmetik test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = if(C == 9, A - 1)" @@ -189,7 +189,7 @@ def test_1(self): self.assertEqual(pc["STDS"]["name"], "R") self.assertEqual(pc["STDS"]["stdstype"], "strds") - def test_2(self): + def test_2(self) -> None: """Simple arithmetik test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = A + B + C" @@ -224,7 +224,7 @@ def test_2(self): self.assertEqual(pc["STDS"]["name"], "R") self.assertEqual(pc["STDS"]["stdstype"], "strds") - def test_3(self): + def test_3(self) -> None: """Simple arithmetik test with null map""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = A + B + C + tmap(nullmap)" @@ -259,7 +259,7 @@ def test_3(self): self.assertEqual(pc["STDS"]["name"], "R") self.assertEqual(pc["STDS"]["stdstype"], "strds") - def test_4(self): + def test_4(self) -> None: """Simple arithmetik test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = if(D == 11, A - 1, A + 1)" @@ -281,7 +281,7 @@ def test_4(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_simple_arith_hash_1(self): + def test_simple_arith_hash_1(self) 
-> None: """Simple arithmetic test including the hash operator""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = A + (A # A)", basename="r", overwrite=True) @@ -295,7 +295,7 @@ def test_simple_arith_hash_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 7, 1)) - def test_simple_arith_hash_2(self): + def test_simple_arith_hash_2(self) -> None: """Simple arithmetic test including the hash operator""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = (A + A) # A", basename="r", overwrite=True) @@ -309,7 +309,7 @@ def test_simple_arith_hash_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 7, 1)) - def test_simple_arith_td_1(self): + def test_simple_arith_td_1(self) -> None: """Simple arithmetic test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = A + td(A:D)" @@ -331,7 +331,7 @@ def test_simple_arith_td_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_simple_arith_if_1(self): + def test_simple_arith_if_1(self) -> None: """Simple arithmetic test with if condition""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = 'R = if(start_date(A) >= "2001-02-01", A + A)' @@ -351,7 +351,7 @@ def test_simple_arith_if_1(self): self.assertEqual(start, datetime.datetime(2001, 2, 1)) self.assertEqual(end, datetime.datetime(2001, 7, 1)) - def test_simple_arith_if_2(self): + def test_simple_arith_if_2(self) -> None: """Simple arithmetic test with if condition""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = if(A#A == 1, A - A)" @@ -371,7 +371,7 @@ def test_simple_arith_if_2(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 7, 1)) - def test_complex_arith_if_1(self): + def test_complex_arith_if_1(self) -> None: """Complex arithmetic test with if condition""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = 'R = if(start_date(A) < "2001-03-01" && A#A == 1, A+C, A-C)' @@ -391,7 +391,7 @@ def test_complex_arith_if_1(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 7, 1)) - def test_temporal_neighbors(self): + def test_temporal_neighbors(self) -> None: """Simple temporal neighborhood computation test""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = (A[0,0,-1] : D) + (A[0,0,1] : D)" @@ -411,7 +411,7 @@ def test_temporal_neighbors(self): self.assertEqual(start, datetime.datetime(2001, 1, 2)) self.assertEqual(end, datetime.datetime(2001, 5, 6)) - def test_map(self): + def test_map(self) -> None: """Test STDS + single map without timestamp""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = A + map(singletmap)" @@ -431,7 +431,7 @@ def test_map(self): self.assertEqual(start, datetime.datetime(2001, 1, 1)) self.assertEqual(end, datetime.datetime(2001, 7, 1)) - def test_tmap_map(self): + def test_tmap_map(self) -> None: """Test STDS + single map with and without timestamp""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) expr = "R = tmap(singletmap) + A + map(singletmap)" diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_spatial_topology.py b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_spatial_topology.py index 481eae5d59d..5f5c3557f26 100644 
--- a/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_spatial_topology.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster_algebra_spatial_topology.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalRasterAlgebraSpatialTopology(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -89,17 +90,17 @@ def setUpClass(cls): type="raster", name=None, maps="singletmap", start="2001-01-01" ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", flags="rf", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B", quiet=True) cls.runModule("t.unregister", maps="singletmap", quiet=True) cls.del_temp_region() - def test_equal_equivalent_sum(self): + def test_equal_equivalent_sum(self) -> None: """Spatial topology distinction with equal timestamps STRDS A and B have identical time stamps, hence the differentiation @@ -131,7 +132,7 @@ def test_equal_equivalent_sum(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), None) - def test_equal_overlap_sum(self): + def test_equal_overlap_sum(self) -> None: """Spatial topology distinction with equal timestamps STRDS A and B have identical time stamps, hence the differentiation @@ -161,7 +162,7 @@ def test_equal_overlap_sum(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), None) - def test_equal_overlap_sum_with_null(self): + def test_equal_overlap_sum_with_null(self) -> None: """Spatial topology distinction with equal timestamps STRDS A and B have identical time stamps, hence the differentiation @@ -193,7 +194,7 @@ def test_equal_overlap_sum_with_null(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), None) - def test_equal_contain_sum(self): + def test_equal_contain_sum(self) -> None: """Spatial topology distinction with equal timestamps STRDS A and B have identical time stamps, hence the differentiation @@ -220,7 +221,7 @@ def test_equal_contain_sum(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), None) - def test_equal_equivalent_contain_sum(self): + def test_equal_equivalent_contain_sum(self) -> None: """Spatial topology distinction with equal timestamps STRDS A and B have identical time stamps, hence the differentiation @@ -255,7 +256,7 @@ def test_equal_equivalent_contain_sum(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), None) - def test_equal_equivalent_compare(self): + def test_equal_equivalent_compare(self) -> None: """Test implicit aggregation STRDS A and B have identical time stamps, hence the differentiation diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals.py b/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals.py index 813f469c2ce..f38e95412ed 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from 
grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalRasterAlgebraConditionals(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -104,16 +105,16 @@ def setUpClass(cls): interval=True, ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", flags="rf", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B,C,D", quiet=True) cls.del_temp_region() - def test_temporal_conditional_time_dimension_bug(self): + def test_temporal_conditional_time_dimension_bug(self) -> None: """Testing the conditional time dimension bug, that uses the time dimension of the conditional statement instead the time dimension of the then/else statement.""" @@ -135,7 +136,7 @@ def test_temporal_conditional_time_dimension_bug(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_temporal_conditional_1(self): + def test_temporal_conditional_1(self) -> None: """Testing the conditional time dimension bug, that uses the time dimension of the conditional statement instead the time dimension of the then/else statement.""" @@ -155,7 +156,7 @@ def test_temporal_conditional_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_temporal_conditional_relation_1(self): + def test_temporal_conditional_relation_1(self) -> None: """Testing the conditional time dimension bug, that uses the time dimension of the conditional statement instead the time dimension of the then/else statement.""" @@ -177,7 +178,7 @@ def test_temporal_conditional_relation_1(self): self.assertEqual(R.check_temporal_topology(), False) self.assertEqual(R.get_granularity(), "2 days") - def test_spatial_conditional_1(self): + def test_spatial_conditional_1(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -198,7 +199,7 @@ def test_spatial_conditional_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_2(self): + def test_spatial_conditional_2(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -219,7 +220,7 @@ def test_spatial_conditional_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_3(self): + def test_spatial_conditional_3(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -236,7 +237,7 @@ def test_spatial_conditional_3(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_4(self): + def test_spatial_conditional_4(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. 
Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -253,7 +254,7 @@ def test_spatial_conditional_4(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_5(self): + def test_spatial_conditional_5(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -274,7 +275,7 @@ def test_spatial_conditional_5(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "2 days") - def test_spatial_conditional_relation_1(self): + def test_spatial_conditional_relation_1(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -293,7 +294,7 @@ def test_spatial_conditional_relation_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_relation_2(self): + def test_spatial_conditional_relation_2(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -313,7 +314,7 @@ def test_spatial_conditional_relation_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_numeric_relation_1(self): + def test_spatial_conditional_numeric_relation_1(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -331,7 +332,7 @@ def test_spatial_conditional_numeric_relation_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_numeric_relation_2(self): + def test_spatial_conditional_numeric_relation_2(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. 
Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -352,7 +353,7 @@ def test_spatial_conditional_numeric_relation_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_numeric_1(self): + def test_spatial_conditional_numeric_1(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = if(A > 2, 0, A)", basename="r", overwrite=True) @@ -368,7 +369,7 @@ def test_spatial_conditional_numeric_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_numeric_2(self): + def test_spatial_conditional_numeric_2(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = if(A > 2, A, 8)", basename="r", overwrite=True) @@ -384,7 +385,7 @@ def test_spatial_conditional_numeric_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_numeric_3(self): + def test_spatial_conditional_numeric_3(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = if(A > 2, 1, 0)", basename="r", overwrite=True) @@ -400,7 +401,7 @@ def test_spatial_conditional_numeric_3(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatial_conditional_numeric_4(self): + def test_spatial_conditional_numeric_4(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse(expression="R = if(A > 2, null())", basename="r", overwrite=True) @@ -416,7 +417,7 @@ def test_spatial_conditional_numeric_4(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_1(self): + def test_spatiotemporal_conditional_1(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -436,7 +437,7 @@ def test_spatiotemporal_conditional_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_2(self): + def test_spatiotemporal_conditional_2(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -456,7 +457,7 @@ def test_spatiotemporal_conditional_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_relation_1(self): + def test_spatiotemporal_conditional_relation_1(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -476,7 +477,7 @@ def test_spatiotemporal_conditional_relation_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_relation_2(self): + def test_spatiotemporal_conditional_relation_2(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = 
tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -496,7 +497,7 @@ def test_spatiotemporal_conditional_relation_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_numeric_relation_1(self): + def test_spatiotemporal_conditional_numeric_relation_1(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -516,7 +517,7 @@ def test_spatiotemporal_conditional_numeric_relation_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_numeric_relation_2(self): + def test_spatiotemporal_conditional_numeric_relation_2(self) -> None: """Testing the spatial conditionals combined by AND/OR operators. Evaluation""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) @@ -537,7 +538,7 @@ def test_spatiotemporal_conditional_numeric_relation_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_numeric_1(self): + def test_spatiotemporal_conditional_numeric_1(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -557,7 +558,7 @@ def test_spatiotemporal_conditional_numeric_1(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_numeric_2(self): + def test_spatiotemporal_conditional_numeric_2(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -577,7 +578,7 @@ def test_spatiotemporal_conditional_numeric_2(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_numeric_3(self): + def test_spatiotemporal_conditional_numeric_3(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( @@ -597,7 +598,7 @@ def test_spatiotemporal_conditional_numeric_3(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_spatiotemporal_conditional_numeric_4(self): + def test_spatiotemporal_conditional_numeric_4(self) -> None: """Testing the spatial conditionals with numeric conclusions""" tra = tgis.TemporalRasterAlgebraParser(run=True, debug=True) tra.parse( diff --git a/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals_complement_else.py b/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals_complement_else.py index dd0a9dc8adf..ba70f1e9982 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals_complement_else.py +++ b/python/grass/temporal/testsuite/unittests_temporal_raster_conditionals_complement_else.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalRasterAlgebraConditionalComplements(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -67,16 
+68,16 @@ def setUpClass(cls): interval=True, ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", flags="rf", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule("t.remove", flags="rf", inputs="A,B", quiet=True) cls.del_temp_region() - def test_temporal_conditional_complement(self): + def test_temporal_conditional_complement(self) -> None: """Test the conditional expression that evaluate if then else statements so that the else statement is a complement operation @@ -112,7 +113,7 @@ def test_temporal_conditional_complement(self): self.assertEqual(R.check_temporal_topology(), True) self.assertEqual(R.get_granularity(), "1 day") - def test_temporal_conditional_complement_right_side_timestamps(self): + def test_temporal_conditional_complement_right_side_timestamps(self) -> None: """Test the conditional expression that evaluate if then else statements so that the else statement is a complement operation diff --git a/python/grass/temporal/testsuite/unittests_temporal_vector_algebra.py b/python/grass/temporal/testsuite/unittests_temporal_vector_algebra.py index 7c07a7edc6a..9d7b3cff4c1 100644 --- a/python/grass/temporal/testsuite/unittests_temporal_vector_algebra.py +++ b/python/grass/temporal/testsuite/unittests_temporal_vector_algebra.py @@ -9,14 +9,15 @@ import datetime -import grass.temporal as tgis from grass.gunittest.case import TestCase from grass.gunittest.main import test +import grass.temporal as tgis + class TestTemporalVectorAlgebra(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: """Initiate the temporal GIS and set the region""" tgis.init(True) # Raise on error instead of exit(1) cls.use_temp_region() @@ -146,18 +147,18 @@ def setUpClass(cls): end="2001-01-04", ) - def tearDown(self): + def tearDown(self) -> None: self.runModule("t.remove", type="stvds", inputs="R", quiet=True) @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: """Remove the temporary region""" cls.runModule( "t.remove", flags="rf", inputs="A,B,C,D", type="stvds", quiet=True ) cls.del_temp_region() - def test_temporal_select(self): + def test_temporal_select(self) -> None: """Testing the temporal select operator.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse(expression="R = A : A", basename="r", overwrite=True) @@ -175,7 +176,7 @@ def test_temporal_select(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_extent1(self): + def test_temporal_extent1(self) -> None: """Testing the temporal extent operators.""" ta = tgis.TemporalVectorAlgebraParser(run=True, debug=True) ta.parse(expression="R = A {:,during,r} C", basename="r", overwrite=True) @@ -192,7 +193,7 @@ def test_temporal_extent1(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_select_operators(self): + def test_temporal_select_operators(self) -> None: """Testing the temporal select operator. 
Including temporal relations.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse(expression="R = A {:,during} C", basename="r", overwrite=True) @@ -210,7 +211,7 @@ def test_temporal_select_operators(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_buff_operators_1(self): + def test_temporal_buff_operators_1(self) -> None: """Testing the bufferoperator.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse(expression="R = buff_p(A,0.5)", basename="r", overwrite=True) @@ -228,7 +229,7 @@ def test_temporal_buff_operators_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_buff_operators_2(self): + def test_temporal_buff_operators_2(self) -> None: """Testing the bufferoperator.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse(expression="R = buff_a(buff_p(A,1),10)", basename="r", overwrite=True) @@ -246,7 +247,7 @@ def test_temporal_buff_operators_2(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_overlay_operators_1(self): + def test_temporal_overlay_operators_1(self) -> None: """Testing the spatial overlay operator.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse( @@ -266,7 +267,7 @@ def test_temporal_overlay_operators_1(self): self.assertEqual(D.check_temporal_topology(), True) self.assertEqual(D.get_granularity(), "1 day") - def test_temporal_overlay_operators_2(self): + def test_temporal_overlay_operators_2(self) -> None: """Testing the spatial overlay operator.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse( @@ -288,7 +289,7 @@ def test_temporal_overlay_operators_2(self): self.assertEqual(D.check_temporal_topology(), False) self.assertEqual(D.get_granularity(), "2 days") - def test_temporal_overlay_operators_3(self): + def test_temporal_overlay_operators_3(self) -> None: """Testing the spatial overlay operator.""" tva = tgis.TemporalVectorAlgebraParser(run=True, debug=True) tva.parse( diff --git a/python/grass/temporal/unit_tests.py b/python/grass/temporal/unit_tests.py index ea4e4bb62c4..8275e012f25 100644 --- a/python/grass/temporal/unit_tests.py +++ b/python/grass/temporal/unit_tests.py @@ -1,5 +1,5 @@ """ -Depricazed unittests +Deprecated unittests (C) 2008-2011 by the GRASS Development Team This program is free software under the GNU General Public @@ -36,7 +36,7 @@ ############################################################################### -def test_increment_datetime_by_string(): +def test_increment_datetime_by_string() -> None: # First test print("# Test 1") dt = datetime(2001, 9, 1, 0, 0, 0) @@ -105,7 +105,7 @@ def test_increment_datetime_by_string(): ############################################################################### -def test_adjust_datetime_to_granularity(): +def test_adjust_datetime_to_granularity() -> None: # First test print("Test 1") dt = datetime(2001, 8, 8, 12, 30, 30) @@ -223,7 +223,7 @@ def test_adjust_datetime_to_granularity(): ############################################################################### -def test_compute_datetime_delta(): +def test_compute_datetime_delta() -> None: print("Test 1") start = datetime(2001, 1, 1, 0, 0, 0) end = datetime(2001, 1, 1, 0, 0, 0) @@ -566,7 +566,7 @@ def test_compute_datetime_delta(): core.fatal("Compute datetime delta is wrong %s" % (delta)) -def 
test_compute_absolute_time_granularity(): +def test_compute_absolute_time_granularity() -> None: # First we test intervals print("Test 1") maps = [] @@ -886,7 +886,7 @@ def test_compute_absolute_time_granularity(): ############################################################################### -def test_spatial_extent_intersection(): +def test_spatial_extent_intersection() -> None: # Generate the extents A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50) @@ -970,7 +970,7 @@ def test_spatial_extent_intersection(): ############################################################################### -def test_spatial_relations(): +def test_spatial_relations() -> None: # Generate the extents A = SpatialExtent(north=80, south=20, east=60, west=10, bottom=-50, top=50) @@ -1352,56 +1352,56 @@ def test_spatial_relations(): ############################################################################### -def test_temporal_topology_builder(): +def test_temporal_topology_builder() -> None: map_listA = [] - _map = RasterDataset(ident="1@a") - _map.set_absolute_time(datetime(2001, 1, 1), datetime(2001, 2, 1)) - map_listA.append(copy.copy(_map)) - _map = RasterDataset(ident="2@a") - _map.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 3, 1)) - map_listA.append(copy.copy(_map)) - _map = RasterDataset(ident="3@a") - _map.set_absolute_time(datetime(2001, 3, 1), datetime(2001, 4, 1)) - map_listA.append(copy.copy(_map)) - _map = RasterDataset(ident="4@a") - _map.set_absolute_time(datetime(2001, 4, 1), datetime(2001, 5, 1)) - map_listA.append(copy.copy(_map)) - _map = RasterDataset(ident="5@a") - _map.set_absolute_time(datetime(2001, 5, 1), datetime(2001, 6, 1)) - map_listA.append(copy.copy(_map)) + map_ = RasterDataset(ident="1@a") + map_.set_absolute_time(datetime(2001, 1, 1), datetime(2001, 2, 1)) + map_listA.append(copy.copy(map_)) + map_ = RasterDataset(ident="2@a") + map_.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 3, 1)) + map_listA.append(copy.copy(map_)) + map_ = RasterDataset(ident="3@a") + map_.set_absolute_time(datetime(2001, 3, 1), datetime(2001, 4, 1)) + map_listA.append(copy.copy(map_)) + map_ = RasterDataset(ident="4@a") + map_.set_absolute_time(datetime(2001, 4, 1), datetime(2001, 5, 1)) + map_listA.append(copy.copy(map_)) + map_ = RasterDataset(ident="5@a") + map_.set_absolute_time(datetime(2001, 5, 1), datetime(2001, 6, 1)) + map_listA.append(copy.copy(map_)) tb = SpatioTemporalTopologyBuilder() tb.build(map_listA) count = 0 - for _map in tb: - print("[%s]" % (_map.get_name())) - _map.print_topology_info() - if _map.get_id() != map_listA[count].get_id(): + for map_ in tb: + print("[%s]" % (map_.get_name())) + map_.print_topology_info() + if map_.get_id() != map_listA[count].get_id(): core.fatal( "Error building temporal topology <%s> != <%s>" - % (_map.get_id(), map_listA[count].get_id()) + % (map_.get_id(), map_listA[count].get_id()) ) count += 1 map_listB = [] - _map = RasterDataset(ident="1@b") - _map.set_absolute_time(datetime(2001, 1, 14), datetime(2001, 3, 14)) - map_listB.append(copy.copy(_map)) - _map = RasterDataset(ident="2@b") - _map.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 4, 1)) - map_listB.append(copy.copy(_map)) - _map = RasterDataset(ident="3@b") - _map.set_absolute_time(datetime(2001, 2, 14), datetime(2001, 4, 30)) - map_listB.append(copy.copy(_map)) - _map = RasterDataset(ident="4@b") - _map.set_absolute_time(datetime(2001, 4, 2), datetime(2001, 4, 30)) - map_listB.append(copy.copy(_map)) - _map = 
RasterDataset(ident="5@b") - _map.set_absolute_time(datetime(2001, 5, 1), datetime(2001, 5, 14)) - map_listB.append(copy.copy(_map)) + map_ = RasterDataset(ident="1@b") + map_.set_absolute_time(datetime(2001, 1, 14), datetime(2001, 3, 14)) + map_listB.append(copy.copy(map_)) + map_ = RasterDataset(ident="2@b") + map_.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 4, 1)) + map_listB.append(copy.copy(map_)) + map_ = RasterDataset(ident="3@b") + map_.set_absolute_time(datetime(2001, 2, 14), datetime(2001, 4, 30)) + map_listB.append(copy.copy(map_)) + map_ = RasterDataset(ident="4@b") + map_.set_absolute_time(datetime(2001, 4, 2), datetime(2001, 4, 30)) + map_listB.append(copy.copy(map_)) + map_ = RasterDataset(ident="5@b") + map_.set_absolute_time(datetime(2001, 5, 1), datetime(2001, 5, 14)) + map_listB.append(copy.copy(map_)) tb = SpatioTemporalTopologyBuilder() tb.build(map_listB) @@ -1417,13 +1417,13 @@ def test_temporal_topology_builder(): core.fatal("Error building temporal topology") count = 0 - for _map in tb: - print("[%s]" % (_map.get_map_id())) - _map.print_topology_shell_info() - if _map.get_id() != map_listB[count].get_id(): + for map_ in tb: + print("[%s]" % (map_.get_map_id())) + map_.print_topology_shell_info() + if map_.get_id() != map_listB[count].get_id(): core.fatal( "Error building temporal topology <%s> != <%s>" - % (_map.get_id(), map_listB[count].get_id()) + % (map_.get_id(), map_listB[count].get_id()) ) count += 1 @@ -1431,20 +1431,20 @@ def test_temporal_topology_builder(): tb.build(map_listA, map_listB) count = 0 - for _map in tb: - print("[%s]" % (_map.get_map_id())) - _map.print_topology_shell_info() - if _map.get_id() != map_listA[count].get_id(): + for map_ in tb: + print("[%s]" % (map_.get_map_id())) + map_.print_topology_shell_info() + if map_.get_id() != map_listA[count].get_id(): core.fatal( "Error building temporal topology <%s> != <%s>" - % (_map.get_id(), map_listA[count].get_id()) + % (map_.get_id(), map_listA[count].get_id()) ) count += 1 count = 0 - for _map in map_listB: - print("[%s]" % (_map.get_map_id())) - _map.print_topology_shell_info() + for map_ in map_listB: + print("[%s]" % (map_.get_map_id())) + map_.print_topology_shell_info() # Probing some relations if map_listA[3].get_follows()[0] != map_listB[1]: @@ -1465,31 +1465,31 @@ def test_temporal_topology_builder(): ############################################################################### -def test_map_list_sorting(): +def test_map_list_sorting() -> None: map_list = [] - _map = RasterDataset(ident="1@a") - _map.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 3, 1)) - map_list.append(copy.copy(_map)) - _map = RasterDataset(ident="2@a") - _map.set_absolute_time(datetime(2001, 1, 1), datetime(2001, 2, 1)) - map_list.append(copy.copy(_map)) - _map = RasterDataset(ident="3@a") - _map.set_absolute_time(datetime(2001, 3, 1), datetime(2001, 4, 1)) - map_list.append(copy.copy(_map)) + map_ = RasterDataset(ident="1@a") + map_.set_absolute_time(datetime(2001, 2, 1), datetime(2001, 3, 1)) + map_list.append(copy.copy(map_)) + map_ = RasterDataset(ident="2@a") + map_.set_absolute_time(datetime(2001, 1, 1), datetime(2001, 2, 1)) + map_list.append(copy.copy(map_)) + map_ = RasterDataset(ident="3@a") + map_.set_absolute_time(datetime(2001, 3, 1), datetime(2001, 4, 1)) + map_list.append(copy.copy(map_)) print("Original") - for _map in map_list: + for map_ in map_list: print( - _map.get_temporal_extent_as_tuple()[0], - _map.get_temporal_extent_as_tuple()[1], + 
map_.get_temporal_extent_as_tuple()[0], + map_.get_temporal_extent_as_tuple()[1], ) print("Sorted by start time") new_list = sorted(map_list, key=AbstractDatasetComparisonKeyStartTime) - for _map in new_list: + for map_ in new_list: print( - _map.get_temporal_extent_as_tuple()[0], - _map.get_temporal_extent_as_tuple()[1], + map_.get_temporal_extent_as_tuple()[0], + map_.get_temporal_extent_as_tuple()[1], ) if new_list[0] != map_list[1]: @@ -1501,10 +1501,10 @@ def test_map_list_sorting(): print("Sorted by end time") new_list = sorted(map_list, key=AbstractDatasetComparisonKeyEndTime) - for _map in new_list: + for map_ in new_list: print( - _map.get_temporal_extent_as_tuple()[0], - _map.get_temporal_extent_as_tuple()[1], + map_.get_temporal_extent_as_tuple()[0], + map_.get_temporal_extent_as_tuple()[1], ) if new_list[0] != map_list[1]: @@ -1518,7 +1518,7 @@ def test_map_list_sorting(): ############################################################################### -def test_1d_rtree(): +def test_1d_rtree() -> None: """Testing the rtree ctypes wrapper""" tree = rtree.RTreeCreateTree(-1, 0, 1) @@ -1548,7 +1548,7 @@ def test_1d_rtree(): ############################################################################### -def test_2d_rtree(): +def test_2d_rtree() -> None: """Testing the rtree ctypes wrapper""" tree = rtree.RTreeCreateTree(-1, 0, 2) @@ -1580,7 +1580,7 @@ def test_2d_rtree(): ############################################################################### -def test_3d_rtree(): +def test_3d_rtree() -> None: """Testing the rtree ctypes wrapper""" tree = rtree.RTreeCreateTree(-1, 0, 3) @@ -1622,7 +1622,7 @@ def test_3d_rtree(): ############################################################################### -def test_4d_rtree(): +def test_4d_rtree() -> None: """Testing the rtree ctypes wrapper""" tree = rtree.RTreeCreateTree(-1, 0, 4) diff --git a/python/grass/temporal/univar_statistics.py b/python/grass/temporal/univar_statistics.py index 77ebaca30b1..f5b25414b43 100755 --- a/python/grass/temporal/univar_statistics.py +++ b/python/grass/temporal/univar_statistics.py @@ -34,7 +34,9 @@ ############################################################################### -def compute_univar_stats(registered_map_info, stats_module, fs, rast_region=False): +def compute_univar_stats( + registered_map_info, stats_module, fs, rast_region: bool = False +): """Compute univariate statistics for a map of a space time raster or raster3d dataset @@ -116,14 +118,14 @@ def print_gridded_dataset_univar_statistics( output, where, extended, - no_header=False, - fs="|", - rast_region=False, + no_header: bool = False, + fs: str = "|", + rast_region: bool = False, region_relation=None, zones=None, percentile=None, - nprocs=1, -): + nprocs: int = 1, +) -> None: """Print univariate statistics for a space time raster or raster3d dataset. Returns None if the space time raster dataset is empty or if applied filters (where, region_relation) do not return any maps to process. 
@@ -275,8 +277,17 @@ def print_gridded_dataset_univar_statistics( def print_vector_dataset_univar_statistics( - input, output, twhere, layer, type, column, where, extended, no_header=False, fs="|" -): + input, + output, + twhere, + layer, + type, + column, + where, + extended, + no_header: bool = False, + fs: str = "|", +) -> None: """Print univariate statistics for a space time vector dataset :param input: The name of the space time dataset diff --git a/raster/Makefile b/raster/Makefile index 91ff54d0863..a8f00ee9c7c 100644 --- a/raster/Makefile +++ b/raster/Makefile @@ -147,6 +147,10 @@ htmldir: parsubdirs $(HTMLDIR)/r.in.png: # no-op - override Html.make rule for .png image files +$(MDDIR)/source/r.in.png: + # no-op - override Html.make rule for .png image files $(HTMLDIR)/r.out.png: # no-op - override Html.make rule for .png image files +$(MDDIR)/source/r.out.png: + # no-op - override Html.make rule for .png image files diff --git a/raster/r.basins.fill/r.basins.fill.html b/raster/r.basins.fill/r.basins.fill.html index a92e242d198..854bd94ef38 100644 --- a/raster/r.basins.fill/r.basins.fill.html +++ b/raster/r.basins.fill/r.basins.fill.html @@ -22,7 +22,6 @@

    DESCRIPTION

    If the resulting map layer from this program appears to have holes within a subbasin, the program should be rerun with a higher number of passes. -

    NOTES

    @@ -32,8 +31,8 @@

    NOTES

    SEE ALSO

-See Appendix A of the GRASS Tutorial:
+See Appendix A of the GRASS
+Tutorial:
r.watershed for further details on the combined use of
r.basins.fill and r.watershed.
diff --git a/raster/r.buffer/r.buffer.html b/raster/r.buffer/r.buffer.html
index e9f9fbd6ffc..c8a3913b95a 100644
--- a/raster/r.buffer/r.buffer.html
+++ b/raster/r.buffer/r.buffer.html
@@ -32,10 +32,10 @@

    NOTES

The user has the option of identifying up to 250 continuous zones.
The zones are identified by specifying the upper limit of each desired
-zone (r.buffer assumes that 0 is the starting
+zone (r.buffer assumes that 0 is the starting
point). "Continuous" is used in the sense that each category zone's
lower value is the previous zone's upper value. The first buffer zone
-always has distance 0 as its lower bound. Buffer distances
+always has distance 0 as its lower bound. Buffer distances
can be specified using one of five units with the units parameter.
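A minimal sketch (map names are illustrative) of the distances/units pairing described above, driven through grass.script in the same way the new tests elsewhere in this patch call modules:

    # Illustrative only: three buffer zones with upper limits of 100, 250 and
    # 500 meters around a hypothetical "roads" raster.
    import grass.script as gs

    gs.run_command(
        "r.buffer",
        input="roads",
        output="roads_buffered",
        distances="100,250,500",
        units="meters",
    )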

    @@ -97,7 +97,7 @@

    EXAMPLE

    -
    +
    Distances to road
diff --git a/raster/r.buildvrt/r.buildvrt.html b/raster/r.buildvrt/r.buildvrt.html
index d5118ba5150..f69ebd1e8ef 100644
--- a/raster/r.buildvrt/r.buildvrt.html
+++ b/raster/r.buildvrt/r.buildvrt.html
@@ -22,7 +22,7 @@

    NOTES

A GRASS virtual raster can be regarded as a simplified version of GDAL's
-virtual raster format.
+virtual raster format.
The GRASS equivalent is simpler because issues like nodata, projection,
resolution, resampling, masking are already handled by native GRASS raster routines.
@@ -72,7 +72,7 @@

    SEE ALSO

    The equivalent GDAL utility -gdalbuildvrt +gdalbuildvrt

    AUTHOR

diff --git a/raster/r.carve/r.carve.html b/raster/r.carve/r.carve.html
index f8d82200013..3d36aa45256 100644
--- a/raster/r.carve/r.carve.html
+++ b/raster/r.carve/r.carve.html
@@ -65,13 +65,13 @@

    EXAMPLE

    - - - - - -
    + r.carve example: original DEM
    Fig: Original 1m LiDAR based DEM with vector streams map on top
    + r.carve example: original DEM shaded
    @@ -79,13 +79,13 @@

    EXAMPLE

    + r.carve example: carved DEM
    Fig: Carved 1m LiDAR based DEM
    + r.carve example: carved DEM shaded
    @@ -93,13 +93,13 @@

    EXAMPLE

    + r.carve example: original DEM flow accumulated
    Fig: Flow accumulation in original 1m LiDAR based DEM
    + r.carve example: carved DEM flow accumulation
    @@ -118,7 +118,7 @@

    KNOWN ISSUES

    REFERENCES

-Terrain
+Terrain
modeling and Soil Erosion Simulations for Fort Hood and Fort Polk test
areas, by Helena Mitasova, Lubos Mitas, William M. Brown, Douglas M.
Johnston, GMSL (Report for CERL 1999)
diff --git a/raster/r.category/r.category.html b/raster/r.category/r.category.html
index 4dba832a98c..7c40974aa51 100644
--- a/raster/r.category/r.category.html
+++ b/raster/r.category/r.category.html
@@ -45,7 +45,7 @@

    Input from a file

    val1:val2:Label -If the filename is given as "-", the category labels are read from stdin +If the filename is given as "-", the category labels are read from stdin

    Default and dynamic category labels

    @@ -68,26 +68,26 @@

    Default and dynamic category labels

    In the format line

      -
-• $1 refers to the value num*5.0+1000 (ie, using the first 2 coefficients)
-• $2 refers to the value num*5.0+1005 (ie, using the last 2 coefficients)
+• $1 refers to the value num*5.0+1000 (ie, using the first 2 coefficients)
+• $2 refers to the value num*5.0+1005 (ie, using the last 2 coefficients)
-$1.2 will print $1 with 2 decimal places.

-Also, the form $?xxx$yyy$ translates into yyy if the category is 1, xxx
-otherwise. The $yyy$ is optional. Thus

-$1 meter$?s
+$1.2 will print $1 with 2 decimal places.

+Also, the form $?xxx$yyy$ translates into yyy if the category is 1, xxx
+otherwise. The $yyy$ is optional. Thus

+$1 meter$?s

will become:
-1 meter (for category 1)
-2 meters (for category 2), etc.
+1 meter (for category 1)
+2 meters (for category 2), etc.

-format='Elevation: $1.2 to $2.2 feet' ## Format Statement
-coefficients="5.0,1000,5.0,1005" ## Coefficients
+format='Elevation: $1.2 to $2.2 feet' ## Format Statement
+coefficients="5.0,1000,5.0,1005" ## Coefficients

    The format and coefficients above would be used to generate the following statement in creation of the format appropriate category string for category "num":

-sprintf(buff,"Elevation: %.2f to %.2f feet", num*5.0+1000, num*5.0*1005)
+sprintf(buff,"Elevation: %.2f to %.2f feet", num*5.0+1000, num*5.0*1005)
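For a concrete sense of the arithmetic described above, a small illustrative Python sketch (the helper name is made up and is not part of the patch) of how the two coefficient pairs expand into a label:

    # Illustrative only: $1 = num*5.0+1000, $2 = num*5.0+1005, each printed
    # with two decimal places as requested by $1.2 and $2.2.
    def dynamic_label(num: int) -> str:
        return f"Elevation: {num * 5.0 + 1000:.2f} to {num * 5.0 + 1005:.2f} feet"

    print(dynamic_label(1))  # Elevation: 1005.00 to 1010.00 feet
    print(dynamic_label(2))  # Elevation: 1010.00 to 1015.00 feet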

    Note: while both the format and coefficient lines must be present a blank line for the format string will effectively suppress @@ -99,9 +99,9 @@

    Default and dynamic category labels

    know that i-th rule maps fp range to i, thus we know for sure that cats.labels[i] corresponds to i-th quant rule --> -

    To use a "$" in the label without triggering the plural test, -put "$$" in the format string. -

    Use 'single quotes' when using a "$" on the command line to +

    To use a "$" in the label without triggering the plural test, +put "$$" in the format string. +

    Use 'single quotes' when using a "$" on the command line to avoid unwanted shell substitution.

    EXAMPLES

    @@ -133,7 +133,7 @@

    Printing categories

    prints only the category values and labels for landclass96 map layer -categories 2 and 5 through 7. +categories 2 and 5 through 7.

    @@ -143,7 +143,7 @@ 

    Printing categories

prints the values and labels for landclass96 map layer categories -3 and 4, but uses "," (instead of a tab) +3 and 4, but uses "," (instead of a tab) as the character separating the category values from the category labels in the output. diff --git a/raster/r.circle/r.circle.html index b3ed725fc84..dc1b4c9d34f 100644 --- a/raster/r.circle/r.circle.html +++ b/raster/r.circle/r.circle.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -This module creates an output raster map centered on the x,y values specified +r.circle creates an output raster map centered on the x,y values specified with the coordinate parameter, out to the edge of the current region. The output cell values increase linearly from the specified center. The min and max parameters control the inner and outer output raster map radii, respectively. diff --git a/raster/r.clump/main.c b/raster/r.clump/main.c index 74b41d96214..733816df070 100644 --- a/raster/r.clump/main.c +++ b/raster/r.clump/main.c @@ -131,7 +131,9 @@ int main(int argc, char *argv[]) } INPUT = opt_in->answers[0]; - strcpy(name, INPUT); + if (G_strlcpy(name, INPUT, sizeof(name)) >= sizeof(name)) { + G_fatal_error(_("Input raster name <%s> is too long"), INPUT); + } OUTPUT = NULL; out_fd = -1; @@ -155,8 +157,12 @@ int main(int argc, char *argv[]) G_debug(1, "Creating support files..."); /* build title */ - if (opt_title->answer != NULL) - strcpy(title, opt_title->answer); + if (opt_title->answer != NULL) { + if (G_strlcpy(title, opt_title->answer, sizeof(title)) >= + sizeof(title)) { + G_fatal_error(_("Title <%s> is too long"), opt_title->answer); + } + } else sprintf(title, "clump of <%s@%s>", name, G_mapset()); Rast_put_cell_title(OUTPUT, title); diff --git a/raster/r.coin/r.coin.html b/raster/r.coin/r.coin.html index bbaf260df83..4b7dc3af0eb 100644 --- a/raster/r.coin/r.coin.html +++ b/raster/r.coin/r.coin.html @@ -18,17 +18,16 @@

    DESCRIPTION

    units of measure in which the report results can be given. These units are: -

    -

    -
    c
    cells -
    p
    percent cover of region -
    x
    percent of <map name> category (column) -
    y
    percent of <map name> category (row) -
    a
    acres -
    h
    hectares -
    k
    square kilometers -
    m
    square miles -
    +
      +
    • c: cells
    • +
    • p: percent cover of region
    • +
    • x: percent of <map name> category (column)
    • +
    • y: percent of <map name> category (row)
    • +
    • a: acres
    • +
    • h: hectares
    • +
    • k: square kilometers
    • +
    • m: square miles
    • +

    Note that three of these options give results as percentage @@ -188,7 +187,7 @@
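For illustration, a small Python sketch using the first=/second=/units= options exactly as the new pytest test added in this changeset does; the map names are the test's placeholder rasters:

    import grass.script as gs

    # Coincidence table as percent cover of the region (units="p").
    report = gs.read_command("r.coin", first="map1", second="map2", units="p")
    print(report)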

    SEE ALSO

    g.region, - + r.category, r.describe, r.reclass, diff --git a/raster/r.coin/tests/conftest.py b/raster/r.coin/tests/conftest.py new file mode 100644 index 00000000000..29c9eb0c3a2 --- /dev/null +++ b/raster/r.coin/tests/conftest.py @@ -0,0 +1,57 @@ +import os +import pytest +import grass.script as gs + + +@pytest.fixture +def setup_maps(tmp_path): + """Set up a GRASS session and create test raster maps.""" + + # Initialize GRASS project + project = tmp_path / "r_coin_project" + gs.create_project(project) + with gs.setup.init(project, env=os.environ.copy()) as session: + # Set the region + gs.run_command( + "g.region", + n=3, + s=0, + e=3, + w=0, + res=1, + env=session.env, + ) + + # Create the raster maps + # map1: + # 1 1 2 + # 1 2 3 + # 2 2 3 + gs.mapcalc( + "map1 = " + "if(row() == 1 && col() <= 2, 1, " + "if(row() == 1 && col() == 3, 2, " + "if(row() == 2 && col() == 1, 1, " + "if(row() == 2 && col() == 2, 2, " + "if(row() == 2 && col() == 3, 3, " + "if(row() == 3 && col() <= 2, 2, 3))))))", + overwrite=True, + env=session.env, + ) + + # map2: + # 1 2 2 + # 2 1 3 + # 3 3 3 + gs.mapcalc( + "map2 = " + "if(row() == 1 && col() == 1, 1, " + "if(row() == 1 && col() >= 2, 2, " + "if(row() == 2 && col() == 1, 2, " + "if(row() == 2 && col() == 2, 1, " + "if(row() == 2 && col() == 3, 3, " + "if(row() >= 3, 3, null()))))))", + overwrite=True, + env=session.env, + ) + yield session # Pass the session to tests diff --git a/raster/r.coin/tests/test_coin.py b/raster/r.coin/tests/test_coin.py new file mode 100644 index 00000000000..1befeed88d0 --- /dev/null +++ b/raster/r.coin/tests/test_coin.py @@ -0,0 +1,63 @@ +import grass.script as gs + + +def validate_r_coin_output(actual_results, expected_results): + """Validate r.coin output against expected results.""" + assert set(actual_results) == set( + expected_results + ), f"Expected {expected_results}, but got {actual_results}" + + +def test_r_coin(setup_maps): + """Test the r.coin module.""" + session = setup_maps + coin_output = gs.parse_command( + "r.coin", first="map1", second="map2", units="c", env=session.env + ) + + # Start parsing the output + actual_results = [] + is_data_section = False + + for line in coin_output: + # Detect the start of the data section + if "| cat# |" in line: + is_data_section = True + continue + + # Stop parsing after the data section ends + if is_data_section and line.startswith("+"): + break + + # Parse rows in the data section + if is_data_section: + columns = line.split("|") + if len(columns) > 2: + try: + # Extract categories and counts from the table + cat1 = int(columns[1].strip()[1:]) # Category from the row header + val1 = int(columns[2].strip()) # Value in column 1 + val2 = int(columns[3].strip()) # Value in column 2 + val3 = int(columns[4].strip()) # Value in column 3 + + actual_results.extend( + [(cat1, 1, val1), (cat1, 2, val2), (cat1, 3, val3)] + ) + except ValueError: + pass # Ignore lines that cannot be parsed as numbers + + # Expected results + expected_results = [ + (1, 1, 1), + (1, 2, 1), + (1, 3, 0), + (2, 1, 2), + (2, 2, 1), + (2, 3, 0), + (3, 1, 0), + (3, 2, 2), + (3, 3, 2), + ] + + # Validate results + validate_r_coin_output(actual_results, expected_results) diff --git a/raster/r.colors/r.colors.html b/raster/r.colors/r.colors.html index 8fdf83d1313..3762adda465 100644 --- a/raster/r.colors/r.colors.html +++ b/raster/r.colors/r.colors.html @@ -71,7 +71,7 @@

    DESCRIPTION

    certain ranges. One can get a rough idea of the applicability of a colour table by reading the -corresponding rules file ($GISBASE/etc/colors/<name>). +corresponding rules file ($GISBASE/etc/colors/<name>). For example the slope rule is defined as:
    @@ -231,14 +231,14 @@ 

    DESCRIPTION

    NOTES

    -All color tables are stored in $GISBASE/etc/colors/. Further +All color tables are stored in $GISBASE/etc/colors/. Further user-defined color tables can also be stored in this directory for access from the color parameter or in a user defined directory. See also r.colors.out for printing color tables easily to the terminal.

    The color table assigned to a raster map is stored in -$GISDBASE/location/mapset/colr/. +$GISDBASE/location/mapset/colr/.

    EXAMPLES

    @@ -309,7 +309,7 @@

    SEE ALSO

    page Color tables (from GRASS User Wiki) -

    ColorBrewer is an online tool designed to +

    ColorBrewer is an online tool designed to help people select good color schemes for maps and other graphics.

    AUTHORS

    diff --git a/raster/r.colors/r3.colors.html b/raster/r.colors/r3.colors.html index c4e65a62bee..16b35b76b1d 100644 --- a/raster/r.colors/r3.colors.html +++ b/raster/r.colors/r3.colors.html @@ -24,7 +24,7 @@

    SEE ALSO

    page Color tables (from GRASS User Wiki) -

    ColorBrewer is an online tool designed to +

    ColorBrewer is an online tool designed to help people select good color schemes for maps and other graphics.

    AUTHORS

    diff --git a/raster/r.compress/r.compress.html b/raster/r.compress/r.compress.html index 5a95a970bbf..665015e2cc4 100644 --- a/raster/r.compress/r.compress.html +++ b/raster/r.compress/r.compress.html @@ -10,13 +10,13 @@

    DESCRIPTION

    method if available, otherwise ZLIB compression is used (see below). Related no data files (i.e.: NULL files), if present, are compressed by default unless a specific environment variable is set to explicitly -disable NULL file compression (GRASS_COMPRESS_NULLS, see +disable NULL file compression (GRASS_COMPRESS_NULLS, see below).

    During compression or re-compression, r.compress compresses raster maps using the method specified by means of the environment -variable GRASS_COMPRESSOR. The default compression method is +variable GRASS_COMPRESSOR. The default compression method is ZSTD if available, otherwise ZLIB's "deflate" algorithm (LZ77-based). Raster maps that contain very little information (such as boundary, geology, soils and land use maps) can be greatly reduced in size. Some @@ -37,10 +37,10 @@

    DESCRIPTION

    Raster maps that are already compressed might be compressed again, -either by setting a different method with GRASS_COMPRESSOR +either by setting a different method with GRASS_COMPRESSOR (supported methods: RLE, ZLIB, LZ4, BZIP2, ZSTD) or, for the case of ZLIB compression, by changing the compression level with the -environment variable GRASS_ZLIB_LEVEL. +environment variable GRASS_ZLIB_LEVEL.

    Compressed raster maps may be decompressed using r.compress @@ -51,7 +51,7 @@

    DESCRIPTION

    Information about the compression method and data type of the input raster map(s) can be printed in shell style with the -g flag. In -this case, the module prints to stdout one line per input map +this case, the module prints to stdout one line per input map with the fields "input map name", "data type", "name of data compression method", "NULL file compression" separated by the pipe character. NULL file @@ -73,27 +73,27 @@

    TERMINOLOGY

    OVERVIEW OF AVAILABLE COMPRESSION ALGORITHMS

    The following compression methods are available (set by -export GRASS_COMPRESSOR=method): +export GRASS_COMPRESSOR=method):
      -
    • NONE (uncompressed)
    • -
    • RLE (generic Run-Length Encoding of single bytes; deprecated)
    • -
    • ZLIB (DEFLATE, good speed and compression) +
    • NONE (uncompressed)
    • +
    • RLE (generic Run-Length Encoding of single bytes; deprecated)
    • +
    • ZLIB (DEFLATE, good speed and compression)
        -
      • with zlib compression levels (export GRASS_ZLIB_LEVEL=X): -1..9 +
      • with zlib compression levels (export GRASS_ZLIB_LEVEL=X): -1..9 (-1 is default which corresponds to ZLIB level 6)
      • -
      • note: export GRASS_ZLIB_LEVEL=0 is equal to copying the data +
      • note: export GRASS_ZLIB_LEVEL=0 is equal to copying the data as-is from source to destination
    • -
    • LZ4 (fastest, low compression)
    • -
    • BZIP2 (slowest, high compression)
    • -
    • ZSTD (compared to ZLIB, faster and higher compression, +
    • LZ4 (fastest, low compression)
    • +
    • BZIP2 (slowest, high compression)
    • +
    • ZSTD (compared to ZLIB, faster and higher compression, much faster decompression - default compression)
    Important: the NULL file compression can be turned off with -export GRASS_COMPRESS_NULLS=0. Raster maps with NULL file +export GRASS_COMPRESS_NULLS=0. Raster maps with NULL file compression can only be opened with GRASS GIS 7.2.0 or later. NULL file compression for a particular raster map can be managed with r.null -z. The NULL file compression is using the LZ4 method as being the best compromise @@ -104,11 +104,11 @@

    COMPRESSION ALGORITHM DETAILS

    All GRASS GIS raster map types are by default ZSTD compressed if available, otherwise ZLIB compressed. Through the environment variable -GRASS_COMPRESSOR the compression method can be set to RLE, +GRASS_COMPRESSOR the compression method can be set to RLE, ZLIB, LZ4, BZIP2, or ZSTD.

    Integer (CELL type) raster maps can be compressed with RLE if -the environment variable GRASS_COMPRESSOR exists and is set to +the environment variable GRASS_COMPRESSOR exists and is set to RLE. However, this is not recommended.

    Floating point (FCELL, DCELL) raster maps never use RLE compression; @@ -133,7 +133,7 @@

    COMPRESSION ALGORITHM DETAILS

    compression level which is the best compromise between speed and compression ratio, also when compared to other available compression methods. Valid levels are in the range [1, 9] and can be set with the -environment variable GRASS_ZLIB_LEVEL. +environment variable GRASS_ZLIB_LEVEL.
    LZ4
    LZ4 is a very fast compression method, about as fast as no compression. Decompression is also very fast. The compression ratio is @@ -160,7 +160,7 @@

    NOTES

    Compression method number scheme

    The used compression method is encoded with numbers. In the internal -cellhd file, the value for "compressed" is 1 for RLE, 2 +cellhd file, the value for "compressed" is 1 for RLE, 2 for ZLIB, 3 for LZ4, 4 for BZIP2, and 5 for ZSTD.

    Obviously, decompression is controlled by the raster map's compression, @@ -185,7 +185,7 @@

    Formats

    ZLIB compression levels

    -If the environment variable GRASS_ZLIB_LEVEL exists and +If the environment variable GRASS_ZLIB_LEVEL exists and its value can be parsed as an integer, it determines the compression level used when newly generated raster maps are compressed using ZLIB compression. This applies to all raster map types (CELL, FCELL, DCELL). @@ -273,9 +273,9 @@
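A hedged Python sketch of driving these settings from a script; the environment variable names come from the text above, while map="mymap" and the map= option itself are assumptions:

    import os
    import grass.script as gs

    # Re-compress an existing raster with ZSTD.
    os.environ["GRASS_COMPRESSOR"] = "ZSTD"
    gs.run_command("r.compress", map="mymap")

    # Or use ZLIB at its strongest (and slowest) level.
    os.environ["GRASS_COMPRESSOR"] = "ZLIB"
    os.environ["GRASS_ZLIB_LEVEL"] = "9"
    gs.run_command("r.compress", map="mymap")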

    SEE ALSO

    Compression algorithms: bzip2, -LZ4, -zlib, -zstd +LZ4, +zlib, +zstd

    AUTHORS

    diff --git a/raster/r.contour/r.contour.html b/raster/r.contour/r.contour.html index 722f8494cb9..4023410ea94 100644 --- a/raster/r.contour/r.contour.html +++ b/raster/r.contour/r.contour.html @@ -50,7 +50,7 @@

    EXAMPLES

    -r.contours example
    +r.contours example
    Contour lines shown on shaded terrain map
    diff --git a/raster/r.cost/r.cost.html b/raster/r.cost/r.cost.html index e5118d98aea..db5343ec650 100644 --- a/raster/r.cost/r.cost.html +++ b/raster/r.cost/r.cost.html @@ -53,10 +53,10 @@

    OPTIONS

    use the --v verbose flag to track progress.

    The Knight's move (-k flag) may be used to improve the accuracy of -the output. In the diagram below, the center location (O) represents a +the output. In the diagram below, the center location (O) represents a grid cell from which cumulative distances are calculated. Those -neighbors marked with an X are always considered for cumulative cost -updates. With the -k option, the neighbors marked with a K are +neighbors marked with an X are always considered for cumulative cost +updates. With the -k option, the neighbors marked with a K are also considered.

    @@ -135,7 +135,7 @@ 

    NULL CELLS

    the algorithm, and thus retained in the output map.

    If one wants r.cost to transparently cross any region of null cells, -the null_cost=0.0 option should be used. Then null cells just +the null_cost=0.0 option should be used. Then null cells just propagate the adjacent costs. These cells can be retained as null cells in the output map by using the -n flag. diff --git a/raster/r.covar/r.covar.html b/raster/r.covar/r.covar.html index 87f1345d7e7..a510bb39c7a 100644 --- a/raster/r.covar/r.covar.html +++ b/raster/r.covar/r.covar.html @@ -17,9 +17,9 @@
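For the r.cost options above, a minimal Python sketch combining the -k flag with null_cost=0.0; the raster names, the start coordinates, and the start_coordinates= option name are placeholders/assumptions:

    import grass.script as gs

    # Knight's move for accuracy, NULL cells crossed transparently.
    gs.run_command(
        "r.cost",
        flags="k",
        input="cost_surface",
        output="cumulative_cost",
        start_coordinates=(638000, 220000),
        null_cost=0.0,
    )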

    NOTES

    N real eigen values and N eigen vectors (each composed of N real numbers).

-The module m.eigensystem +The module m.eigensystem in GRASS GIS Addons -can be compiled and used to generate the eigen values and vectors. +can be installed and used to generate the eigenvalues and eigenvectors.

    EXAMPLE

    diff --git a/raster/r.describe/r.describe.html b/raster/r.describe/r.describe.html index d0f98692bad..a8627d4f329 100644 --- a/raster/r.describe/r.describe.html +++ b/raster/r.describe/r.describe.html @@ -12,7 +12,7 @@

    DESCRIPTION

    also ignore the current geographic region and mask. -

    The nv parameter sets the string to be used to represent NULL +

    The nv parameter sets the string to be used to represent NULL values in the module output; the default is '*'.

    The nsteps parameter sets the number of quantisation steps to divide into @@ -36,33 +36,36 @@

    FLAGS

    If the -1 flag is specified, the output appears with one category value/range per line. -

    The -n flag suppresses the reporting of NULL values. +

    The -n flag suppresses the reporting of NULL values.

    EXAMPLES

    The following examples are from the Spearfish60 sample dataset: -

    -# Print the full list of raster map categories: +

    Print the full list of raster map categories

    +
     r.describe landcover.30m
     * 11 21-23 31 32 41-43 51 71 81-83 85 91 92
     
    -

    -# Print the raster range only: + +

    Print the raster range only

    +
     r.describe -r landcover.30m
     11 thru 92
     *
     
    -# Print raster map category range, suppressing nulls: +

    Print raster map category range, suppressing nulls

    +
     r.describe -n landcover.30m
     11 21-23 31 32 41-43 51 71 81-83 85 91 92
     
    -

    -# Print raster map categories, one category per line: + +

    Print raster map categories, one category per line

    +
     r.describe -1 geology
     
    @@ -77,7 +80,7 @@ 

    EXAMPLES

    8 9
    -

    +

    SEE ALSO

    diff --git a/raster/r.distance/testsuite/test_distance.py b/raster/r.distance/testsuite/test_distance.py new file mode 100644 index 00000000000..62809b2d55e --- /dev/null +++ b/raster/r.distance/testsuite/test_distance.py @@ -0,0 +1,81 @@ +from grass.gunittest.case import TestCase +from grass.gunittest.main import test +from grass.gunittest.gmodules import SimpleModule + + +class TestRDistance(TestCase): + + @classmethod + def setUpClass(cls): + """Set up a temporary region and generate test data.""" + cls.use_temp_region() + cls.runModule("g.region", n=10, s=0, e=10, w=0, res=1) + # Create 'map1' with a block at the top-left corner + cls.runModule( + "r.mapcalc", + expression="map1 = if(row() <= 2 && col() <= 2, 1, null())", + overwrite=True, + ) + # Create 'map2' with a block in the center + cls.runModule( + "r.mapcalc", + expression="map2 = if(row() >= 4 && row() <=6 && col() >= 4 && col() <= 6, 1, null())", + overwrite=True, + ) + cls.runModule("r.mapcalc", expression="map3 = null()", overwrite=True) + + @classmethod + def tearDownClass(cls): + """Clean up after tests.""" + cls.runModule( + "g.remove", flags="f", type="raster", name=["map1", "map2", "map3"] + ) + cls.del_temp_region() + + def test_distance(self): + """Test distance calculation between map1 and map2.""" + module = SimpleModule("r.distance", map=("map1", "map2")) + self.assertModule(module) + + result = module.outputs.stdout.strip().splitlines() + + expected_results = ["1:1:2.8284271247:1.5:8.5:3.5:6.5"] + + for i, component in enumerate(result): + self.assertEqual( + component, expected_results[i], f"Mismatch at line {i + 1}" + ) + + def test_overlap_distance(self): + """Test r.distance when comparing a map to itself with overlapping features.""" + module = SimpleModule("r.distance", map=("map1", "map1"), flags="o") + self.assertModule(module) + + result = module.outputs.stdout.strip().splitlines() + + expected_results = ["1:1:0:0.5:9.5:0.5:9.5"] + + self.assertEqual( + result, + expected_results, + "Mismatch in r.distance output for overlapping features", + ) + + def test_null_distance(self): + """Test r.distance when reporting null values with -n flag.""" + module = SimpleModule("r.distance", map=("map3", "map2"), flags="n") + self.assertModule(module) + + result = module.outputs.stdout.strip().splitlines() + + expected_results = ["*:*:0:0.5:9.5:0.5:9.5", "*:1:2:3.5:8.5:3.5:6.5"] + + self.assertEqual( + result, + expected_results, + "Mismatch in r.distance output for reporting null objects as *", + ) + + +if __name__ == "__main__": + test() diff --git a/raster/r.external.out/r.external.out.html b/raster/r.external.out/r.external.out.html index c4b3bc65af1..86c490587d4 100644 --- a/raster/r.external.out/r.external.out.html +++ b/raster/r.external.out/r.external.out.html @@ -31,7 +31,7 @@

    Storing results from raster data analysis directly as GeoTIFF

    # prepare sample analysis g.region raster=elevation -p -# perform GRASS calculation (here: filter by height, write > 120m, NULL otherwise) +# perform GRASS calculation (here: filter by height, write > 120m, NULL otherwise) # this will store the output map directly as GeoTIFF, so we use .tif extension: r.mapcalc "elev_filt.tif = if(elevation > 120.0, elevation, null() )" diff --git a/raster/r.fill.dir/r.fill.dir.html b/raster/r.fill.dir/r.fill.dir.html index 17173ea3dd3..4e802ea28b7 100644 --- a/raster/r.fill.dir/r.fill.dir.html +++ b/raster/r.fill.dir/r.fill.dir.html @@ -82,24 +82,24 @@

    NOTES

• The r.fill.dir module can be used not only to fill depressions, but also to detect water bodies or potential water bodies based on -the nature of the terrain and the digital elevation model used. +the nature of the terrain and the digital elevation model used.
  • Not all depressions are errors in digital elevation models. In fact, many are wetlands and as Jenkins and McCauley (2006) note careless use of depression filling may lead to unintended consequences such -as loss of wetlands. +as loss of wetlands.
  • Although many hydrological algorithms require depression filling, advanced algorithms such as those implemented in r.watershed and r.sim.water do not require -depressionless digital elevation model to work. +depressionless digital elevation model to work.
  • The flow direction map can be visualized with -d.rast.arrow. +d.rast.arrow.
  • @@ -133,7 +133,7 @@

    EXAMPLES

    r.univar -e elev_lid792_1m_diff # vectorize filled areas (here all fills are of positive value, see r.univar output) -r.mapcalc "elev_lid792_1m_fill_area = if(elev_lid792_1m_diff > 0.0, 1, null() )" +r.mapcalc "elev_lid792_1m_fill_area = if(elev_lid792_1m_diff > 0.0, 1, null() )" r.to.vect input=elev_lid792_1m_fill_area output=elev_lid792_1m_fill_area type=area # generate shaded terrain for better visibility of results @@ -154,18 +154,18 @@

    REFERENCES

    • Beasley, D.B. and L.F. Huggins. 1982. ANSWERS (areal nonpoint source watershed environmental -response simulation): User's manual. U.S. EPA-905/9-82-001, Chicago, IL, 54 p. +response simulation): User's manual. U.S. EPA-905/9-82-001, Chicago, IL, 54 p.
    • Jenkins, D. G., and McCauley, L. A. 2006. GIS, SINKS, FILL, and disappearing wetlands: unintended consequences in algorithm development and use. In Proceedings of the 2006 ACM symposium on applied computing - (pp. 277-282). + (pp. 277-282).
    • Jenson, S.K., and J.O. Domingue. 1988. Extracting topographic structure from digital elevation model data for geographic information system analysis. Photogram. -Engr. and Remote Sens. 54: 1593-1600. +Engr. and Remote Sens. 54: 1593-1600.
    • Young, R.A., C.A. Onstad, D.D. Bosch and W.P. Anderson. 1985. Agricultural nonpoint surface pollution models (AGNPS) I and II model documentation. St. Paul: Minn. Pollution -control Agency and Washington D.C., USDA-Agricultural Research Service. +control Agency and Washington D.C., USDA-Agricultural Research Service.

    SEE ALSO

    diff --git a/raster/r.fill.stats/r.fill.stats.html b/raster/r.fill.stats/r.fill.stats.html index fdc60119243..c8414796e73 100644 --- a/raster/r.fill.stats/r.fill.stats.html +++ b/raster/r.fill.stats/r.fill.stats.html @@ -77,7 +77,7 @@

    DESCRIPTION

    The images below show a gradiometer dataset with gaps and its interpolated equivalent, produced using the spatially weighted mean -operator (mode="wmean"). +operator (mode="wmean").

    @@ -132,7 +132,7 @@

    Usage

    increasing distance from the latter. Another way of explaining this effect is to state that larger "power" settings result in more localized interpolation, smaller ones in more globalized interpolation. -The default setting is power=2.0. +The default setting is power=2.0.

    @@ -171,7 +171,7 @@

    Usage

    neighborhood (defined by the search radius distance) that contain a value in the input map, multiplied by their weights, and dividing the result by the sum of all weights in the neighborhood. -For mode=wmean, this means that interpolated output cells that +For mode=wmean, this means that interpolated output cells that were computed from many nearby input cells have very low uncertainty and vice versa. For all other modes, all weights in the neighborhood are constant "1" and the uncertainty measure is a simple measure of how @@ -198,7 +198,7 @@

    Smoothing

Effect of smoothing the original data: The top row shows a gap-filled surface computed from a rasterized Lidar point -cloud (using mode=wmean and power=2), and the derived slope, aspect, +cloud (using mode=wmean and power=2), and the derived slope, aspect, and profile curvature maps. The smoothing effect is clearly visible. The bottom row shows the effect of setting the -k flag: Preserving the original cell values in the interpolated output produces an unsmoothed, noisy surface, and likewise @@ -292,11 +292,11 @@

    Spatial weighting scheme

    Note that the weights in such a small window drop rapidly for the -default setting of power=2. +default setting of power=2.

    -If the distance is given in map units (flag -m), then the +If the distance is given in map units (flag -m), then the search window can be modeled more accurately as a circle. The illustration below shows the precomputed weights for a distance in map units that is approximately equivalent to four cells from the center @@ -433,7 +433,7 @@
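A hedged Python sketch of a wmean run with the search radius given in map units, as discussed above; the raster names and the distance value are placeholders, and distance= is assumed to be the search-radius option:

    import grass.script as gs

    gs.run_command(
        "r.fill.stats",
        flags="m",                    # distance interpreted in map units
        input="gradiometry",
        output="gradiometry_filled",
        mode="wmean",
        power=2.0,                    # default weighting power
        distance=25,
    )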

    Lidar point cloud example

    -Point density and ground surface +Point density and ground surface

    Binning of Lidar and resulting ground surface with filled gaps. Note the remaining NULL cells (white) in the resulting ground surface. @@ -476,7 +476,7 @@

    Outlier removal and gap-filling of SRTM elevation data

    d.histogram elev_srtm_30m # remove SRTM outliers, i.e. SRTM below 50m (esp. lakes), leading to no data areas -r.mapcalc "elev_srtm_30m_filt = if(elev_srtm_30m < 50.0, null(), elev_srtm_30m)" +r.mapcalc "elev_srtm_30m_filt = if(elev_srtm_30m < 50.0, null(), elev_srtm_30m)" d.histogram elev_srtm_30m_filt d.rast elev_srtm_30m_filt diff --git a/raster/r.flow/r.flow.html b/raster/r.flow/r.flow.html index 79f0e7ab4b7..da06e63bf5c 100644 --- a/raster/r.flow/r.flow.html +++ b/raster/r.flow/r.flow.html @@ -32,8 +32,8 @@

    NOTES

    or vice-versa), cross a barrier, or arrive at a cell with undefined elevation or aspect. Another option, skip, indicates that only the flowlines from every val-th cell are to be included -in flowline. The default skip is max(1, <rows -in elevation>/50, <cols in elevation>/50). A +in flowline. The default skip is max(1, <rows +in elevation>/50, <cols in elevation>/50). A high skip usually speeds up processing time and often improves the readability of a visualization of flowline.
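A one-line Python check of that default, with placeholder region dimensions:

    # Default skip for a 1500 x 2000 raster, per the formula above.
    rows, cols = 1500, 2000
    skip = max(1, rows // 50, cols // 50)
    print(skip)  # -> 40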

    @@ -128,11 +128,11 @@

    Algorithm background

  • r.flow has an option to compute slope and aspect internally thus making the program capable to process much larger data sets than r.flowmd. It has also 2 additional options for handling of large data sets but it is not -known that they work properly. +known that they work properly.
  • the programs handle the special cases when the flowline passes exactly -(or very close) through the grid vertices differently. +(or very close) through the grid vertices differently.
  • r.flowmd has the simplified multiple flow addition so the results are -smoother. +smoother.
  • In conclusion, r.flowmd produces nicer results but is slower and it does not @@ -189,26 +189,26 @@

    REFERENCES

    • Mitasova, H., L. Mitas, 1993, Interpolation by regularized spline with tension : I. Theory and implementation. Mathematical Geology 25, p. 641-655. -(online) +(online)
    • Mitasova and Hofierka 1993 : Interpolation by Regularized Spline with Tension: II. Application to Terrain Modeling and Surface Geometry Analysis. Mathematical Geology 25(6), 657-669 -(online). +(online).
    • Mitasova, H., Mitas, L., Brown, W.M., Gerdes, D.P., Kosinovsky, I., Baker, T., 1995: Modeling spatially and temporally distributed phenomena: New methods and tools for GRASS GIS. International Journal of Geographical -Information Systems 9(4), 433-446. +Information Systems 9(4), 433-446.
    • Mitasova, H., J. Hofierka, M. Zlocha, L.R. Iverson, 1996, Modeling topographic potential for erosion and deposition using GIS. Int. Journal of Geographical Information Science, 10(5), 629-641. (reply to a comment to this paper appears in 1997 in Int. Journal of Geographical Information -Science, Vol. 11, No. 6) +Science, Vol. 11, No. 6)
    • Mitasova, H.(1993): Surfaces and modeling. Grassclippings (winter and -spring) p.18-19. +spring) p.18-19.

    SEE ALSO

    diff --git a/raster/r.geomorphon/r.geomorphon.html b/raster/r.geomorphon/r.geomorphon.html index 54af307b007..36b85ce51fd 100644 --- a/raster/r.geomorphon/r.geomorphon.html +++ b/raster/r.geomorphon/r.geomorphon.html @@ -1,5 +1,8 @@

    DESCRIPTION

+r.geomorphon calculates terrain forms using a machine-vision +technique called geomorphons. +
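A minimal Python sketch of a typical run, using the search/skip/flat options documented below; the map names and the specific distance values are placeholders:

    import grass.script as gs

    gs.run_command(
        "r.geomorphon",
        elevation="elevation",
        forms="landforms",
        search=36,
        skip=6,
        flat=1,
    )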

    What is geomorphon:

    What is geomorphon
    @@ -32,7 +35,7 @@

    What is geomorphon:

There are 3**8 = 6561 possible ternary patterns (8-tuples). However by eliminating all patterns that are results of either rotation or reflection -of other patterns wa set of 498 patterns remain referred as geomorphons. +of other patterns we get a set of 498 patterns, referred to as geomorphons. This is a comprehensive and exhaustive set of idealized landforms that are independent of the size, relief, and orientation of the actual landform. @@ -54,39 +57,39 @@

    What is geomorphon:

    OPTIONS

    -
    -
    -m
    -
    All distance parameters (search, skip, flat distances) are supplied as meters instead of cells (default). To avoid situation when supplied distances is smaller than one cell program first check if supplied distance is longer than one cell in both NS and WE directions. For LatLong projection only NS distance checked, because in latitude angular unit comprise always bigger or equal distance than longitude one. If distance is supplied in cells, For all projections is recalculated into meters according formula: number_of_cells*resolution_along_NS_direction. It is important if geomorphons are calculated for large areas in LatLong projection.
    -
    elevation
    -
    Digital elevation model. Data can be of any type and any projection. During calculation DEM is stored as floating point raster.
    -
    search
    -
    Determines length on the geodesic distances in all eight directions where line-of-sight is calculated. To speed up calculation is determines only these cells which centers falls into the distance.
    -
    skip
    -
    Determines length on the geodesic distances at the beginning of calculation all eight directions where line-of-sight is yet calculated. To speed up calculation this distance is always recalculated into number of cell which are skipped at the beginning of every line-of-sight and is equal in all direction. This parameter eliminates forms of very small extend, smaller than skip parameter.
    -
    flat
    -
    The difference (in degrees) between zenith and nadir line-of-sight which indicate flat direction. If higher threshold produce more flat maps. If resolution of the map is low (more than 1 km per cell) threshold should be very small (much smaller than 1 degree) because on such distance 1 degree of difference means several meters of high difference.
    -
    dist
    -
    >Flat distance. This is additional parameter defining the distance above which the threshold starts to decrease to avoid problems with pseudo-flat line-of-sights if real elevation difference appears on the distance where its value is higher (TO BE CORRECTED).
    -
    comparison
    -
    Comparison mode for zenith/nadir line-of-sight search. "anglev1" is +
    +
    -m
    +
    All distance parameters (search, skip, flat distances) are supplied as meters instead of cells (default). To avoid situation when supplied distances is smaller than one cell program first check if supplied distance is longer than one cell in both NS and WE directions. For LatLong projection only NS distance checked, because in latitude angular unit comprise always bigger or equal distance than longitude one. If distance is supplied in cells, For all projections is recalculated into meters according formula: number_of_cells*resolution_along_NS_direction. It is important if geomorphons are calculated for large areas in LatLong projection.
    +
    elevation
    +
    Digital elevation model. Data can be of any type and any projection. During calculation DEM is stored as floating point raster.
    +
    search
    +
    Determines length on the geodesic distances in all eight directions where line-of-sight is calculated. To speed up calculation is determines only these cells which centers falls into the distance.
    +
    skip
    +
    Determines length on the geodesic distances at the beginning of calculation all eight directions where line-of-sight is yet calculated. To speed up calculation this distance is always recalculated into number of cell which are skipped at the beginning of every line-of-sight and is equal in all direction. This parameter eliminates forms of very small extend, smaller than skip parameter.
    +
    flat
    +
    The difference (in degrees) between zenith and nadir line-of-sight which indicate flat direction. If higher threshold produce more flat maps. If resolution of the map is low (more than 1 km per cell) threshold should be very small (much smaller than 1 degree) because on such distance 1 degree of difference means several meters of high difference.
    +
    dist
    +
    Flat distance. This is additional parameter defining the distance above which the threshold starts to decrease to avoid problems with pseudo-flat line-of-sights if real elevation difference appears on the distance where its value is higher (TO BE CORRECTED).
    +
    comparison
    +
    Comparison mode for zenith/nadir line-of-sight search. "anglev1" is the original r.geomorphon comparison mode. "anglev2" is an improved mode, which better handles angle thresholds and zenith/nadir angles that are exactly equal. "anglev2_distance" in addition to that takes the zenith/nadir distances into account when the angles are exactly -equal.
    -
    forms
    -
    Returns geomorphic map with 10 most popular terrestrial forms. Legend for forms, its definition by the number of + and - and its idealized visualisation are presented at the image. +equal.
    +
    forms
    +
    Returns geomorphic map with 10 most popular terrestrial forms. Legend for forms, its definition by the number of + and - and its idealized visualisation are presented at the image.

    Forms represented by geomorphons:

    forms legend
    -
    -
    ternary
    -
    returns code of one of 498 unique ternary patterns for every cell. The code is a decimal representation of 8-tuple minimalised patterns written in ternary system. Full list of patterns is available in source code directory as patterns.txt. This map can be used to create alternative form classification using supervised approach.
    -
    positive and negative
    -
    returns codes binary patterns for zenith (positive) and nadir (negative) line of sights. The code is a decimal representation of 8-tuple minimalised patterns written in binary system. Full list of patterns is available in source code directory as patterns.txt.
    -
    coordinates
    -
    The central point of a single geomorphon to profile. The central +
    +
    ternary
    +
    returns code of one of 498 unique ternary patterns for every cell. The code is a decimal representation of 8-tuple minimalised patterns written in ternary system. Full list of patterns is available in source code directory as patterns.txt. This map can be used to create alternative form classification using supervised approach.
    +
    positive and negative
    +
    returns codes binary patterns for zenith (positive) and nadir (negative) line of sights. The code is a decimal representation of 8-tuple minimalised patterns written in binary system. Full list of patterns is available in source code directory as patterns.txt.
    +
    coordinates
    +
    The central point of a single geomorphon to profile. The central point must be within the computational region, which should be large enough to accommodate the search radius. Setting the region larger than that will not produce more accurate data, but in the current @@ -94,32 +97,32 @@

    Forms represented by geomorphons:

    remember to align the region to the raster cells. Profiling is mutually exclusive with any raster outputs, but other parameters and flags (such as elevation, search, comparison, -m and --e) work as usual.
    -
    profiledata
    -
    The output file name for the complete profile data, "-" means to +-e) work as usual.
    +
    profiledata
    +
    The output file name for the complete profile data, "-" means to write to the standard output. The data is in a machine-readable format and it includes assorted values describing the computation context and -parameters, as well as its intermediate and final results.
    -
    profileformat
    -
    Format of the profile data: "json", "yaml" or "xml".
    - -

    NOTE: parameters below are experimental. The usefulness of these parameters are currently under investigation.

    -
    -
    intensity
    -
    returns average difference between central cell of geomorphon and eight cells in visibility neighbourhood. This parameter shows local (as is visible) exposition/abasement of the form in the terrain.
    -
    range
    -
    returns difference between minimum and maximum values of visibility neighbourhood.
    -
    variance
    -
    returns variance (difference between particular values and mean value) of visibility neighbourhood.
    -
    extend
    -
    returns area of the polygon created by the 8 points where line-of-sight cuts the terrain (see image in description section).
    -
    azimuth
    -
    returns orientation of the polygon constituting geomorphon. This orientation is currently calculated as a orientation of least square fit line to the eight verticles of this polygon.
    -
    elongation
    -
    returns proportion between sides of the bounding box rectangle calculated for geomorphon rotated to fit least square line.
    -
    width
    -
    returns length of the shorter side of the bounding box rectangle calculated for geomorphon rotated to fit least square line.
    -
    +parameters, as well as its intermediate and final results. +
    profileformat
    +
    Format of the profile data: "json", "yaml" or "xml".
    + +

    NOTE: parameters below are experimental. The usefulness of these parameters are currently under investigation.

    +
    +
    intensity
    +
    returns average difference between central cell of geomorphon and eight cells in visibility neighbourhood. This parameter shows local (as is visible) exposition/abasement of the form in the terrain.
    +
    range
    +
    returns difference between minimum and maximum values of visibility neighbourhood.
    +
    variance
    +
    returns variance (difference between particular values and mean value) of visibility neighbourhood.
    +
    extend
    +
    returns area of the polygon created by the 8 points where line-of-sight cuts the terrain (see image in description section).
    +
    azimuth
    +
returns orientation of the polygon constituting geomorphon. This orientation is currently calculated as an orientation of the least square fit line to the eight vertices of this polygon.
    +
    elongation
    +
    returns proportion between sides of the bounding box rectangle calculated for geomorphon rotated to fit least square line.
    +
    width
    +
    returns length of the shorter side of the bounding box rectangle calculated for geomorphon rotated to fit least square line.
    +

    NOTES

    @@ -184,7 +187,7 @@

    REFERENCES

  • Stepinski, T., Jasiewicz, J., 2011, Geomorphons - a new approach to classification of landform, in : Eds: Hengl, T., Evans, I.S., Wilson, J.P., and Gould, M., Proceedings of Geomorphometry 2011, Redlands, -109-112 (PDF)
  • +109-112 (PDF)
  • Jasiewicz, J., Stepinski, T., 2013, Geomorphons - a pattern recognition approach to classification and mapping of landforms, Geomorphology, vol. 182, 147-156 (DOI: 10.1016/j.geomorph.2012.11.005)
  • diff --git a/raster/r.grow.distance/r.grow.distance.html b/raster/r.grow.distance/r.grow.distance.html index ee5e276a550..0b6866be1e6 100644 --- a/raster/r.grow.distance/r.grow.distance.html +++ b/raster/r.grow.distance/r.grow.distance.html @@ -91,12 +91,12 @@

    Distance from the streams network

    -
    +
    Euclidean distance from the streams network in meters (map subset)
    -
    +
    Euclidean distance from the streams network in meters (detail, numbers shown with d.rast.num)
    @@ -111,7 +111,7 @@

    Distance from sea in meters in latitude-longitude CRS

    -
    +
    Geodesic distances to sea in meters
    diff --git a/raster/r.gwflow/r.gwflow.html b/raster/r.gwflow/r.gwflow.html index 2dbef39f74a..3edc76354eb 100644 --- a/raster/r.gwflow/r.gwflow.html +++ b/raster/r.gwflow/r.gwflow.html @@ -1,7 +1,7 @@

    DESCRIPTION

    -This numerical program calculates implicit transient, confined and -unconfined groundwater flow in two dimensions based on +r.gwflow is a numerical program which calculates implicit transient, +confined and unconfined groundwater flow in two dimensions based on raster maps and the current region settings. All initial and boundary conditions must be provided as raster maps. The unit of the current project's coordinate reference system must be meters. @@ -95,7 +95,7 @@

    EXAMPLE

    #now create the input raster maps for confined and unconfined aquifers r.mapcalc expression="phead = if(row() == 1 , 50, 40)" r.mapcalc expression="status = if(row() == 1 , 2, 1)" -r.mapcalc expression="well = if(row() == 20 && col() == 20 , -0.01, 0)" +r.mapcalc expression="well = if(row() == 20 && col() == 20 , -0.01, 0)" r.mapcalc expression="hydcond = 0.00025" r.mapcalc expression="recharge = 0" r.mapcalc expression="top_conf = 20.0" diff --git a/raster/r.his/r.his.html b/raster/r.his/r.his.html index 0dd2c429e04..964ac2d2273 100644 --- a/raster/r.his/r.his.html +++ b/raster/r.his/r.his.html @@ -1,7 +1,7 @@

    DESCRIPTION

    HIS stands for hue, intensity, and saturation. -This program produces red, green and blue raster map layers +r.his produces red, green and blue raster map layers providing a visually pleasing combination of hue, intensity, and saturation values from two or three user-specified raster map layers. diff --git a/raster/r.horizon/r.horizon.html b/raster/r.horizon/r.horizon.html index 513af64b8c4..e7ebcf68783 100644 --- a/raster/r.horizon/r.horizon.html +++ b/raster/r.horizon/r.horizon.html @@ -7,10 +7,10 @@
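For the r.his description above, a hedged Python sketch of such a combination; the map names are placeholders and the hue/intensity/red/green/blue option names are assumed from the terminology above:

    import grass.script as gs

    gs.run_command(
        "r.his",
        hue="elevation_colored",
        intensity="elevation_shade",
        red="his_r",
        green="his_g",
        blue="his_b",
    )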

    DESCRIPTION

    • point: as a series of horizon heights in the specified directions from the given point(s). The results are -written to the stdout. +written to the stdout.
    • raster: in this case the output is one or more raster maps, with each point in a raster giving the horizon -height in a specific direction. One raster is created for each direction. +height in a specific direction. One raster is created for each direction.

    @@ -31,7 +31,7 @@

    DESCRIPTION

    orientation (North=0, clockwise).

    -Activating the -l flag allows to additionally print the distance +Activating the -l flag allows additionally printing the distance to each horizon angle.

    Input parameters:

    @@ -258,18 +258,18 @@

    REFERENCES

    Hofierka J., Huld T., Cebecauer T., Suri M., 2007. Open Source Solar Radiation Tools for Environmental and Renewable Energy Applications, -International Symposium on +International Symposium on Environmental Software Systems, Prague, 2007

    Neteler M., Mitasova H., 2004. Open Source GIS: A GRASS GIS -Approach, Springer, New York. +Approach, Springer, New York. ISBN: 1-4020-8064-6, 2nd Edition 2004 (reprinted 2005), 424 pages -

    Project PVGIS, European +

    Project PVGIS, European Commission, DG Joint Research Centre 2001-2007

    Suri M., Hofierka J., 2004. A New GIS-based Solar Radiation Model and Its Application for -Photovoltaic Assessments. Transactions -in GIS, 8(2), 175-190 +Photovoltaic Assessments. Transactions +in GIS, 8(2), 175-190 (DOI: 10.1111/j.1467-9671.2004.00174.x)

    SEE ALSO

    diff --git a/raster/r.in.ascii/r.in.ascii.html b/raster/r.in.ascii/r.in.ascii.html index 0f450de5fd4..eacf22ef81e 100644 --- a/raster/r.in.ascii/r.in.ascii.html +++ b/raster/r.in.ascii/r.in.ascii.html @@ -52,7 +52,7 @@

    NOTES

    The data (which follows the header section) must contain -r x c values, but it is not necessary +r x c values, but it is not necessary that all the data for a row be on one line. A row may be split over many lines. diff --git a/raster/r.in.bin/r.in.bin.html b/raster/r.in.bin/r.in.bin.html index 9c57e38f98b..2d457b1680e 100644 --- a/raster/r.in.bin/r.in.bin.html +++ b/raster/r.in.bin/r.in.bin.html @@ -10,7 +10,7 @@

    DESCRIPTION

    The north, south, east, and west field values are the coordinates of the edges of the geographic region. The rows and cols values describe the dimensions of the matrix of data to follow. If the input is a -GMT binary array +GMT binary array (-h flag), the six dimension fields (north, south, east, west, rows and cols) are obtained from the GMT header. If the bytes field is entered incorrectly an error will be generated suggesting a closer bytes value. diff --git a/raster/r.in.gdal/r.in.gdal.html b/raster/r.in.gdal/r.in.gdal.html index 5af167a78e0..7083a48410e 100644 --- a/raster/r.in.gdal/r.in.gdal.html +++ b/raster/r.in.gdal/r.in.gdal.html @@ -20,7 +20,7 @@

    GDAL supported raster formats

    Full details on all GDAL supported formats are available at:

    -https://gdal.org/formats_list.html +https://gdal.org/en/stable/drivers/raster/

    Selected formats out of the more than 140 supported formats: @@ -132,8 +132,8 @@

    Project Creation

    information when importing datasets if the source format includes CRS information, and if the GDAL driver supports it. If the CRS of the source dataset does not match the CRS of the current project r.in.gdal will -report an error message (Coordinate reference system of dataset does not appear to -match current project) and then report the PROJ_INFO parameters of +report an error message (Coordinate reference system of dataset does not appear to +match current project) and then report the PROJ_INFO parameters of the source dataset.

    @@ -162,7 +162,7 @@

    Support for Ground Control Points

    If the target project does not exist, a new project will be created matching the CRS definition of the GCPs. The target of the output group will be set to the new project, and -i.rectify can now be used without any further +i.rectify can now be used without any further preparation.

    Some satellite images (e.g. NOAA/AVHRR, ENVISAT) can contain hundreds @@ -177,8 +177,8 @@

    Map names: Management of offset and leading zeros

    across different input files.

    The num_digits parameter allows defining the number of leading zeros -(zero padding) in case of band numbers (e.g., to turn band.1 into -band.001). +(zero padding) in case of band numbers (e.g., to turn band.1 into +band.001).
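A short Python sketch of that zero padding; the file and output names are placeholders:

    import grass.script as gs

    # band.1 ... band.12 become band.001 ... band.012
    gs.run_command(
        "r.in.gdal",
        input="multiband.tif",
        output="band",
        num_digits=3,
    )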

    NOTES

    @@ -252,7 +252,7 @@

    Error Messages

    "ERROR: Input map is rotated - cannot import."
    In this case the image must be first externally rotated, applying the rotation info stored in the metadata field of the raster image file. For example, the -gdalwarp software can be used +gdalwarp software can be used to transform the map to North-up (note, there are several gdalwarp parameters to select the resampling algorithm): @@ -296,9 +296,8 @@

    EXAMPLES

    ECAD Data

    -The European Climate Assessment and Dataset (ECAD) project -provides climate data for Europe ranging from 1950 - 2015 or later -(Terms of use). +The European Climate Assessment and Dataset (ECAD) project +provides climate data for Europe ranging from 1950 - 2015 or later. To import the different chunks of data provided by the project as netCDF files, the offset parameter can be used to properly assign numbers to the series of daily raster maps from 1st Jan 1950 (in case of importing the ECAD data @@ -363,8 +362,8 @@

    GLOBE DEM

    Raster file import over network

    Since GDAL 2.x it is possible to import raster data over the network -(see GDAL Virtual File Systems) -including Cloud Optimized GeoTIFF, +(see GDAL Virtual File Systems) +including Cloud Optimized GeoTIFF, i.e. access uncompressed and compressed raster data via a http(s) or ftp connection. As an example the import of the global SRTMGL1 V003 tiles at 1 arc second (about 30 meters) resolution, void-filled: @@ -398,7 +397,7 @@

    HDF

    REFERENCES

    -GDAL Pages: https://gdal.org/ +GDAL Pages: https://gdal.org

    SEE ALSO

    @@ -416,5 +415,5 @@

    SEE ALSO

    AUTHOR

    -Frank Warmerdam +Frank Warmerdam (email). diff --git a/raster/r.in.lidar/r.in.lidar.html b/raster/r.in.lidar/r.in.lidar.html index 6cff187c089..a3346047bec 100644 --- a/raster/r.in.lidar/r.in.lidar.html +++ b/raster/r.in.lidar/r.in.lidar.html @@ -125,7 +125,7 @@

    Statistics

    coeff_var
    This computes the coefficient of variance of point values for each cell. Coefficient of variance is given in percentage and defined as -(stddev/mean)*100.
    +(stddev/mean)*100.
    median
    This computes the median of point values for each cell
    percentile
    @@ -210,7 +210,7 @@

    Filtering

    The user can use a combination of r.in.lidar output maps to create custom raster-based filters, for example, use r.mapcalc to create -a mean-(2*stddev) map. (In this example the user may want to +a mean-(2*stddev) map. (In this example the user may want to include a lower bound filter in r.mapcalc to remove highly variable points (small n) or run r.neighbors to smooth the stddev map before further use.) @@ -332,10 +332,10 @@

    Memory consumption

    (> 10000x10000 pixels). If the module refuses to start complaining that there isn't enough memory, use the percent parameter to run the module in several passes. -In addition using a less precise map format (CELL [integer] or -FCELL [floating point]) will use less memory than a DCELL +In addition using a less precise map format (CELL [integer] or +FCELL [floating point]) will use less memory than a DCELL [double precision floating point] output map. -For method=n, the CELL format is used +For method=n, the CELL format is used automatically.

    @@ -356,7 +356,7 @@

    Memory consumption

    the number of data points.

    -The default map type=FCELL is intended as compromise between +The default map type=FCELL is intended as compromise between preserving data precision and limiting system resource consumption.

    Trim option

    @@ -454,28 +454,28 @@

    Serpent Mound dataset

    This example is analogous to the example used in the GRASS wiki page for importing LAS as raster DEM. -

    The sample LAS data are in the file "Serpent Mound Model LAS Data.las", +

    The sample LAS data are in the file "Serpent Mound Model LAS Data.laz", available at -appliedimagery.com: +Serpent Mound Model LAS Data.laz:

     # print LAS file info
    -r.in.lidar -p input="Serpent Mound Model LAS Data.las"
    +r.in.lidar -p input="Serpent Mound Model LAS Data.laz"
     
     # using v.in.lidar to create a new project
     # create project with CRS information of the LAS data
    -v.in.lidar -i input="Serpent Mound Model LAS Data.las" project=Serpent_Mound
    +v.in.lidar -i input="Serpent Mound Model LAS Data.laz" project=Serpent_Mound
     
     # quit and restart GRASS in the newly created project "Serpent_Mound"
     
     # scan the extents of the LAS data
    -r.in.lidar -sg input="Serpent Mound Model LAS Data.las"
    +r.in.lidar -sg input="Serpent Mound Model LAS Data.laz"
     
     # set the region to the extents of the LAS data, align to resolution
     g.region n=4323641.57 s=4320942.61 w=289020.90 e=290106.02 res=1 -ap
     
     # import as raster DEM
    -r.in.lidar input="Serpent Mound Model LAS Data.las" \
    +r.in.lidar input="Serpent Mound Model LAS Data.laz" \
                output=Serpent_Mound_Model_LAS_Data method=mean
     
    @@ -488,7 +488,7 @@

    Height above ground

    The mean height above ground of the points can be computed for each raster cell (the ground elevation is given by the raster map -elevation): +elevation):
     g.region raster=elevation -p
    @@ -517,11 +517,11 @@ 

    Multiple file input

    On Linux and OSX, this file can be automatically generated with the command:
    -ls /home/user/data/*.laz > /home/user/data/filelist.txt
    +ls /home/user/data/*.laz > /home/user/data/filelist.txt
     
    On Windows:
    -dir /b c:\users\user\data\*.laz > c:\users\user\data\filelist.txt
    +dir /b c:\users\user\data\*.laz > c:\users\user\data\filelist.txt
     
    The mean height above ground example above would then be: @@ -551,14 +551,14 @@

    Multiple file input

    KNOWN ISSUES

      -
    • The "nan" value (as defined in C language) can leak into +
    • The "nan" value (as defined in C language) can leak into coeff_var raster maps. Cause is unknown. Possible - work-around is: r.null setnull=nan or - r.mapcalc 'no_nan = if(map == map, map, null())'. + work-around is: r.null setnull=nan or + r.mapcalc 'no_nan = if(map == map, map, null())'.
• Only one method can be applied for a single run and multiple map output from a single run - (e.g. method=string[,string,...] output=name[,name,...] - or n=string mean=string) is no supported. + (e.g. method=string[,string,...] output=name[,name,...] + or n=string mean=string) is not supported.
    • @@ -597,14 +597,14 @@

      REFERENCES

    • V. Petras, A. Petrasova, J. Jeziorska, H. Mitasova (2016): Processing UAV and lidar point clouds in GRASS GIS. -XXIII ISPRS Congress 2016 [ISPRS Archives, ResearchGate] +XXIII ISPRS Congress 2016 [ISPRS Archives, ResearchGate]
    • -ASPRS LAS format +ASPRS LAS format
    • -LAS library +LAS library
    • -LAS library C API documentation +LAS library C API documentation

    AUTHORS

    diff --git a/raster/r.in.mat/r.in.mat.html b/raster/r.in.mat/r.in.mat.html index b0bba2cb478..d5613c1ed61 100644 --- a/raster/r.in.mat/r.in.mat.html +++ b/raster/r.in.mat/r.in.mat.html @@ -7,13 +7,13 @@

    DESCRIPTION


    Specifically, the following array variables will be read:
      -
    • map_data -
    • map_name -
    • map_title -
    • map_northern_edge -
    • map_southern_edge -
    • map_eastern_edge -
    • map_western_edge +
    • map_data
    • +
    • map_name
    • +
    • map_title
    • +
    • map_northern_edge
    • +
    • map_southern_edge
    • +
    • map_eastern_edge
    • +
    • map_western_edge
    Any other variables in the MAT-file will be simply skipped over.
    @@ -21,7 +21,7 @@

    DESCRIPTION

    The 'map_name' variable is optional, if it exists, and is valid, the new map will be thus named. If it doesn't exist or a name is specified with the output= option, the raster map's name will be set to -"MatFile" or the name specified respectively. +"MatFile" or the name specified respectively. (maximum 64 characters; normal GRASS naming rules apply)

    @@ -43,9 +43,9 @@

    NOTES

    copied between different system architectures without binary translation (caveat: see "TODO" below).

    -As there is no IEEE value for NaN in integer arrays, GRASS's null +As there is no IEEE value for NaN in integer arrays, GRASS's null value may be used to represent it within these maps. Usually Matlab will save -any integer based matrix with NaN values as a double-precision +any integer based matrix with NaN values as a double-precision floating point array, so this usually isn't an issue. To save space, once the map is loaded into GRASS you can convert it back to an integer map with the following command: @@ -53,7 +53,7 @@

    NOTES

    r.mapcalc "int_map = int(MATFile_map)" -NaN values in either floating point or double-precision floating point +NaN values in either floating point or double-precision floating point matrices should translate into null values as expected.

    @@ -73,8 +73,8 @@

    NOTES


    -Remember Matlab arrays are referenced as (row,column), -i.e. (y,x). +Remember Matlab arrays are referenced as (row,column), +i.e. (y,x).

    In addition, r.in.mat and r.out.mat make for a nice binary container format for transferring georeferenced maps around, diff --git a/raster/r.in.pdal/grassrasterwriter.h b/raster/r.in.pdal/grassrasterwriter.h index 4db8852aa8a..0ec1da774a3 100644 --- a/raster/r.in.pdal/grassrasterwriter.h +++ b/raster/r.in.pdal/grassrasterwriter.h @@ -32,7 +32,7 @@ extern "C" { #include /* Binning code wrapped as a PDAL Writer class */ -#ifdef HAVE_PDAL_NOFILENAMEWRITER +#if PDAL_VERSION_MAJOR >= 2 && PDAL_VERSION_MINOR >= 7 class GrassRasterWriter : public pdal::NoFilenameWriter, public pdal::Streamable { #else diff --git a/raster/r.in.pdal/info.cpp b/raster/r.in.pdal/info.cpp index 47ebd01fe28..d9f506a9ace 100644 --- a/raster/r.in.pdal/info.cpp +++ b/raster/r.in.pdal/info.cpp @@ -1,7 +1,7 @@ /* * r.in.pdal Functions printing out various information on input LAS files * - * Copyright 2021 by Maris Nartiss, and The GRASS Development Team + * Copyright 2021-2024 by Maris Nartiss, and The GRASS Development Team * Author: Maris Nartiss * * This program is free software licensed under the GPL (>=v2). @@ -12,8 +12,14 @@ #include "info.h" #include +#ifdef PDAL_USE_NOSRS +void get_extent(struct StringList *infiles, double *min_x, double *max_x, + double *min_y, double *max_y, double *min_z, double *max_z, + bool nosrs) +#else void get_extent(struct StringList *infiles, double *min_x, double *max_x, double *min_y, double *max_y, double *min_z, double *max_z) +#endif { pdal::StageFactory factory; bool first = 1; @@ -25,15 +31,27 @@ void get_extent(struct StringList *infiles, double *min_x, double *max_x, std::string pdal_read_driver = factory.inferReaderDriver(infile); if (pdal_read_driver.empty()) - G_fatal_error("Cannot determine input file type of <%s>", infile); + G_fatal_error(_("Cannot determine input file type of <%s>"), + infile); pdal::PointTable table; pdal::Options las_opts; pdal::Option las_opt("filename", infile); las_opts.add(las_opt); +#ifdef PDAL_USE_NOSRS + if (nosrs) { + pdal::Option nosrs_opt("nosrs", true); + las_opts.add(nosrs_opt); + } +#endif pdal::LasReader las_reader; las_reader.setOptions(las_opts); - las_reader.prepare(table); + try { + las_reader.prepare(table); + } + catch (const std::exception &err) { + G_fatal_error(_("PDAL error: %s"), err.what()); + } const pdal::LasHeader &las_header = las_reader.header(); if (first) { *min_x = las_header.minX(); @@ -62,16 +80,28 @@ void get_extent(struct StringList *infiles, double *min_x, double *max_x, } } +#ifdef PDAL_USE_NOSRS +void print_extent(struct StringList *infiles, bool nosrs) +#else void print_extent(struct StringList *infiles) +#endif { double min_x, max_x, min_y, max_y, min_z, max_z; +#ifdef PDAL_USE_NOSRS + get_extent(infiles, &min_x, &max_x, &min_y, &max_y, &min_z, &max_z, nosrs); +#else get_extent(infiles, &min_x, &max_x, &min_y, &max_y, &min_z, &max_z); +#endif fprintf(stdout, "n=%f s=%f e=%f w=%f b=%f t=%f\n", max_y, min_y, max_x, min_x, min_z, max_z); } +#ifdef PDAL_USE_NOSRS +void print_lasinfo(struct StringList *infiles, bool nosrs) +#else void print_lasinfo(struct StringList *infiles) +#endif { pdal::StageFactory factory; pdal::MetadataNode meta_node; @@ -86,15 +116,27 @@ void print_lasinfo(struct StringList *infiles) std::string pdal_read_driver = factory.inferReaderDriver(infile); if (pdal_read_driver.empty()) - G_fatal_error("Cannot determine input file type of <%s>", infile); + G_fatal_error(_("Cannot determine input file type of <%s>"), + infile); pdal::PointTable table; pdal::Options las_opts; pdal::Option 
las_opt("filename", infile); las_opts.add(las_opt); +#ifdef PDAL_USE_NOSRS + if (nosrs) { + pdal::Option nosrs_opt("nosrs", true); + las_opts.add(nosrs_opt); + } +#endif pdal::LasReader las_reader; las_reader.setOptions(las_opts); - las_reader.prepare(table); + try { + las_reader.prepare(table); + } + catch (const std::exception &err) { + G_fatal_error(_("PDAL error: %s"), err.what()); + } const pdal::LasHeader &h = las_reader.header(); pdal::PointLayoutPtr point_layout = table.layout(); const pdal::Dimension::IdList &dims = point_layout->dims(); @@ -115,9 +157,9 @@ void print_lasinfo(struct StringList *infiles) std::cout << "Point format: " << (int)h.pointFormat() << "\n"; std::cout << "Point offset: " << h.pointOffset() << "\n"; std::cout << "Point count: " << h.pointCount() << "\n"; - for (size_t i = 0; i < pdal::LasHeader::RETURN_COUNT; ++i) - std::cout << "Point count by return[" << i + 1 << "]: " - << const_cast(h).pointCountByReturn(i) + for (size_t k = 0; k < pdal::LasHeader::RETURN_COUNT; ++k) + std::cout << "Point count by return[" << k + 1 << "]: " + << const_cast(h).pointCountByReturn(k) << "\n"; std::cout << "Scales X/Y/Z: " << h.scaleX() << "/" << h.scaleY() << "/" << h.scaleZ() << "\n"; diff --git a/raster/r.in.pdal/info.h b/raster/r.in.pdal/info.h index 7f8d0138ab1..387efd21ef0 100644 --- a/raster/r.in.pdal/info.h +++ b/raster/r.in.pdal/info.h @@ -27,15 +27,29 @@ #pragma clang diagnostic pop #endif +#include +#if (PDAL_VERSION_MAJOR >= 2 && PDAL_VERSION_MINOR > 4) || \ + (PDAL_VERSION_MAJOR == 2 && PDAL_VERSION_MINOR == 4 && \ + PDAL_VERSION_PATCH == 3) +#define PDAL_USE_NOSRS 1 +#endif + extern "C" { #include #include #include "string_list.h" } +#ifdef PDAL_USE_NOSRS +void get_extent(struct StringList *, double *, double *, double *, double *, + double *, double *, bool); +void print_extent(struct StringList *, bool); +void print_lasinfo(struct StringList *, bool); +#else void get_extent(struct StringList *, double *, double *, double *, double *, double *, double *); void print_extent(struct StringList *); void print_lasinfo(struct StringList *); +#endif #endif // INFO_H diff --git a/raster/r.in.pdal/main.cpp b/raster/r.in.pdal/main.cpp index 36d09558eea..fe17e80cb08 100644 --- a/raster/r.in.pdal/main.cpp +++ b/raster/r.in.pdal/main.cpp @@ -4,12 +4,12 @@ * * AUTHOR(S): Vaclav Petras * Based on r.in.xyz and r.in.lidar by Markus Metz, - * Hamish Bowman, Volker Wichmann + * Hamish Bowman, Volker Wichmann, Maris Nartiss * * PURPOSE: Imports LAS LiDAR point clouds to a raster map using * aggregate statistics. * - * COPYRIGHT: (C) 2019-2021 by Vaclav Petras and the GRASS Development Team + * COPYRIGHT: (C) 2019-2024 by Vaclav Petras and the GRASS Development Team * * This program is free software under the GNU General Public * License (>=v2). 
Read the file COPYING that comes with @@ -446,12 +446,20 @@ int main(int argc, char *argv[]) /* If we print extent, there is no need to validate rest of the input */ if (print_extent_flag->answer) { +#ifdef PDAL_USE_NOSRS + print_extent(&infiles, over_flag->answer); +#else print_extent(&infiles); +#endif exit(EXIT_SUCCESS); } if (print_info_flag->answer) { +#ifdef PDAL_USE_NOSRS + print_lasinfo(&infiles, over_flag->answer); +#else print_lasinfo(&infiles); +#endif exit(EXIT_SUCCESS); } @@ -507,7 +515,12 @@ int main(int argc, char *argv[]) if (extents_flag->answer) { double min_x, max_x, min_y, max_y, min_z, max_z; +#ifdef PDAL_USE_NOSRS + get_extent(&infiles, &min_x, &max_x, &min_y, &max_y, &min_z, &max_z, + over_flag->answer); +#else get_extent(&infiles, &min_x, &max_x, &min_y, &max_y, &min_z, &max_z); +#endif region.east = xmax = max_x; region.west = xmin = min_x; @@ -711,16 +724,24 @@ int main(int argc, char *argv[]) std::string pdal_read_driver = factory.inferReaderDriver(infile); if (pdal_read_driver.empty()) - G_fatal_error("Cannot determine input file type of <%s>", infile); + G_fatal_error(_("Cannot determine input file type of <%s>"), + infile); pdal::Options las_opts; pdal::Option las_opt("filename", infile); las_opts.add(las_opt); +#ifdef PDAL_USE_NOSRS + if (over_flag->answer) { + pdal::Option nosrs_opt("nosrs", true); + las_opts.add(nosrs_opt); + } +#endif // stages created by factory are destroyed with the factory pdal::Stage *reader = factory.createStage(pdal_read_driver); if (!reader) - G_fatal_error("PDAL reader creation failed, a wrong format of <%s>", - infile); + G_fatal_error( + _("PDAL reader creation failed, a wrong format of <%s>"), + infile); reader->setOptions(las_opts); readers.push_back(reader); merge_filter.setInput(*reader); @@ -779,7 +800,12 @@ int main(int argc, char *argv[]) // consumption, so using 10k in case it is faster for some cases pdal::point_count_t point_table_capacity = 10000; pdal::FixedPointTable point_table(point_table_capacity); - binning_writer.prepare(point_table); + try { + binning_writer.prepare(point_table); + } + catch (const std::exception &err) { + G_fatal_error(_("PDAL error: %s"), err.what()); + } // getting projection is possible only after prepare if (over_flag->answer) { diff --git a/raster/r.in.pdal/r.in.pdal.html b/raster/r.in.pdal/r.in.pdal.html index 92cd25205e8..61afe778c6a 100644 --- a/raster/r.in.pdal/r.in.pdal.html +++ b/raster/r.in.pdal/r.in.pdal.html @@ -123,7 +123,7 @@

    Statistics

    coeff_var
    This computes the coefficient of variance of point values for each cell. Coefficient of variance is given in percentage and defined as -100 * sqrt(variance) / mean.
    +100 * sqrt(variance) / mean.
    median
    This computes the median of point values for each cell
    mode
    @@ -238,7 +238,7 @@

    Filtering and selection

    The user can use a combination of r.in.pdal output maps to create custom raster-based filters, for example, use r.mapcalc to create -a mean-(2*stddev) map. (In this example the user may want to +a mean-(2*stddev) map. (In this example the user may want to include a lower bound filter in r.mapcalc to remove highly variable points (small n) or run r.neighbors to smooth the stddev map before further use.) @@ -394,10 +394,10 @@

    Memory consumption

    (> 10000x10000 pixels). If the module refuses to start complaining that there isn't enough memory, use the percent parameter to run the module in several passes. -In addition using a less precise map format (CELL [integer] or -FCELL [floating point]) will use less memory than a DCELL +In addition using a less precise map format (CELL [integer] or +FCELL [floating point]) will use less memory than a DCELL [double precision floating point] output map. -For methods=n, mode, sidnmin, sidnmax, the CELL +For methods=n, mode, sidnmin, sidnmax, the CELL format is used automatically.

    @@ -418,7 +418,7 @@

    Memory consumption

    the number of data points.

-The default map type=FCELL is intended as compromise between +The default map type=FCELL is intended as a compromise between preserving data precision and limiting system resource consumption.

    Trim option

    @@ -512,28 +512,28 @@

    Serpent Mound dataset

    This example is analogous to the example used in the GRASS wiki page for importing LAS as raster DEM. -

    The sample LAS data are in the file "Serpent Mound Model LAS Data.las", +

    The sample LAS data are in the file "Serpent Mound Model LAS Data.laz", available at -appliedimagery.com: +Serpent Mound Model LAS Data.laz:

     # print LAS file info
    -r.in.pdal -p input="Serpent Mound Model LAS Data.las"
    +r.in.pdal -p input="Serpent Mound Model LAS Data.laz"
     
     # using v.in.lidar to create a new project
     # create a project with CRS information of the LAS data
    -v.in.lidar -i input="Serpent Mound Model LAS Data.las" project=Serpent_Mound
    +v.in.lidar -i input="Serpent Mound Model LAS Data.laz" project=Serpent_Mound
     
     # quit and restart GRASS in the newly created project "Serpent_Mound"
     
     # scan the extents of the LAS data
    -r.in.pdal -g input="Serpent Mound Model LAS Data.las"
    +r.in.pdal -g input="Serpent Mound Model LAS Data.laz"
     
     # set the region to the extents of the LAS data, align to resolution
     g.region n=4323641.57 s=4320942.61 w=289020.90 e=290106.02 res=1 -ap
     
     # import as raster DEM
    -r.in.pdal input="Serpent Mound Model LAS Data.las" \
    +r.in.pdal input="Serpent Mound Model LAS Data.laz" \
                output=Serpent_Mound_Model_LAS_Data method=mean
     
    @@ -546,7 +546,7 @@

    Height above ground

    The mean height above ground of the points can be computed for each raster cell (the ground elevation is given by the raster map -elevation): +elevation):
     g.region raster=elevation -p
    @@ -575,11 +575,11 @@ 

    Multiple file input

    On Linux and OSX, this file can be automatically generated with the command:
    -ls /home/user/data/*.laz > /home/user/data/filelist.txt
    +ls /home/user/data/*.laz > /home/user/data/filelist.txt
     
    On Windows:
    -dir /b c:\users\user\data\*.laz > c:\users\user\data\filelist.txt
    +dir /b c:\users\user\data\*.laz > c:\users\user\data\filelist.txt
     
    The mean height above ground example above would then be: @@ -611,8 +611,8 @@

    KNOWN ISSUES

• Only one method can be applied for a single run and multiple map output from a single run - (e.g. method=string[,string,...] output=name[,name,...] - or n=string mean=string) is no supported. + (e.g. method=string[,string,...] output=name[,name,...] + or n=string mean=string) is not supported.
    If you encounter any problems (or solutions!) please contact the GRASS @@ -647,19 +647,19 @@

    REFERENCES

  • V. Petras, A. Petrasova, J. Jeziorska, H. Mitasova (2016): Processing UAV and lidar point clouds in GRASS GIS. -XXIII ISPRS Congress 2016 [ISPRS Archives, ResearchGate] +XXIII ISPRS Congress 2016 [ISPRS Archives, ResearchGate]
  • -ASPRS LAS format +ASPRS LAS format
  • -PDAL - Point Data Abstraction Library +PDAL - Point Data Abstraction Library
  • AUTHORS

    Markus Metz
    Vaclav Petras, -NCSU GeoForAll Lab +NCSU GeoForAll Lab (base_raster option, documentation),
    Maris Nartiss, LU GZZF (refactoring, additional filters, custom dimension support) diff --git a/raster/r.in.pdal/testsuite/test_r_in_pdal_print.py b/raster/r.in.pdal/testsuite/test_r_in_pdal_print.py new file mode 100644 index 00000000000..2c613ab88e2 --- /dev/null +++ b/raster/r.in.pdal/testsuite/test_r_in_pdal_print.py @@ -0,0 +1,113 @@ +""" +Name: r.in.pdal info printing and error handling tests +Purpose: Validates output of LAS file property printing and handling + of broken LAS files + +Author: Maris Nartiss +Copyright: (C) 2024 by Maris Nartiss and the GRASS Development Team +Licence: This program is free software under the GNU General Public + License (>=v2). Read the file COPYING that comes with GRASS + for details. +""" + +import os +import pathlib +import shutil +import unittest +from tempfile import TemporaryDirectory + +from grass.script import core as grass +from grass.script import read_command +from grass.gunittest.case import TestCase +from grass.gunittest.main import test + + +class InfoTest(TestCase): + """ + Test printing of extent and metadata + + This test requires pdal CLI util to be available. + """ + + @classmethod + @unittest.skipIf(shutil.which("pdal") is None, "Cannot find pdal utility") + def setUpClass(cls): + """Ensures expected computational region and generated data""" + cls.use_temp_region() + cls.runModule("g.region", n=18, s=0, e=18, w=0, res=6) + + cls.data_dir = os.path.join(pathlib.Path(__file__).parent.absolute(), "data") + cls.point_file = os.path.join(cls.data_dir, "points.csv") + cls.tmp_dir = TemporaryDirectory() + cls.las_file = os.path.join(cls.tmp_dir.name, "points.las") + grass.call( + [ + "pdal", + "translate", + "-i", + cls.point_file, + "-o", + cls.las_file, + "-r", + "text", + "-w", + "las", + "--writers.las.format=0", + "--writers.las.extra_dims=all", + "--writers.las.minor_version=4", + ] + ) + cls.broken_las = os.path.join(cls.tmp_dir.name, "broken.las") + pathlib.Path(cls.broken_las).write_bytes(b"LASF") + + @classmethod + def tearDownClass(cls): + """Remove the temporary region and generated data""" + cls.tmp_dir.cleanup() + cls.del_temp_region() + + @unittest.skipIf(shutil.which("r.in.pdal") is None, "Cannot find r.in.pdal") + def test_extent_bad(self): + """A broken LAS file should result in an error""" + self.assertModuleFail("r.in.pdal", input=self.broken_las, flags="g", quiet=True) + + @unittest.skipIf(shutil.which("r.in.pdal") is None, "Cannot find r.in.pdal") + def test_info_bad(self): + """A broken LAS file should result in an error""" + self.assertModuleFail("r.in.pdal", input=self.broken_las, flags="p", quiet=True) + + @unittest.skipIf(shutil.which("r.in.pdal") is None, "Cannot find r.in.pdal") + def test_extent_good(self): + """Reported extent should match provided data""" + out = read_command("r.in.pdal", input=self.las_file, flags="g", quiet=True) + for kvp in out.strip().split(" "): + key, value = kvp.split("=") + if key == "n": + self.assertAlmostEqual(float(value), 17, places=6) + continue + if key == "s": + self.assertAlmostEqual(float(value), 1, places=6) + continue + if key == "e": + self.assertAlmostEqual(float(value), 17, places=6) + continue + if key == "w": + self.assertAlmostEqual(float(value), 1, places=6) + continue + if key == "t": + self.assertAlmostEqual(float(value), 28, places=6) + continue + if key == "b": + self.assertAlmostEqual(float(value), 1, places=6) + + @unittest.skipIf(shutil.which("r.in.pdal") is None, "Cannot find r.in.pdal") + def test_info_good(self): + """Validate successful file info 
printing""" + out = read_command("r.in.pdal", input=self.las_file, flags="p", quiet=True) + self.assertIn("File version = 1.4", out) + self.assertIn("File signature: LASF", out) + self.assertIn("Point count: 53", out) + + +if __name__ == "__main__": + test() diff --git a/raster/r.in.poly/r.in.poly.html b/raster/r.in.poly/r.in.poly.html index 71673f807a5..3860f414f7c 100644 --- a/raster/r.in.poly/r.in.poly.html +++ b/raster/r.in.poly/r.in.poly.html @@ -8,7 +8,7 @@

    DESCRIPTION

    The input file is an ASCII text file containing the polygon, linear, and point feature definitions. The format of this file is described in the -INPUT FORMAT section below. +INPUT FORMAT section below.

The number of raster rows to hold in memory is 4096 by default. @@ -31,7 +31,6 @@

    NOTES

    Polygons are filled, i.e. they define an area. -

    Input Format

    The input format for the input file consists of @@ -57,20 +56,20 @@

    Input Format

    -The A signals the beginning of a filled polygon. +The A signals the beginning of a filled polygon. It must appear in the first column. -The L signals the beginning of a linear feature. +The L signals the beginning of a linear feature. It also must appear in the first column. -The P signals the beginning of a single cell point feature. +The P signals the beginning of a single cell point feature. Again, it must appear in the first column. The coordinates of the vertices of the polygon, or the coordinates defining the linear or point feature follow and must have a space in the first column and at least one space between the easting and the northing. To give meaning to the features, the -"=" indicates that the feature currently being +"=" indicates that the feature currently being processed has category value cat# (which must be an integer) and a label (which may be more than one word, or which may be omitted). diff --git a/raster/r.in.xyz/r.in.xyz.html b/raster/r.in.xyz/r.in.xyz.html index 42e5e1ee7f8..55c04e74a2d 100644 --- a/raster/r.in.xyz/r.in.xyz.html +++ b/raster/r.in.xyz/r.in.xyz.html @@ -41,9 +41,9 @@

    DESCRIPTION

      -
    • Variance and derivatives use the biased estimator (n). [subject to change] +
    • Variance and derivatives use the biased estimator (n). [subject to change]
    • Coefficient of variance is given in percentage and defined as -(stddev/mean)*100. +(stddev/mean)*100.

    @@ -78,8 +78,8 @@

    Memory use

    will use a large amount of system memory for large raster regions (10000x10000). If the module refuses to start complaining that there isn't enough memory, use the percent parameter to run the module in several passes. -In addition using a less precise map format (CELL [integer] or -FCELL [floating point]) will use less memory than a DCELL +In addition using a less precise map format (CELL [integer] or +FCELL [floating point]) will use less memory than a DCELL [double precision floating point] output map. Methods such as n, min, max, sum will also use less memory, while stddev, variance, and coeff_var will use more. @@ -91,9 +91,9 @@

    Memory use

    but for the aggregate fns it will also depend on the number of data points. (?) -->

-The default map type=FCELL is intended as compromise between +The default map type=FCELL is intended as a compromise between preserving data precision and limiting system resource consumption. -If reading data from a stdin stream, the program can only run using +If reading data from a stdin stream, the program can only run using a single pass.

    Setting region bounds and resolution

    @@ -122,15 +122,15 @@

    Setting region bounds and resolution

    If you only intend to interpolate the data with r.to.vect and v.surf.rst, then there is little point to setting the region resolution so fine that you only catch one data point per cell -- you might -as well use "v.in.ascii -zbt" directly. +as well use "v.in.ascii -zbt" directly.

    Filtering

    Points falling outside the current region will be skipped. This includes points falling exactly on the southern region bound. -(to capture those adjust the region with "g.region s=s-0.000001"; +(to capture those adjust the region with "g.region s=s-0.000001"; see g.region) -

    Blank lines and comment lines starting with the hash symbol (#) +

    Blank lines and comment lines starting with the hash symbol (#) will be skipped.

    @@ -147,7 +147,7 @@

    Filtering

    The user can use a combination of r.in.xyz output maps to create -custom filters. e.g. use r.mapcalc to create a mean-(2*stddev) +custom filters. e.g. use r.mapcalc to create a mean-(2*stddev) map. [In this example the user may want to include a lower bound filter in r.mapcalc to remove highly variable points (small n) or run r.neighbors to smooth the stddev map before further use.] @@ -193,7 +193,7 @@

    Import of x,y,string data

r.in.xyz expects numeric values in the z column. In order to perform an occurrence count operation even on x,y data with non-numeric attribute(s), the data can be imported using either the x or y -coordinate as a fake z column for method=n (count +coordinate as a fake z column for method=n (count number of points per grid cell); the z values are ignored anyway.
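A sketch of the fake-z trick described above, under the assumption of a pipe-separated input file with x, y and a text label per line; all file and map names are hypothetical:

import grass.script as gs

# "places.csv" is assumed to contain lines like "634567.2|224563.8|some label".
# Repeat the x coordinate as a fake z column so that method=n can count points.
with open("places.csv") as src, open("places_xyz.txt", "w") as dst:
    for line in src:
        x, y, _label = line.rstrip("\n").split("|", 2)
        dst.write(f"{x}|{y}|{x}\n")

gs.run_command("r.in.xyz", input="places_xyz.txt", output="place_count",
               method="n", separator="pipe")

The z values (here the repeated x coordinate) are ignored for method=n, so only the per-cell counts matter.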

    EXAMPLES

    @@ -212,7 +212,7 @@

    Import of x,y,z ASCII into DEM

    @@ -287,16 +287,16 @@ 

    TODO

    • Support for multiple map output from a single run.
      - method=string[,string,...] output=name[,name,...]
      + method=string[,string,...] output=name[,name,...]
      This can be easily handled by a wrapper script, with the added - benefit of it being very simple to parallelize that way. + benefit of it being very simple to parallelize that way.

    KNOWN ISSUES

      -
    • "nan" can leak into coeff_var maps. -
      Cause unknown. Possible work-around: "r.null setnull=nan" +
    • "nan" can leak into coeff_var maps. +
      Cause unknown. Possible work-around: "r.null setnull=nan"
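The work-around quoted above can be scripted directly; a minimal grass.script sketch, with coeff_var_map standing in for the affected output map:

import grass.script as gs

# Either turn the literal "nan" cells into NULL in place ...
gs.run_command("r.null", map="coeff_var_map", setnull="nan")

# ... or derive a cleaned copy (NaN != NaN, so the comparison filters it out).
gs.mapcalc("no_nan = if(coeff_var_map == coeff_var_map, coeff_var_map, null())")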
    diff --git a/raster/r.info/r.info.html b/raster/r.info/r.info.html index 8cc7b1a429f..b32f11ce6b1 100644 --- a/raster/r.info/r.info.html +++ b/raster/r.info/r.info.html @@ -69,7 +69,7 @@

    EXAMPLES

    | Comments: | | slope map elev = elev_ned10m | | zfactor = 1.00 format = degrees | - | min_slope = 0.000000 | + | min_slope = 0.000000 | | | +----------------------------------------------------------------------------+
    @@ -78,7 +78,7 @@

    EXAMPLES

    subset of the available information by passing various flags to the module:

    -Output in shell script style, useful for eval (eval `r.info -g slope`): +Output in shell script style, useful for eval (eval `r.info -g slope`):

     r.info -g slope
    diff --git a/raster/r.kappa/r.kappa.html b/raster/r.kappa/r.kappa.html
    index b9a57ec0915..ad7c77a2b65 100644
    --- a/raster/r.kappa/r.kappa.html
    +++ b/raster/r.kappa/r.kappa.html
    @@ -116,7 +116,7 @@ 

    NOTES

    "On the performance of Matthews correlation coefficient (MCC) for imbalanced dataset". -

    EXAMPLE

    +

    EXAMPLE

    Example for North Carolina sample dataset: diff --git a/raster/r.lake/r.lake.html b/raster/r.lake/r.lake.html index ee18eb6d8cc..d70d1297e6f 100644 --- a/raster/r.lake/r.lake.html +++ b/raster/r.lake/r.lake.html @@ -1,7 +1,7 @@

    DESCRIPTION

    -

    The module fills a lake to a target water level from a given start point. The user -can think of it as r.grow with additional +r.lake fills a lake to a target water level from a given start point. The user +can think of it as r.grow with additional checks for elevation. The resulting raster map contains cells with values representing lake depth and NULL for all other cells beyond the lake. Lake depth is reported relative to specified water level @@ -44,30 +44,30 @@

    NOTES

    r.mapcalc equivalent - for GRASS hackers

This module was initially created as a script using -r.mapcalc. +r.mapcalc. This had some limitations - it was slow and no checks were done to find out the required iteration count. The shell script -code (using r.mapcalc) used +code (using r.mapcalc) used in the original script is shown below:
     ${seedmap} = if( ${dem}, \
    -if( if( isnull(${seedmap}),0,${seedmap}>0), ${wlevel}-${dem}, \
    +if( if( isnull(${seedmap}),0,${seedmap} > 0), ${wlevel}-${dem}, \
      if( \
    -  if(isnull(${seedmap}[-1,0]),0, ${seedmap}[-1,0]>0 && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[-1,1]),0, ${seedmap}[-1,1]>0 && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[0,1]), 0, ${seedmap}[0,1]>0  && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[1,1]), 0, ${seedmap}[1,1]>0  && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[1,0]), 0, ${seedmap}[1,0]>0  && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[1,-1]),0, ${seedmap}[1,-1]>0 && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[0,-1]),0, ${seedmap}[0,-1]>0 && ${wlevel}>${dem}) ||\
    -  if(isnull(${seedmap}[-1,-1]),0, ${seedmap}[-1,-1]>0 && ${wlevel}>${dem}),\
    +  if(isnull(${seedmap}[-1,0]),0, ${seedmap}[-1,0] > 0 && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[-1,1]),0, ${seedmap}[-1,1] > 0 && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[0,1]), 0, ${seedmap}[0,1] > 0  && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[1,1]), 0, ${seedmap}[1,1] > 0  && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[1,0]), 0, ${seedmap}[1,0] > 0  && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[1,-1]),0, ${seedmap}[1,-1] > 0 && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[0,-1]),0, ${seedmap}[0,-1] > 0 && ${wlevel} > ${dem}) ||\
    +  if(isnull(${seedmap}[-1,-1]),0, ${seedmap}[-1,-1] > 0 && ${wlevel} > ${dem}),\
      ${wlevel}-${dem}, null() )))
     

-The ${seedmap} variable is replaced by seed map names, ${dem} -with DEM map name, and ${wlevel} with target water level. To get +The ${seedmap} variable is replaced by seed map names, ${dem} +with DEM map name, and ${wlevel} with target water level. To get a single water level, this code block is called with the same level numerous times (in a loop), as the lake grows by single cells during a single run. @@ -77,7 +77,7 @@
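A hypothetical Python sketch of the kind of loop described above, not the module's actual implementation: the expression is abbreviated (the eight neighbour checks are left out) and the map names and water level are illustrative only.

import grass.script as gs

seedmap, dem, wlevel = "lake", "elevation", 113.5   # illustrative names and level

# Abbreviated form of the expression shown above; only the centre-cell test is kept.
expr = (
    f"lake_step = if({dem}, "
    f"if(if(isnull({seedmap}), 0, {seedmap} > 0), {wlevel} - {dem}, null()))"
)

previous = -1
while True:
    gs.mapcalc(expr, overwrite=True)
    gs.run_command("g.copy", raster=f"lake_step,{seedmap}", overwrite=True)
    cells = int(gs.parse_command("r.univar", map=seedmap, flags="g")["n"])
    if cells == previous:   # the lake stopped growing
        break
    previous = cells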

    KNOWN ISSUES

    • The entire map is loaded into RAM.
    • A completely negative seed map will not work! At least one cell must have - a value > 0. Output from r.lake -n cannot be used + a value > 0. Output from r.lake -n cannot be used as input in the next run.
    @@ -100,7 +100,7 @@

    EXAMPLE

    -
    +
    Small flooding along a street (r.lake, using Lidar 1m DEM)
    diff --git a/raster/r.li/r.li.cwed/r.li.cwed.html b/raster/r.li/r.li.cwed/r.li.cwed.html index 0fe92bd560e..88ac06f2ad7 100644 --- a/raster/r.li/r.li.cwed/r.li.cwed.html +++ b/raster/r.li/r.li.cwed/r.li.cwed.html @@ -12,12 +12,12 @@

    DESCRIPTION

    landscape between patch types i and k
  • dik: dissimilarity (edge contrast weight) between patch types i and k
  • -
  • Area: total landscape area
    -
    +
  • Area: total landscape area
• + The input file contains a row for each pair of patch types that we want to consider in the calculation. Each row must be saved using this syntax:
    -patchType1,patchType2,dissimilarityBetweenPatchType1andPatchType2
    +patchType1,patchType2,dissimilarityBetweenPatchType1andPatchType2
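For illustration only, such a file could be written as below; the patch type numbers are the forest classes used in the Spearfish examples and the dissimilarity weights are invented:

# Each row: patchType1,patchType2,dissimilarity (edge contrast weight)
rows = [
    "41,42,0.2",   # hypothetical weight between patch types 41 and 42
    "41,43,0.6",
    "42,43,0.4",
]
with open("cwed_weights.csv", "w") as weights_file:
    weights_file.write("\n".join(rows) + "\n")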

    NOTES

    @@ -26,8 +26,8 @@

    NOTES

    If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

    If the sample area contains only NULL values it is considered to @@ -60,7 +60,7 @@

    EXAMPLES

    SEE ALSO

    -r.li - package overview
    +r.li (package overview), g.gui.rlisetup
    @@ -68,10 +68,10 @@

    REFERENCES

    McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

    AUTHORS

    -Serena Pallecchi student of Computer Science University of Pisa (Italy).
    +Serena Pallecchi, student of Computer Science University of Pisa (Italy).
    Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it)
    Markus Metz diff --git a/raster/r.li/r.li.daemon/r.li.daemon.html b/raster/r.li/r.li.daemon/r.li.daemon.html index d2640a03fba..13f190df219 100644 --- a/raster/r.li/r.li.daemon/r.li.daemon.html +++ b/raster/r.li/r.li.daemon/r.li.daemon.html @@ -24,35 +24,35 @@

    DESCRIPTION

    To write a new index only two steps are needed:

      -
    1. - Define a function and insert its declaration on file index.h in r.li.daemon - folder, which contains all index declarations. This function must be of this kind: -
      +    
    2. + Define a function and insert its declaration on file index.h in r.li.daemon + folder, which contains all index declarations. This function must be of this kind: +
               int index(int fd, char ** par, area_des ad, double * result)
      -	
      - where:
        -
      • fd is the raster map descriptor -
      • par is a matrix for special parameter (like argv in main) -
      • ad is the area descriptor -
      • result is where to put the index calculation result -
      - This function has to return 1 on success and 0 otherwise. - This function type is defined using typedef named rli_func. -
    3. - Create a main for command line arguments parsing, and call the function -
      +    
      + where:
        +
      • fd is the raster map descriptor
      • +
      • par is a matrix for special parameter (like argv in main)
      • +
      • ad is the area descriptor
      • +
      • result is where to put the index calculation result
      • +
      + This function has to return 1 on success and 0 otherwise. + This function type is defined using typedef named rli_func.
    4. +
    5. + Create a main for command line arguments parsing, and call the function +
               int calculateIndex(char *file, rli_func *f,
                                  char **parameters, char *raster, char *output);
      -	
      - from the r.li library, for starting raster analysis.
      - It follows the meaning of parameters: -
        -
      • file name of configuration file created using g.gui.rlisetup -
      • f pointer to index function defined above -
      • parameters pointer to index special parameters -
      • raster name of raster to use -
      • output output file name -
      +
    6. + from the r.li library, for starting raster analysis.
+ The meaning of the parameters is as follows: +
        +
      • file name of configuration file created using g.gui.rlisetup
      • +
      • f pointer to index function defined above
      • +
      • parameters pointer to index special parameters
      • +
      • raster name of raster to use
      • +
      • output output file name
      • +
    Compile it using a changed Makefile based on the file for r.li.patchdensity. @@ -82,7 +82,7 @@

    REFERENCES

    McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

    AUTHORS

    diff --git a/raster/r.li/r.li.dominance/r.li.dominance.html b/raster/r.li/r.li.dominance/r.li.dominance.html index 13bef4620f4..15679874171 100644 --- a/raster/r.li/r.li.dominance/r.li.dominance.html +++ b/raster/r.li/r.li.dominance/r.li.dominance.html @@ -17,8 +17,8 @@

    NOTES

    If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

    If the input raster map contains only NULL values then @@ -37,7 +37,7 @@

    EXAMPLES

    Forest map (Spearfish sample dataset) example:
     g.region raster=landcover.30m -p
    -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
    +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
     r.li.dominance input=forests conf=movwindow7 out=forests_dominance_mov7
     r.univar forests_dominance_mov7
     
    @@ -60,7 +60,7 @@

    EXAMPLES

    SEE ALSO

    -r.li - package overview
    +r.li (package overview), g.gui.rlisetup
    @@ -68,10 +68,10 @@

    REFERENCES

    McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

    AUTHORS

    -Serena Pallecchi student of Computer Science University of Pisa (Italy).
    +Serena Pallecchi, student of Computer Science University of Pisa (Italy).
    Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it)
    Markus Metz diff --git a/raster/r.li/r.li.edgedensity/r.li.edgedensity.html b/raster/r.li/r.li.edgedensity/r.li.edgedensity.html index 229a3e28686..840a0cf9650 100644 --- a/raster/r.li/r.li.edgedensity/r.li.edgedensity.html +++ b/raster/r.li/r.li.edgedensity/r.li.edgedensity.html @@ -1,14 +1,14 @@

    DESCRIPTION

    -r.li.edgedensity calculates:
    -
    +r.li.edgedensity calculates: +
      -
    • the density of all edges of patch type k - r.li.edgedensity formula 1 - , or -
    • the density of all edges in the sampling area if k is - not specified, - r.li.edgedensity formula 2 +
    • the density of all edges of patch type k +r.li.edgedensity formula 1 +, or
    • +
    • the density of all edges in the sampling area if k is +not specified, +r.li.edgedensity formula 2

    with:

      @@ -29,8 +29,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster map contains only NULL values then r.li.edgedensity @@ -58,7 +58,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.edgedensity input=forests conf=movwindow7 out=forests_edgedens_mov7
       r.univar forests_edgedens_mov7
       
      @@ -81,7 +81,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -89,10 +89,10 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      -Serena Pallecchi student of Computer Science University of Pisa (Italy).
      +Serena Pallecchi, student of Computer Science University of Pisa (Italy).
      Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it)
      Markus Metz diff --git a/raster/r.li/r.li.html b/raster/r.li/r.li.html index bfe7c3498d0..37e40d6cfc7 100644 --- a/raster/r.li/r.li.html +++ b/raster/r.li/r.li.html @@ -38,12 +38,12 @@

      NOTES

      1. run g.gui.rlisetup: create a configuration file selecting the parts of raster map to be analyzed. This file allows re-running - an analysis easily. It is stored on Windows in the directory C:\Users\userxy\AppData\Roaming\GRASS8\r.li\, on GNU/Linux in - $HOME/.grass8/r.li/. + an analysis easily. It is stored on Windows in the directory C:\Users\userxy\AppData\Roaming\GRASS8\r.li\, on GNU/Linux in + $HOME/.grass8/r.li/.
2. run one or more of the r.li.[index] modules (e.g., r.li.patchdensity) to calculate the selected index - using on the areas selected on configuration file. + using the areas selected in the configuration file.
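Step 2 can be scripted with grass.script as well; a short sketch reusing the movwindow7 configuration and the forests map from the examples below:

import grass.script as gs

# Run one landscape index on the areas defined in the configuration file
# created with g.gui.rlisetup (step 1).
gs.run_command("r.li.patchdensity", input="forests",
               conf="movwindow7", output="forests_p_dens7")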
      If the input raster map contains only NULL values then r.li.mpa considers to @@ -45,7 +45,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.mpa input=forests conf=movwindow7 out=forests_mpa_mov7
       r.univar forests_mpa_mov7
       
      @@ -68,7 +68,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -76,10 +76,10 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      -Serena Pallecchi student of Computer Science University of Pisa (Italy).
      +Serena Pallecchi, student of Computer Science University of Pisa (Italy).
      Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it)
      Markus Metz diff --git a/raster/r.li/r.li.mps/r.li.mps.html b/raster/r.li/r.li.mps/r.li.mps.html index c5713402a37..e7548a9db76 100644 --- a/raster/r.li/r.li.mps/r.li.mps.html +++ b/raster/r.li/r.li.mps/r.li.mps.html @@ -28,8 +28,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the sample area contains only NULL value cells, r.li.mps returns 0 (zero).
      @@ -47,7 +47,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.mps input=forests conf=movwindow7 out=forests_mps_mov7
       r.univar forests_mps_mov7
       
      @@ -70,7 +70,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -78,7 +78,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.padcv/r.li.padcv.html b/raster/r.li/r.li.padcv/r.li.padcv.html index fde03942d3d..23aed65eedd 100644 --- a/raster/r.li/r.li.padcv/r.li.padcv.html +++ b/raster/r.li/r.li.padcv/r.li.padcv.html @@ -19,8 +19,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      The result is NULL if the sample area contains only NULL values. @@ -37,7 +37,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.padcv input=forests conf=movwindow7 out=forests_padcv_mov7
       r.univar forests_padcv_mov7
       
      @@ -60,7 +60,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -68,7 +68,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.padrange/r.li.padrange.html b/raster/r.li/r.li.padrange/r.li.padrange.html index 59ac2340ce1..6502c137f1e 100644 --- a/raster/r.li/r.li.padrange/r.li.padrange.html +++ b/raster/r.li/r.li.padrange/r.li.padrange.html @@ -20,8 +20,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the sample area contains only NULL values then r.li.padrange returns NULL.
      @@ -39,7 +39,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.padrange input=forests conf=movwindow7 out=forests_padrange_mov7
       r.univar forests_padrange_mov7
       
      @@ -62,7 +62,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -70,7 +70,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.padsd/r.li.padsd.html b/raster/r.li/r.li.padsd/r.li.padsd.html index 49dda1e08f5..379049c9506 100644 --- a/raster/r.li/r.li.padsd/r.li.padsd.html +++ b/raster/r.li/r.li.padsd/r.li.padsd.html @@ -21,8 +21,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      The result is NULL if the sample area contains only NULL values. @@ -39,7 +39,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.padsd input=forests conf=movwindow7 out=forests_padsd_mov7
       r.univar forests_padsd_mov7
       
      @@ -62,7 +62,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -70,7 +70,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.patchdensity/r.li.patchdensity.html b/raster/r.li/r.li.patchdensity/r.li.patchdensity.html index c351d43d697..168a45d9772 100644 --- a/raster/r.li/r.li.patchdensity/r.li.patchdensity.html +++ b/raster/r.li/r.li.patchdensity/r.li.patchdensity.html @@ -23,8 +23,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      A sample area of only NULL values is considered to have zero patches, that is, the result is always ≥ 0. @@ -44,7 +44,7 @@

      EXAMPLES

      g.region raster=landcover.30m -p # extract forested areas: r.category landcover.30m -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43, 1, null())" +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43, 1, null())" # patch density (7x7 moving window defined in g.gui.rlisetup): r.li.patchdensity forests conf=movwindow7 out=forests_p_dens7 @@ -73,7 +73,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -81,7 +81,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.patchnum/r.li.patchnum.html b/raster/r.li/r.li.patchnum/r.li.patchnum.html index b531cc261ec..743f965b5b8 100644 --- a/raster/r.li/r.li.patchnum/r.li.patchnum.html +++ b/raster/r.li/r.li.patchnum/r.li.patchnum.html @@ -1,7 +1,7 @@

      DESCRIPTION

      r.li.patchnum calculates the "patch number index" as:
      - f(sample_area)= Patch_Number
      + f(sample_area)= Patch_Number

      This index is calculated using a 4 neighbour algorithm, diagonal cells @@ -14,8 +14,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the sample area contains only NULL values then it is considered to have zero patches.
      @@ -33,7 +33,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.patchnum input=forests conf=movwindow7 out=forests_patchnum_mov7
       r.univar forests_patchnum_mov7
       
      @@ -56,7 +56,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -64,7 +64,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.pielou/r.li.pielou.html b/raster/r.li/r.li.pielou/r.li.pielou.html index 698838513d9..f8f9e2730fb 100644 --- a/raster/r.li/r.li.pielou/r.li.pielou.html +++ b/raster/r.li/r.li.pielou/r.li.pielou.html @@ -16,8 +16,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster contains NULL value cells, r.li.pielou @@ -36,7 +36,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.pielou input=forests conf=movwindow7 out=forests_pielou_mov7
       r.univar forests_pielou_mov7
       
      @@ -59,7 +59,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -67,9 +67,9 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      Luca Delucchi and Duccio Rocchini, Fondazione E. Mach (Italy), based on the r.li.shannon code -developed by Serena Pallecchi student of Computer Science University of Pisa (Italy) +developed by Serena Pallecchi, student of Computer Science University of Pisa (Italy) diff --git a/raster/r.li/r.li.renyi/r.li.renyi.html b/raster/r.li/r.li.renyi/r.li.renyi.html index cca618495d2..3b866356765 100644 --- a/raster/r.li/r.li.renyi/r.li.renyi.html +++ b/raster/r.li/r.li.renyi/r.li.renyi.html @@ -19,8 +19,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster map contains only NULL values then r.li.renyi @@ -39,7 +39,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.renyi input=forests conf=movwindow7 out=forests_renyi_mov7_a06 alpha=0.6
       r.univar forests_renyi_mov7_a06
       
      @@ -62,7 +62,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -70,10 +70,10 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      Luca Delucchi and Duccio Rocchini, Fondazione E. Mach (Italy), based on -the r.li.shannon code developed by Serena Pallecchi student of +the r.li.shannon code developed by Serena Pallecchi, student of Computer Science University of Pisa (Italy). diff --git a/raster/r.li/r.li.richness/r.li.richness.html b/raster/r.li/r.li.richness/r.li.richness.html index 85e8516270b..32bc7f99b0c 100644 --- a/raster/r.li/r.li.richness/r.li.richness.html +++ b/raster/r.li/r.li.richness/r.li.richness.html @@ -16,8 +16,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster map contains only NULL values then r.li.richness @@ -36,7 +36,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.richness input=forests conf=movwindow7 out=forests_richness_mov7
       r.univar forests_richness_mov7
       
      @@ -59,7 +59,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -67,9 +67,9 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      -Serena Pallecchi student of Computer Science University of Pisa (Italy).
      +Serena Pallecchi, student of Computer Science University of Pisa (Italy).
      Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it) diff --git a/raster/r.li/r.li.shannon/r.li.shannon.html b/raster/r.li/r.li.shannon/r.li.shannon.html index 1940b31adc6..76ef70ebc25 100644 --- a/raster/r.li/r.li.shannon/r.li.shannon.html +++ b/raster/r.li/r.li.shannon/r.li.shannon.html @@ -17,8 +17,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster map contains only NULL values then r.li.shannon @@ -37,7 +37,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.shannon input=forests conf=movwindow7 out=forests_shannon_mov7
       r.univar forests_shannon_mov7
       
      @@ -60,7 +60,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -68,9 +68,9 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      -Serena Pallecchi student of Computer Science University of Pisa (Italy).
      +Serena Pallecchi, student of Computer Science University of Pisa (Italy).
      Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it) diff --git a/raster/r.li/r.li.shape/r.li.shape.html b/raster/r.li/r.li.shape/r.li.shape.html index 43ad1e4bfa9..5b7d7219588 100644 --- a/raster/r.li/r.li.shape/r.li.shape.html +++ b/raster/r.li/r.li.shape/r.li.shape.html @@ -18,8 +18,8 @@

      NOTES

      If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster map contains only NULL values then r.li.shape @@ -38,7 +38,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.shape input=forests conf=movwindow7 out=forests_shape_mov7
       r.univar forests_shape_mov7
       
      @@ -61,7 +61,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -69,7 +69,7 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      diff --git a/raster/r.li/r.li.simpson/r.li.simpson.html b/raster/r.li/r.li.simpson/r.li.simpson.html index 2600f9648c7..fb33b7cad98 100644 --- a/raster/r.li/r.li.simpson/r.li.simpson.html +++ b/raster/r.li/r.li.simpson/r.li.simpson.html @@ -17,8 +17,8 @@

      NOTES

      file/map parameters. If the "moving window" method was selected in g.gui.rlisetup, then the output will be a raster map, otherwise an ASCII file will be generated in -the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ -(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux). +the folder C:\Users\userxy\AppData\Roaming\GRASS8\r.li\output\ +(MS-Windows) or $HOME/.grass8/r.li/output/ (GNU/Linux).

      If the input raster map contains only NULL values then r.li.simpson @@ -37,7 +37,7 @@

      EXAMPLES

      Forest map (Spearfish sample dataset) example:
       g.region raster=landcover.30m -p
      -r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
      +r.mapcalc "forests = if(landcover.30m >= 41 && landcover.30m <= 43,1,null())"
       r.li.simpson input=forests conf=movwindow7 out=forests_simpson_mov7
       r.univar forests_simpson_mov7
       
      @@ -60,7 +60,7 @@

      EXAMPLES

      SEE ALSO

      -r.li - package overview
      +r.li (package overview), g.gui.rlisetup
      @@ -68,9 +68,9 @@

      REFERENCES

      McGarigal, K., and B. J. Marks. 1995. FRAGSTATS: spatial pattern analysis program for quantifying landscape structure. USDA For. Serv. -Gen. Tech. Rep. PNW-351. (PDF) +Gen. Tech. Rep. PNW-351. (PDF)

      AUTHORS

      -Serena Pallecchi student of Computer Science University of Pisa (Italy).
+Serena Pallecchi, student of Computer Science, University of Pisa (Italy).
      Commission from Faunalia Pontedera (PI), Italy (www.faunalia.it) diff --git a/raster/r.mapcalc/r.mapcalc.html b/raster/r.mapcalc/r.mapcalc.html index 5089b78537a..5f943f2abae 100644 --- a/raster/r.mapcalc/r.mapcalc.html +++ b/raster/r.mapcalc/r.mapcalc.html @@ -414,8 +414,8 @@

      Data types and their precision

    Note that the value counter wraps around when the value overflows its range. -E.g., if your expression is a = int(2147483648), you will get NULL -value. For expression a = int(2147483649), you will reach the lowest +E.g., if your expression is a = int(2147483648), you will get NULL +value. For expression a = int(2147483649), you will reach the lowest value possible instead, i.e. -2147483647.
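
For instance (a sketch; r.info -r prints the resulting range so the wrap-around can be inspected):

 r.mapcalc "a = int(2147483648)"    # overflows: all cells become NULL
 r.mapcalc "b = int(2147483649)"    # wraps to the lowest value, -2147483647
 r.info -r map=a
 r.info -r map=b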

    Floating point values in the expression

    @@ -445,42 +445,42 @@

    Floating point values in the expression

    NULL support

      -
    • Division by zero should result in NULL. -
    • Modulus by zero should result in NULL. +
    • Division by zero should result in NULL.
    • +
    • Modulus by zero should result in NULL.
    • NULL-values in any arithmetic or logical operation should result -in NULL. (however, &&& and ||| are treated specially, as described below). +in NULL. (however, &&& and ||| are treated specially, as described below).
    • The &&& and ||| operators observe the following axioms even when x is NULL:
      -	x &&& false == false
      -	false &&& x == false
      -	x ||| true == true
      -	true ||| x == true
      -
      + x &&& false == false + false &&& x == false + x ||| true == true + true ||| x == true +
    • NULL-values in function arguments should result in NULL (however, -if(), eval() and isnull() are treated specially, as described below). -
    • The eval() function always returns its last argument +if(), eval() and isnull() are treated specially, as described below).
    • +
    • The eval() function always returns its last argument
    • The situation for if() is:
       if(x)
      -	NULL if x is NULL; 0 if x is zero; 1 otherwise
      +    NULL if x is NULL; 0 if x is zero; 1 otherwise
       if(x,a)
      -	NULL if x is NULL; a if x is non-zero; 0 otherwise
      +    NULL if x is NULL; a if x is non-zero; 0 otherwise
       if(x,a,b)
      -	NULL if x is NULL; a if x is non-zero; b otherwise
      +    NULL if x is NULL; a if x is non-zero; b otherwise
       if(x,n,z,p)
      -	NULL if x is NULL; n if x is negative;
      +    NULL if x is NULL; n if x is negative;
       z if x is zero; p if x is positive
      -
      +
    • The (new) function isnull(x) returns: 1 if x is NULL; 0 otherwise. The (new) function null() -(which has no arguments) returns an integer NULL. +(which has no arguments) returns an integer NULL.
    • Non-NULL, but invalid, arguments to functions should result in NULL.
       Examples:
       log(-2)
       sqrt(-2)
       pow(a,b) where a is negative and b is not an integer
      -
      +

    NULL support: Please note that any math performed with NULL cells always results in a NULL value for these cells. If you want to replace a NULL cell @@ -493,28 +493,28 @@

    NULL support

    NULL and conditions:

    For the one argument form:

    -if(x) = NULL		if x is NULL
    -if(x) = 0		if x = 0
    -if(x) = 1		otherwise (i.e. x is neither NULL nor 0).
    +if(x) = NULL        if x is NULL
    +if(x) = 0        if x = 0
    +if(x) = 1        otherwise (i.e. x is neither NULL nor 0).
     

    For the two argument form:

    -if(x,a) = NULL		if x is NULL
    -if(x,a) = 0		if x = 0
    -if(x,a) = a		otherwise (i.e. x is neither NULL nor 0).
    +if(x,a) = NULL        if x is NULL
    +if(x,a) = 0        if x = 0
    +if(x,a) = a        otherwise (i.e. x is neither NULL nor 0).
     

    For the three argument form:

    -if(x,a,b) = NULL	if x is NULL
    -if(x,a,b) = b		if x = 0
    -if(x,a,b) = a		otherwise (i.e. x is neither NULL nor 0).
    +if(x,a,b) = NULL    if x is NULL
    +if(x,a,b) = b        if x = 0
    +if(x,a,b) = a        otherwise (i.e. x is neither NULL nor 0).
     

    For the four argument form:

    -if(x,a,b,c) = NULL	if x is NULL
    -if(x,a,b,c) = a		if x > 0
    -if(x,a,b,c) = b		if x = 0
    -if(x,a,b,c) = c		if x < 0
    +if(x,a,b,c) = NULL    if x is NULL
    +if(x,a,b,c) = a        if x > 0
    +if(x,a,b,c) = b        if x = 0
    +if(x,a,b,c) = c        if x < 0
     
    More generally, all operators and most functions return NULL if *any* of their arguments are NULL. @@ -528,8 +528,8 @@

    NULL support

    and 4 argument forms of if() return NULL if the "selected" argument is NULL, e.g.:
    -if(0,a,b) = b	regardless of whether a is NULL
    -if(1,a,b) = a	regardless of whether b is NULL
    +if(0,a,b) = b    regardless of whether a is NULL
    +if(1,a,b) = a    regardless of whether b is NULL
     
    eval() always returns its last argument, so it only returns NULL if the last argument is NULL. @@ -557,8 +557,8 @@
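
A small illustration of these rules (a minimal sketch; a is a placeholder map assumed to contain NULL cells):

 r.mapcalc "sel = if(0, a, 5)"       # 5 everywhere, even where a is NULL
 r.mapcalc "and_op = null() && 0"    # NULL: ordinary operators propagate NULL
 r.mapcalc "and_ax = null() &&& 0"   # 0: the axiom "x &&& false == false" applies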

    Usage from command line

     'result = elevation * 2'
     
    -Without the quotes, the *, which has special meaning to the UNIX shell, -would be altered and r.mapcalc would see something other than the *. +Without the quotes, the *, which has special meaning to the UNIX shell, +would be altered and r.mapcalc would see something other than the *.

    Multiple computations

    @@ -571,7 +571,7 @@

    Multiple computations

    use:

    -	r.mapcalc <<EOF
    +    r.mapcalc <<EOF
             $GIS_OPT_OUTPUT.r = r#$GIS_OPT_FIRST * .$GIS_OPT_PERCENT + (1.0 - .$GIS_OPT_PERCENT) * r#$GIS_OPT_SECOND
             $GIS_OPT_OUTPUT.g = g#$GIS_OPT_FIRST * .$GIS_OPT_PERCENT + (1.0 - .$GIS_OPT_PERCENT) * g#$GIS_OPT_SECOND
             $GIS_OPT_OUTPUT.b = b#$GIS_OPT_FIRST * .$GIS_OPT_PERCENT + (1.0 - .$GIS_OPT_PERCENT) * b#$GIS_OPT_SECOND
    @@ -583,7 +583,7 @@ 

    Backwards compatibility

    For the backwards compatibility with GRASS 6, -if no options are given, it manufactures file=- (which reads from +if no options are given, it manufactures file=- (which reads from stdin), so you can continue to use e.g.:
     r.mapcalc < file
    @@ -594,7 +594,7 @@ 

    Backwards compatibility

    foo = 1 EOF
    -But unless you need compatibility with previous GRASS GIS versions, use file= +But unless you need compatibility with previous GRASS GIS versions, use file= explicitly, as stated above.

    When the map name contains uppercase letter(s) or a dot which are not @@ -653,7 +653,7 @@

    Raster MASK handling

    eval function

    If the output of the computation should be only one map but the expression is so complex that it is better to split it -to several expressions, the eval function can be used: +to several expressions, the eval function can be used:
     r.mapcalc << EOF
     eval(elev_200 = elevation - 200, \
    @@ -662,23 +662,23 @@ 

    eval function

    elevation_result = (0.5 * elev_200) + 0.8 * elev_p EOF
    -This example uses unix-like << EOF syntax to provide +This example uses unix-like << EOF syntax to provide input to r.mapcalc.

Note that the temporary variables (maps) are not created and thus it does not matter whether they exist or not. -In the example above, if map elev_200 exists it will not be +In the example above, if map elev_200 exists it will not be overwritten and no error will be generated. -The reason is that the name elev_200 now denotes the temporary +The reason is that the name elev_200 now denotes the temporary variable (map) and not the existing map. -The following parts of the expression will use the temporary elev_200 -and the existing elev_200 will be left intact and will not be used. +The following parts of the expression will use the temporary elev_200 +and the existing elev_200 will be left intact and will not be used. If a user wants to use the existing map, the name of the temporary variable (map) must be changed.

    Using the same map for input and output results

    A map cannot be used both as an input and as an output as in -this invalid expression oldmap = oldmap + 1, instead +this invalid expression oldmap = oldmap + 1, instead a subsequent rename using g.rename is needed when the same name is desired: @@ -735,7 +735,7 @@
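
A minimal sketch of that rename workaround (map names are examples; --overwrite lets g.rename replace the original):

 r.mapcalc "oldmap_tmp = oldmap + 1"
 g.rename raster=oldmap_tmp,oldmap --overwrite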

    EXAMPLES

-To change all values below 5 to NULL: +To change all values below 5 to NULL and assign 5 otherwise:

     newmap = if(map<5, null(), 5)
     
    @@ -797,7 +797,7 @@

    KNOWN ISSUES

    Any maps generated by a r.mapcalc command only exist after the entire command has completed. All maps are generated concurrently, row-by-row (i.e. there is an implicit "for row in rows {...}" around the entire expression). -Thus the #, @, and [ ] operators cannot be used on a map +Thus the #, @, and [ ] operators cannot be used on a map generated within same r.mapcalc command run. Consequently, the following (strikethrough code) does not work: @@ -806,11 +806,11 @@

    KNOWN ISSUES

    othermap = newmap[-1, 0] / newmap[1, 0] -

    Continuation lines must end with a \ and have no trailing +

    Continuation lines must end with a \ and have no trailing white space (blanks or tabs). If the user does leave white space at the end of continuation lines, the error messages produced by r.mapcalc will be meaningless and the equation will not work as the user intended. -This is particularly important for the eval() function. +This is particularly important for the eval() function.

    Currently, there is no comment mechanism in r.mapcalc. Perhaps adding a capability that would cause the entire line to be @@ -820,7 +820,7 @@

    KNOWN ISSUES

    of simply a blank line. This would make separation of multiple scripts separable by white space.

    r.mapcalc does not print a warning in case of operations on -NULL cells. It is left to the user to utilize the isnull() function. +NULL cells. It is left to the user to utilize the isnull() function.
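
For example, a NULL-aware sum built with isnull() (a minimal sketch; map names are placeholders):

 # plain addition propagates NULL; the isnull() test substitutes 0 first
 r.mapcalc "sum_naive = a + b"
 r.mapcalc "sum_filled = if(isnull(a), 0, a) + if(isnull(b), 0, b)"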

    REFERENCES

    diff --git a/raster/r.mapcalc/r3.mapcalc.html b/raster/r.mapcalc/r3.mapcalc.html index cbb0b57e25d..742f5974384 100644 --- a/raster/r.mapcalc/r3.mapcalc.html +++ b/raster/r.mapcalc/r3.mapcalc.html @@ -284,43 +284,44 @@

    Floating point values in the expression

    NULL support

      -
    • Division by zero should result in NULL. -
    • Modulus by zero should result in NULL. +
    • Division by zero should result in NULL.
    • +
    • Modulus by zero should result in NULL.
    • NULL-values in any arithmetic or logical operation should result -in NULL. (however, &&& and ||| are treated specially, as described below). +in NULL. (however, &&& and ||| are treated specially, as described below).
    • The &&& and ||| operators observe the following axioms even when x is NULL:
      -	x &&& false == false
      -	false &&& x == false
      -	x ||| true == true
      -	true ||| x == true
      -
      + x &&& false == false + false &&& x == false + x ||| true == true + true ||| x == true +
    • NULL-values in function arguments should result in NULL (however, -if(), eval() and isnull() are treated specially, as described below). -
    • The eval() function always returns its last argument +if(), eval() and isnull() are treated specially, as described below).
    • +
    • The eval() function always returns its last argument
    • The situation for if() is:
       if(x)
      -	NULL if x is NULL; 0 if x is zero; 1 otherwise
      +    NULL if x is NULL; 0 if x is zero; 1 otherwise
       if(x,a)
      -	NULL if x is NULL; a if x is non-zero; 0 otherwise
      +    NULL if x is NULL; a if x is non-zero; 0 otherwise
       if(x,a,b)
      -	NULL if x is NULL; a if x is non-zero; b otherwise
      +    NULL if x is NULL; a if x is non-zero; b otherwise
       if(x,n,z,p)
      -	NULL if x is NULL; n if x is negative;
      +    NULL if x is NULL; n if x is negative;
       z if x is zero; p if x is positive
      -
      +
    • The (new) function isnull(x) returns: 1 if x is NULL; 0 otherwise. The (new) function null() -(which has no arguments) returns an integer NULL. +(which has no arguments) returns an integer NULL.
    • Non-NULL, but invalid, arguments to functions should result in NULL.
       Examples:
       log(-2)
       sqrt(-2)
       pow(a,b) where a is negative and b is not an integer
      -
      +
    +

NULL support: Please note that any math performed with NULL cells always results in a NULL value for these cells. If you want to replace a NULL cell on-the-fly, use the isnull() test function in an if-statement. @@ -332,28 +333,28 @@

    NULL support

    NULL and conditions:

    For the one argument form:

    -if(x) = NULL		if x is NULL
    -if(x) = 0		if x = 0
    -if(x) = 1		otherwise (i.e. x is neither NULL nor 0).
    +if(x) = NULL        if x is NULL
    +if(x) = 0        if x = 0
    +if(x) = 1        otherwise (i.e. x is neither NULL nor 0).
     

    For the two argument form:

    -if(x,a) = NULL		if x is NULL
    -if(x,a) = 0		if x = 0
    -if(x,a) = a		otherwise (i.e. x is neither NULL nor 0).
    +if(x,a) = NULL        if x is NULL
    +if(x,a) = 0        if x = 0
    +if(x,a) = a        otherwise (i.e. x is neither NULL nor 0).
     

    For the three argument form:

    -if(x,a,b) = NULL	if x is NULL
    -if(x,a,b) = b		if x = 0
    -if(x,a,b) = a		otherwise (i.e. x is neither NULL nor 0).
    +if(x,a,b) = NULL    if x is NULL
    +if(x,a,b) = b        if x = 0
    +if(x,a,b) = a        otherwise (i.e. x is neither NULL nor 0).
     

    For the four argument form:

    -if(x,a,b,c) = NULL	if x is NULL
    -if(x,a,b,c) = a		if x > 0
    -if(x,a,b,c) = b		if x = 0
    -if(x,a,b,c) = c		if x < 0
    +if(x,a,b,c) = NULL    if x is NULL
    +if(x,a,b,c) = a        if x > 0
    +if(x,a,b,c) = b        if x = 0
    +if(x,a,b,c) = c        if x < 0
     
    More generally, all operators and most functions return NULL if *any* of their arguments are NULL. @@ -367,8 +368,8 @@

    NULL support

    and 4 argument forms of if() return NULL if the "selected" argument is NULL, e.g.:
    -if(0,a,b) = b	regardless of whether a is NULL
    -if(1,a,b) = a	regardless of whether b is NULL
    +if(0,a,b) = b    regardless of whether a is NULL
    +if(1,a,b) = a    regardless of whether b is NULL
     
    eval() always returns its last argument, so it only returns NULL if the last argument is NULL. @@ -396,8 +397,8 @@

    Usage from command line

     'result = volume * 2'
     
    -Without the quotes, the *, which has special meaning to the UNIX shell, -would be altered and r3.mapcalc would see something other than the *. +Without the quotes, the *, which has special meaning to the UNIX shell, +would be altered and r3.mapcalc would see something other than the *.

    Multiple computations

    @@ -408,7 +409,7 @@

    Backwards compatibility

    For the backwards compatibility with GRASS 6, -if no options are given, it manufactures file=- (which reads from +if no options are given, it manufactures file=- (which reads from stdin), so you can continue to use e.g.:
     r3.mapcalc < file
    @@ -419,7 +420,7 @@ 

    Backwards compatibility

    foo = 1 EOF
    -But unless you need compatibility with previous GRASS GIS versions, use file= +But unless you need compatibility with previous GRASS GIS versions, use file= explicitly, as stated above.

    When the map name contains uppercase letter(s) or a dot which are not @@ -502,11 +503,14 @@

    EXAMPLES

     ave = (a + b)/2
     
    + +

    To form a weighted average:

     ave = (5*a + 3*b)/8.0
     
    -To produce a binary representation of 3D grid +

    +To produce a binary representation of the 3D grid a so that category 0 remains 0 and all other categories become 1:

     mask = a != 0
    @@ -515,11 +519,15 @@ 

    EXAMPLES

     mask = if(a)
     
    + +

    To mask 3D grid b by 3D grid a:

     result = if(a,b)
     
    -To change all values below 5 to NULL, keep otherwise: + +

+To change all values below 5 to NULL, keep the value otherwise:

     newmap = if(map < 5, null(), map)
     
    @@ -562,7 +570,7 @@

    KNOWN ISSUES

    Any maps generated by a r3.mapcalc command only exist after the entire command has completed. All maps are generated concurrently, row-by-row (i.e. there is an implicit "for row in rows {...}" around the entire expression). -Thus the #, @, and [ ] operators cannot be used on a map +Thus the #, @, and [ ] operators cannot be used on a map generated within same r3.mapcalc command run.

    @@ -570,11 +578,11 @@ 

    KNOWN ISSUES

    othermap = newmap[-1, 0] / newmap[1, 0]
    -

    Continuation lines must end with a \ and have no trailing +

    Continuation lines must end with a \ and have no trailing white space (blanks or tabs). If the user does leave white space at the end of continuation lines, the error messages produced by r3.mapcalc will be meaningless and the equation will not work as the user intended. -This is particularly important for the eval() function. +This is particularly important for the eval() function.

    Currently, there is no comment mechanism in r3.mapcalc. Perhaps adding a capability that would cause the entire line to be @@ -584,7 +592,7 @@

    KNOWN ISSUES

    of simply a blank line. This would make separation of multiple scripts separable by white space.

    r3.mapcalc does not print a warning in case of operations on -NULL cells. It is left to the user to utilize the isnull() function. +NULL cells. It is left to the user to utilize the isnull() function.

    REFERENCES

    diff --git a/raster/r.mfilter/getfilt.c b/raster/r.mfilter/getfilt.c index 02762a5284f..809c958f0fa 100644 --- a/raster/r.mfilter/getfilt.c +++ b/raster/r.mfilter/getfilt.c @@ -158,5 +158,6 @@ FILTER *get_filter(char *name, int *nfilters, char *title) } *nfilters = count; + fclose(fd); return filter; } diff --git a/raster/r.neighbors/r.neighbors.html b/raster/r.neighbors/r.neighbors.html index 497f807a725..84ce60a3af6 100644 --- a/raster/r.neighbors/r.neighbors.html +++ b/raster/r.neighbors/r.neighbors.html @@ -303,25 +303,26 @@

    FLAGS

    The exact masks for the first few neighborhood sizes are as follows:

    -3x3     . X .		5x5	. . X . .	7x7	. . . X . . .
    -        X O X			. X X X .		. X X X X X .
    -        . X .			X X O X X		. X X X X X .
    -				. X X X .		X X X O X X X
    - 				. . X . .		. X X X X X .
    -							. X X X X X .
    -        						. . . X . . .
    -
    -9x9	. . . . X . . . .		11x11   . . . . . X . . . . .
    -	. . X X X X X . .			. . X X X X X X X . .
    -        . X X X X X X X .			. X X X X X X X X X .
    -        . X X X X X X X .			. X X X X X X X X X .
    -        X X X X O X X X X			. X X X X X X X X X .
    -        . X X X X X X X .			X X X X X O X X X X X
    -        . X X X X X X X .			. X X X X X X X X X .
    -        . . X X X X X . .			. X X X X X X X X X .
    -        . . . . X . . . .			. X X X X X X X X X .
    -				        	. . X X X X X X X . .
    -				        	. . . . . X . . . . .
    +3x3     . X .        5x5    . . X . .    7x7    . . . X . . .
    +        X O X               . X X X .           . X X X X X .
    +        . X .               X X O X X           . X X X X X .
    +                            . X X X .           X X X O X X X
    +                            . . X . .           . X X X X X .
    +                                                . X X X X X .
    +                                                . . . X . . .
    +
    +
    +9x9    . . . . X . . . .        11x11   . . . . . X . . . . .
    +       . . X X X X X . .                . . X X X X X X X . .
    +       . X X X X X X X .                . X X X X X X X X X .
    +       . X X X X X X X .                . X X X X X X X X X .
    +       X X X X O X X X X                . X X X X X X X X X .
    +       . X X X X X X X .                X X X X X O X X X X X
    +       . X X X X X X X .                . X X X X X X X X X .
    +       . . X X X X X . .                . X X X X X X X X X .
    +       . . . . X . . . .                . X X X X X X X X X .
    +                                        . . X X X X X X X . .
    +                                        . . . . . X . . . . .
     
    diff --git a/raster/r.null/r.null.html b/raster/r.null/r.null.html index 4d92b027d65..58293e58822 100644 --- a/raster/r.null/r.null.html +++ b/raster/r.null/r.null.html @@ -41,7 +41,7 @@

    NULL data compression

By default, no-data files (i.e., NULL files) are not compressed unless a specific environment variable is set. The NULL file compression must be -explicitly turned on with export GRASS_COMPRESS_NULLS=1.
    +explicitly turned on with export GRASS_COMPRESS_NULLS=1.
    Warning: such raster maps can then only be opened with GRASS GIS 7.2.0 or later. NULL file compression can be managed with r.null -z. diff --git a/raster/r.object.geometry/r.object.geometry.html b/raster/r.object.geometry/r.object.geometry.html index bdca7566868..ca3524ebffc 100644 --- a/raster/r.object.geometry/r.object.geometry.html +++ b/raster/r.object.geometry/r.object.geometry.html @@ -23,10 +23,10 @@

    DESCRIPTION

  • area
  • perimeter
  • compact_square (compactness compared to a square: - compact_square = 4 * sqrt(area) / perimeter) + compact_square = 4 * sqrt(area) / perimeter)
  • compact_circle (compactness compared to a circle: - compact_circle = perimeter / ( 2 * sqrt(PI * area) ))
  • -
  • fractal dimension ( fd = 2 * ( log(perimeter) / log(area + 0.001) ) )
  • + compact_circle = perimeter / ( 2 * sqrt(PI * area) )) +
  • fractal dimension ( fd = 2 * ( log(perimeter) / log(area + 0.001) ) )
  • mean x coordinate of object (in map units)
  • mean y coordinate of object (in map units)
  • diff --git a/raster/r.out.bin/r.out.bin.html b/raster/r.out.bin/r.out.bin.html index 0041286cf25..48dcc20e334 100644 --- a/raster/r.out.bin/r.out.bin.html +++ b/raster/r.out.bin/r.out.bin.html @@ -9,7 +9,7 @@

    DESCRIPTION

    NOTES

    With the -h flag, data can be directly used by -GMT as Grid Format 1 (float) or +GMT as Grid Format 1 (float) or 2 (short). For example:
    diff --git a/raster/r.out.gdal/attr.c b/raster/r.out.gdal/attr.c
    index ac40fb3c0bb..0f090fbbdaa 100644
    --- a/raster/r.out.gdal/attr.c
    +++ b/raster/r.out.gdal/attr.c
    @@ -28,7 +28,9 @@ int export_attr(GDALDatasetH hMEMDS, int band, const char *name,
     
         rcount = 0;
         Rast_init_colors(&sGrassColors);
    -    if (Rast_read_colors(name, mapset, &sGrassColors) >= 0) {
    +    /* only export color rules set by the user,
    +     * skip automatically generated default color rules */
    +    if (Rast_read_colors(name, mapset, &sGrassColors) > 0) {
             rcount = Rast_colors_count(&sGrassColors);
         }
     
    @@ -47,19 +49,165 @@ int export_attr(GDALDatasetH hMEMDS, int band, const char *name,
          * GFU_MaxCount
          */
     
    -    /* TODO: cats.ncats > 0 && rcount > 0
    -     * how to merge categories and color rules ?
    -     * what to do for a cell value that has a category but no color rule ?
    -     * what to do for a cell value that has a color rule but no category ?
    -     */
    -    if (cats.ncats > 0) {
    -        int use_minmax = 0;
    +    if (cats.ncats > 0 && rcount > 0) {
    +        bool use_minmax = false;
    +        int r1, g1, b1, r2, g2, b2;
    +
    +        /* merge categories and color rules:
    +         * go through categories and fetch corresponding color
    +         * write out value, label, red, green, blue
    +         * or the minmax variant
    +         */
    +
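+        /* first pass: a category whose min differs from its max forces the
+         * min/max RAT layout below */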
    +        if (maptype == CELL_TYPE) {
    +            for (i = 0; i < cats.ncats; i++) {
    +                label = Rast_get_ith_c_cat(&cats, i, &CellMin, &CellMax);
    +                if (!label || !*label) {
    +                    G_fatal_error(_("No label for category entry no %d "), i);
    +                }
    +                if (CellMin != CellMax) {
    +                    use_minmax = true;
    +                    break;
    +                }
    +            }
    +        }
    +        else {
    +            for (i = 0; i < cats.ncats; i++) {
    +                label = Rast_get_ith_d_cat(&cats, i, &dfCellMin, &dfCellMax);
    +                if (!label || !*label) {
    +                    G_fatal_error(_("No label for category entry no %d "), i);
    +                }
    +                if (dfCellMin != dfCellMax) {
    +                    use_minmax = true;
    +                    break;
    +                }
    +            }
    +        }
    +
    +        /* create new raster attribute table */
    +        hrat = GDALCreateRasterAttributeTable();
    +
    +        if (use_minmax) {
    +            if (maptype == CELL_TYPE) {
    +                GDALRATCreateColumn(hrat, "min", GFT_Integer, GFU_Min);
    +                GDALRATCreateColumn(hrat, "max", GFT_Integer, GFU_Max);
    +            }
    +            else {
    +                GDALRATCreateColumn(hrat, "min", GFT_Real, GFU_Min);
    +                GDALRATCreateColumn(hrat, "max", GFT_Real, GFU_Max);
    +            }
    +            GDALRATCreateColumn(hrat, "label", GFT_String, GFU_Name);
    +            GDALRATCreateColumn(hrat, "redmin", GFT_Integer, GFU_RedMin);
    +            GDALRATCreateColumn(hrat, "redmax", GFT_Integer, GFU_RedMax);
    +            GDALRATCreateColumn(hrat, "greenmin", GFT_Integer, GFU_GreenMin);
    +            GDALRATCreateColumn(hrat, "greenmax", GFT_Integer, GFU_GreenMax);
    +            GDALRATCreateColumn(hrat, "bluemin", GFT_Integer, GFU_BlueMin);
    +            GDALRATCreateColumn(hrat, "bluemax", GFT_Integer, GFU_BlueMax);
    +
    +            GDALRATSetRowCount(hrat, cats.ncats);
    +
    +            if (maptype == CELL_TYPE) {
    +                for (i = 0; i < cats.ncats; i++) {
    +                    label = Rast_get_ith_c_cat(&cats, i, &CellMin, &CellMax);
    +                    GDALRATSetValueAsInt(hrat, i, 0, CellMin);
    +                    GDALRATSetValueAsInt(hrat, i, 1, CellMax);
    +                    GDALRATSetValueAsString(hrat, i, 2, label);
    +
    +                    Rast_get_color(&CellMin, &r1, &g1, &b1, &sGrassColors,
    +                                   maptype);
    +                    Rast_get_color(&CellMax, &r2, &g2, &b2, &sGrassColors,
    +                                   maptype);
    +                    GDALRATSetValueAsInt(hrat, i, 3, r1);
    +                    GDALRATSetValueAsInt(hrat, i, 4, r2);
    +                    GDALRATSetValueAsInt(hrat, i, 5, g1);
    +                    GDALRATSetValueAsInt(hrat, i, 6, g2);
    +                    GDALRATSetValueAsInt(hrat, i, 7, b1);
    +                    GDALRATSetValueAsInt(hrat, i, 8, b2);
    +                }
    +            }
    +            else {
    +                for (i = 0; i < cats.ncats; i++) {
    +                    label =
    +                        Rast_get_ith_d_cat(&cats, i, &dfCellMin, &dfCellMax);
    +                    GDALRATSetValueAsDouble(hrat, i, 0, dfCellMin);
    +                    GDALRATSetValueAsDouble(hrat, i, 1, dfCellMax);
    +                    GDALRATSetValueAsString(hrat, i, 2, label);
    +
    +                    Rast_get_color(&dfCellMin, &r1, &g1, &b1, &sGrassColors,
    +                                   DCELL_TYPE);
    +                    Rast_get_color(&dfCellMax, &r2, &g2, &b2, &sGrassColors,
    +                                   DCELL_TYPE);
    +                    GDALRATSetValueAsInt(hrat, i, 3, r1);
    +                    GDALRATSetValueAsInt(hrat, i, 4, r2);
    +                    GDALRATSetValueAsInt(hrat, i, 5, g1);
    +                    GDALRATSetValueAsInt(hrat, i, 6, g2);
    +                    GDALRATSetValueAsInt(hrat, i, 7, b1);
    +                    GDALRATSetValueAsInt(hrat, i, 8, b2);
    +                }
    +            }
    +        }
    +        else {
    +            if (maptype == CELL_TYPE) {
    +                GDALRATCreateColumn(hrat, "value", GFT_Integer, GFU_MinMax);
    +            }
    +            else {
    +                GDALRATCreateColumn(hrat, "value", GFT_Real, GFU_MinMax);
    +            }
    +            GDALRATCreateColumn(hrat, "label", GFT_String, GFU_Name);
    +            GDALRATCreateColumn(hrat, "red", GFT_Integer, GFU_Red);
    +            GDALRATCreateColumn(hrat, "green", GFT_Integer, GFU_Green);
    +            GDALRATCreateColumn(hrat, "blue", GFT_Integer, GFU_Blue);
    +
    +            GDALRATSetRowCount(hrat, cats.ncats);
    +
    +            if (maptype == CELL_TYPE) {
    +                for (i = 0; i < cats.ncats; i++) {
    +                    label = Rast_get_ith_c_cat(&cats, i, &CellMin, &CellMax);
    +                    GDALRATSetValueAsInt(hrat, i, 0, CellMin);
    +                    GDALRATSetValueAsString(hrat, i, 1, label);
    +
    +                    Rast_get_color(&CellMin, &r1, &g1, &b1, &sGrassColors,
    +                                   maptype);
    +                    GDALRATSetValueAsInt(hrat, i, 2, r1);
    +                    GDALRATSetValueAsInt(hrat, i, 3, g1);
    +                    GDALRATSetValueAsInt(hrat, i, 4, b1);
    +                }
    +            }
    +            else {
    +                for (i = 0; i < cats.ncats; i++) {
    +                    label =
    +                        Rast_get_ith_d_cat(&cats, i, &dfCellMin, &dfCellMax);
    +                    GDALRATSetValueAsDouble(hrat, i, 0, dfCellMin);
    +                    GDALRATSetValueAsString(hrat, i, 1, label);
    +
    +                    Rast_get_color(&dfCellMin, &r1, &g1, &b1, &sGrassColors,
    +                                   DCELL_TYPE);
    +                    GDALRATSetValueAsInt(hrat, i, 2, r1);
    +                    GDALRATSetValueAsInt(hrat, i, 3, g1);
    +                    GDALRATSetValueAsInt(hrat, i, 4, b1);
    +                }
    +            }
    +        }
    +
    +        if (GDALSetDefaultRAT(hBand, hrat) != CE_None) {
    +            G_warning(_("Failed to set raster attribute table"));
    +            ret = -1;
    +        }
    +        /* GDALRATDumpReadable(hrat, stdout); */
    +
    +        GDALDestroyRasterAttributeTable(hrat);
    +    }
    +    else if (cats.ncats > 0 && rcount == 0) {
    +        bool use_minmax = false;
     
             if (maptype == CELL_TYPE) {
                 for (i = 0; i < cats.ncats; i++) {
                     label = Rast_get_ith_c_cat(&cats, i, &CellMin, &CellMax);
    +                if (!label || !*label) {
    +                    G_fatal_error(_("No label for category entry no %d "), i);
    +                }
                     if (CellMin != CellMax) {
    -                    use_minmax = 1;
    +                    use_minmax = true;
                         break;
                     }
                 }
    @@ -67,8 +215,11 @@ int export_attr(GDALDatasetH hMEMDS, int band, const char *name,
             else {
                 for (i = 0; i < cats.ncats; i++) {
                     label = Rast_get_ith_d_cat(&cats, i, &dfCellMin, &dfCellMax);
    +                if (!label || !*label) {
    +                    G_fatal_error(_("No label for category entry no %d "), i);
    +                }
                     if (dfCellMin != dfCellMax) {
    -                    use_minmax = 1;
    +                    use_minmax = true;
                         break;
                     }
                 }
    diff --git a/raster/r.out.gdal/r.out.gdal.html b/raster/r.out.gdal/r.out.gdal.html
    index 06d269944c5..8a24e2bb870 100644
    --- a/raster/r.out.gdal/r.out.gdal.html
    +++ b/raster/r.out.gdal/r.out.gdal.html
    @@ -9,7 +9,7 @@ 

    DESCRIPTION

    (createopt="TFW=YES,COMPRESS=DEFLATE").

    For possible createopt and metaopt parameters please consult the individual -supported formats +supported formats pages on the GDAL website. The createopt parameter may be used to create TFW or World files ("TFW=YES","WORLDFILE=ON"). @@ -25,7 +25,7 @@
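
For example, exporting a raster as GeoTIFF with a World file and DEFLATE compression (a sketch; the elevation map name is a placeholder):

 r.out.gdal input=elevation output=elevation.tif format=GTiff \
     createopt="TFW=YES,COMPRESS=DEFLATE"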

    DESCRIPTION

    SUPPORTED RASTER FORMATS

    -The set of supported +The set of supported raster formats written by r.out.gdal depends on the local GDAL installation, printed with the -l flag. Available may be (incomplete list):

    @@ -110,7 +110,7 @@

    Ranges of GDAL data types

    Adding overviews to speed up map display in other software

    -Adding overviews with gdaladdo +Adding overviews with gdaladdo after exporting can speed up display. The overviews are created internally within the exported file. The amount of levels (power-of-two factors) are controlled with the overviews parameter. The higher the overview level @@ -151,23 +151,23 @@

    Improving GeoTIFF compatibility

    Here are some things to try:
      -
    • Create a World file with createopt="TFW=YES". +
    • Create a World file with createopt="TFW=YES".
• Do not use GeoTIFF internal compression. Other GIS software often supports only a subset of the available compression methods with the supported methods differing between GIS software packages. Unfortunately this means the output image can be rather huge, but the file can be -compressed with software like zip, gzip, or bzip2. +compressed with software like zip, gzip, or bzip2.
    • Skip exporting the color table. Color tables are not always properly rendered, particularly for type UInt16, and the GeoTIFF file can appear completely black. If you are lucky the problematic software package has a method to reset the color table and assign a new color table -(sometimes called symbology). +(sometimes called symbology).
    • -
    • Keep metadata simple with createopt="PROFILE=GeoTIFF" or -createopt="PROFILE=BASELINE". With BASELINE no GDAL or GeoTIFF -tags will be written and a World file is required (createopt="TFW=YES"). +
    • Keep metadata simple with createopt="PROFILE=GeoTIFF" or +createopt="PROFILE=BASELINE". With BASELINE no GDAL or GeoTIFF +tags will be written and a World file is required (createopt="TFW=YES").
As long as the user just uses r.ros together with r.spread, there is no need to - be concerned about these output units. + be concerned about these output units.

    REFERENCES

    diff --git a/raster/r.series.accumulate/r.series.accumulate.html b/raster/r.series.accumulate/r.series.accumulate.html index f621ee0167c..f97f6d82311 100644 --- a/raster/r.series.accumulate/r.series.accumulate.html +++ b/raster/r.series.accumulate/r.series.accumulate.html @@ -93,7 +93,7 @@

    NOTES

    The maximum number of raster maps that can be processed is given by the user-specific limit of the operating system. For example, the soft limits for users are typically 1024 files. The soft limit can be changed with e.g. -ulimit -n 4096 (UNIX-based operating systems) but it cannot be +ulimit -n 4096 (UNIX-based operating systems) but it cannot be higher than the hard limit. If the latter is too low, you can as superuser add an entry in: diff --git a/raster/r.series.interp/r.series.interp.html b/raster/r.series.interp/r.series.interp.html index a05b792afbf..77df2a89d91 100644 --- a/raster/r.series.interp/r.series.interp.html +++ b/raster/r.series.interp/r.series.interp.html @@ -7,7 +7,7 @@

    DESCRIPTION

    The following interpolation methods are supported.
      -
    • linear: Linear interpolation. At least two input maps and data positions are required. +
    • linear: Linear interpolation. At least two input maps and data positions are required.

    EXAMPLES

    diff --git a/raster/r.series/r.series.html b/raster/r.series/r.series.html index 0d1c12b5bb9..89ec49ad140 100644 --- a/raster/r.series/r.series.html +++ b/raster/r.series/r.series.html @@ -12,29 +12,29 @@

    DESCRIPTION

    Following methods are available:
      -
    • average: average value -
    • count: count of non-NULL cells -
    • median: median value -
    • mode: most frequently occurring value -
    • minimum: lowest value -
    • min_raster: raster map number with the minimum time-series value -
    • maximum: highest value -
    • max_raster: raster map number with the maximum time-series value -
    • stddev: standard deviation -
    • range: range of values (max - min) -
    • sum: sum of values -
    • variance: statistical variance -
    • diversity: number of different values -
    • slope: linear regression slope -
    • offset: linear regression offset -
    • detcoeff: linear regression coefficient of determination -
    • tvalue: linear regression t-value -
    • quart1: first quartile -
    • quart3: third quartile -
    • perc90: ninetieth percentile -
    • quantile: arbitrary quantile -
    • skewness: skewness -
    • kurtosis: kurtosis +
    • average: average value
    • +
    • count: count of non-NULL cells
    • +
    • median: median value
    • +
    • mode: most frequently occurring value
    • +
    • minimum: lowest value
    • +
    • min_raster: raster map number with the minimum time-series value
    • +
    • maximum: highest value
    • +
    • max_raster: raster map number with the maximum time-series value
    • +
    • stddev: standard deviation
    • +
    • range: range of values (max - min)
    • +
    • sum: sum of values
    • +
    • variance: statistical variance
    • +
    • diversity: number of different values
    • +
    • slope: linear regression slope
    • +
    • offset: linear regression offset
    • +
    • detcoeff: linear regression coefficient of determination
    • +
    • tvalue: linear regression t-value
    • +
    • quart1: first quartile
    • +
    • quart3: third quartile
    • +
    • perc90: ninetieth percentile
    • +
    • quantile: arbitrary quantile
    • +
    • skewness: skewness
    • +
    • kurtosis: kurtosis
    Note that most parameters accept multiple answers, allowing multiple @@ -107,7 +107,7 @@

    Management of open file limits

    The maximum number of raster maps that can be processed is given by the user-specific limit of the operating system. For example, the soft limits for users are typically 1024 files. The soft limit can be changed with e.g. -ulimit -n 4096 (UNIX-based operating systems) but it cannot be +ulimit -n 4096 (UNIX-based operating systems) but it cannot be higher than the hard limit. If the latter is too low, you can as superuser add an entry in: @@ -196,7 +196,7 @@

    EXAMPLES

    Example to use the file option of r.series:

    -cat > input.txt << EOF
    +cat > input.txt << EOF
     map1
     map2
     map3
    @@ -211,7 +211,7 @@ 

    EXAMPLES

    weights we can leave it out:
    -cat > input.txt << EOF
    +cat > input.txt << EOF
     map1
     map2|0.75
     map3
    diff --git a/raster/r.sim/r.sim.water/r.sim.water.html b/raster/r.sim/r.sim.water/r.sim.water.html
    index 79cef1bfadc..af54dea4490 100644
    --- a/raster/r.sim/r.sim.water/r.sim.water.html
    +++ b/raster/r.sim/r.sim.water/r.sim.water.html
    @@ -208,37 +208,37 @@ 

    REFERENCES

    and short term terrain evolution in Open Source GIS. In: C.T. Miller, M.W. Farthing, V.G. Gray, G.F. Pinder eds., Proceedings of the XVth International Conference on Computational Methods in Water -Resources (CMWR XV), June 13-17 2004, Chapel Hill, NC, USA, Elsevier, pp. 1479-1490. +Resources (CMWR XV), June 13-17 2004, Chapel Hill, NC, USA, Elsevier, pp. 1479-1490.
  • Mitasova H, Mitas, L., 2000, Modeling spatial processes in multiscale framework: exploring duality between particles and fields, -plenary talk at GIScience2000 conference, Savannah, GA. +plenary talk at GIScience2000 conference, Savannah, GA.
  • Mitas, L., and Mitasova, H., 1998, Distributed soil erosion simulation -for effective erosion prevention. Water Resources Research, 34(3), 505-516. +for effective erosion prevention. Water Resources Research, 34(3), 505-516.
  • Mitasova, H., Mitas, L., 2001, Multiscale soil erosion simulations for land use management, In: Landscape erosion and landscape evolution modeling, Harmon R. and Doe W. eds., -Kluwer Academic/Plenum Publishers, pp. 321-347. +Kluwer Academic/Plenum Publishers, pp. 321-347.
  • Hofierka, J, Mitasova, H., Mitas, L., 2002. GRASS and modeling landscape processes using duality between particles and fields. Proceedings of the Open source GIS - GRASS users conference 2002 - Trento, Italy, 11-13 September 2002. -PDF +PDF
  • Hofierka, J., Knutova, M., 2015, Simulating aspects of a flash flood using the Monte Carlo method and GRASS GIS: a case study of the Malá Svinka Basin (Slovakia), Open Geosciences. Volume 7, Issue 1, ISSN (Online) 2391-5447, DOI: 10.1515/geo-2015-0013, -April 2015 +April 2015
  • Neteler, M. and Mitasova, H., 2008, Open Source GIS: A GRASS GIS Approach. Third Edition. -The International Series in Engineering and Computer Science: Volume 773. Springer New York Inc, p. 406. +The International Series in Engineering and Computer Science: Volume 773. Springer New York Inc, p. 406.
  • SEE ALSO

    diff --git a/raster/r.slope.aspect/r.slope.aspect.html b/raster/r.slope.aspect/r.slope.aspect.html index a76bd33e477..421f7f84e78 100644 --- a/raster/r.slope.aspect/r.slope.aspect.html +++ b/raster/r.slope.aspect/r.slope.aspect.html @@ -30,7 +30,7 @@

    DESCRIPTION

    # convert angles from CCW from East to CW from North # modulus (%) can not be used with floating point aspect values r.mapcalc "azimuth_aspect = if(ccw_aspect == 0, 0, \ - if(ccw_aspect < 90, 90 - ccw_aspect, \ + if(ccw_aspect < 90, 90 - ccw_aspect, \ 450 - ccw_aspect)))"
    @@ -240,15 +240,15 @@

    Classification of major aspect directions in compass orientation

    # generate compass orientation and classify four major directions (N, E, S, W) r.mapcalc "aspect_4_directions = eval( \\ compass=(450 - myaspect ) % 360, \\ - if(compass >=0. && compass < 45., 1) \\ - + if(compass >=45. && compass < 135., 2) \\ - + if(compass >=135. && compass < 225., 3) \\ - + if(compass >=225. && compass < 315., 4) \\ - + if(compass >=315., 1) \\ + if(compass >=0. && compass < 45., 1) \\ + + if(compass >=45. && compass < 135., 2) \\ + + if(compass >=135. && compass < 225., 3) \\ + + if(compass >=225. && compass < 315., 4) \\ + + if(compass >=315., 1) \\ )" # assign text labels -r.category aspect_4_directions separator=comma rules=- << EOF +r.category aspect_4_directions separator=comma rules=- << EOF 1,north 2,east 3,south @@ -256,7 +256,7 @@

    Classification of major aspect directions in compass orientation

    EOF # assign color table -r.colors aspect_4_directions rules=- << EOF +r.colors aspect_4_directions rules=- << EOF 1 253,184,99 2 178,171,210 3 230,97,1 @@ -273,13 +273,14 @@

    REFERENCES

    • Horn, B. K. P. (1981). Hill Shading and the Reflectance Map, Proceedings -of the IEEE, 69(1):14-47. +of the IEEE, 69(1):14-47.
    • Mitasova, H. (1985). Cartographic aspects of computer surface modeling. PhD thesis. -Slovak Technical University , Bratislava +Slovak Technical University , Bratislava
    • Hofierka, J., Mitasova, H., Neteler, M., 2009. Geomorphometry in GRASS GIS. In: Hengl, T. and Reuter, H.I. (Eds), Geomorphometry: Concepts, Software, Applications. Developments in Soil Science, vol. 33, Elsevier, 387-410 pp, -http://www.geomorphometry.org +doi:10.1016/S0166-2481(08)00017-2, +https://www.geomorphometry.org

    SEE ALSO

    diff --git a/raster/r.solute.transport/r.solute.transport.html b/raster/r.solute.transport/r.solute.transport.html index 0046ac48f9f..ff77a35f419 100644 --- a/raster/r.solute.transport/r.solute.transport.html +++ b/raster/r.solute.transport/r.solute.transport.html @@ -108,7 +108,7 @@

    EXAMPLE

    gs.run_command("r.mapcalc", expression="phead = if(col() == 1 , 50, 40)") gs.run_command("r.mapcalc", expression="phead = if(col() ==200 , 45 + row()/40, phead)") gs.run_command("r.mapcalc", expression="status = if(col() == 1 || col() == 200 , 2, 1)") -gs.run_command("r.mapcalc", expression="well = if((row() == 50 && col() == 175) || (row() == 10 && col() == 135) , -0.001, 0)") +gs.run_command("r.mapcalc", expression="well = if((row() == 50 && col() == 175) || (row() == 10 && col() == 135) , -0.001, 0)") gs.run_command("r.mapcalc", expression="hydcond = 0.00005") gs.run_command("r.mapcalc", expression="recharge = 0") gs.run_command("r.mapcalc", expression="top_conf = 20") diff --git a/raster/r.spread/r.spread.html b/raster/r.spread/r.spread.html index 1980e61aae5..99cdcabc1d0 100644 --- a/raster/r.spread/r.spread.html +++ b/raster/r.spread/r.spread.html @@ -12,9 +12,9 @@

    DESCRIPTION

    1. the uneven conditions from location to location, which can be called -spatial heterogeneity, and +spatial heterogeneity, and
    2. the uneven conditions in different directions, which can be called -anisotropy. +anisotropy.

    The anisotropy of spread occurs when any of the determining factors @@ -48,8 +48,8 @@

    DESCRIPTION

    r.spreadpath can be found in Xu (1994). -

    Options spot_dist, w_speed and f_mois must all -be given if the -s (spotting) flag is used. +

    Options spot_dist, w_speed and f_mois must all +be given if the -s (spotting) flag is used.

    EXAMPLE

    diff --git a/raster/r.statistics/o_adev.c b/raster/r.statistics/o_adev.c index 8c20f1f380a..b1db6acb6d1 100644 --- a/raster/r.statistics/o_adev.c +++ b/raster/r.statistics/o_adev.c @@ -71,6 +71,7 @@ int o_adev(const char *basemap, const char *covermap, const char *outputmap, G_popen_close(&stats_child); G_popen_close(&reclass_child); + G_free(tab); return 0; } diff --git a/raster/r.stats.quantile/r.stats.quantile.html b/raster/r.stats.quantile/r.stats.quantile.html index 4c387281b7b..b71bc0ea50e 100644 --- a/raster/r.stats.quantile/r.stats.quantile.html +++ b/raster/r.stats.quantile/r.stats.quantile.html @@ -5,7 +5,7 @@

    DESCRIPTION

    in a "base layer". It provides quantile calculations as selected "zonal statistics". -

    NOTES

    +

    NOTES

    r.stats.quantile is intended to be a partial replacement for r.statistics, with support @@ -19,9 +19,9 @@

    NOTES

    EXAMPLE

    -In this example, the raster polygon map zipcodes in the North +In this example, the raster polygon map zipcodes in the North Carolina sample dataset is used to calculate quantile raster statistics using -the elevation raster map: +the elevation raster map:
     g.region raster=zipcodes -p
    @@ -46,10 +46,9 @@ 

    REFERENCES

    diff --git a/raster/r.stats.zonal/r.stats.zonal.html b/raster/r.stats.zonal/r.stats.zonal.html index 9e5d83bef01..92d175583fe 100644 --- a/raster/r.stats.zonal/r.stats.zonal.html +++ b/raster/r.stats.zonal/r.stats.zonal.html @@ -9,7 +9,7 @@

    DESCRIPTION

    Notably, the output of this module is spatial: The resulting values are recorded as cell values in the output raster map. -

    NOTES

    +

    NOTES

    r.stats.zonal is intended to be a partial replacement for r.statistics, with support @@ -18,9 +18,9 @@

    NOTES

    EXAMPLE

    -In this example, the raster polygon map zipcodes in the North +In this example, the raster polygon map zipcodes in the North Carolina sample dataset is used to calculate zonal raster statistics using -the elevation raster map: +the elevation raster map:
     g.region raster=zipcodes -p
    diff --git a/raster/r.stats/cell_stats.c b/raster/r.stats/cell_stats.c
    index f07f1451a42..51e46b6acc3 100644
    --- a/raster/r.stats/cell_stats.c
    +++ b/raster/r.stats/cell_stats.c
    @@ -66,6 +66,10 @@ int cell_stats(int fd[], int with_percents, int with_counts, int with_areas,
         sort_cell_stats(do_sort);
         print_cell_stats(fmt, with_percents, with_counts, with_areas, with_labels,
                          fs);
    +    for (i = 0; i < nfiles; i++) {
    +        G_free(cell[i]);
    +    }
    +    G_free(cell);
     
         return 0;
     }
    diff --git a/raster/r.stream.extract/r.stream.extract.html b/raster/r.stream.extract/r.stream.extract.html
    index acc9030d0b0..6c0d57b253b 100644
    --- a/raster/r.stream.extract/r.stream.extract.html
    +++ b/raster/r.stream.extract/r.stream.extract.html
    @@ -28,12 +28,12 @@ 

    NOTES

Option threshold defines the minimum (optionally modified) flow accumulation value that will initiate a new stream. If Montgomery's method for channel initiation is used, the cell value of the -accumulation input map is multiplied by (tan(local -slope))^mexp and then compared +accumulation input map is multiplied by (tan(local +slope))^mexp and then compared to threshold. If mexp is given, then the method of Montgomery and Foufoula-Georgiou (1993) is used to initiate a stream with this value. The cell value of the accumulation input map is multiplied -by (tan(local slope))^mexp and then compared +by (tan(local slope))^mexp and then compared to threshold. If threshold is reached or exceeded, a new stream is initiated. The default value 0 disables Montgomery. Montgomery and Foufoula-Georgiou (1993) generally recommend to use 2.0 as @@ -95,7 +95,7 @@

    Stream extraction

    In case of getting the error message -ERROR: Accumulation raster map is NULL but elevation map is not NULL +ERROR: Accumulation raster map is NULL but elevation map is not NULL the computational region must be carefully adjusted to exclude NULL pixels in the accumulation raster map prior to stream extraction. @@ -116,7 +116,7 @@

    Weighed flow accumulation

    Another possibility is to restrict channel initiation to valleys determined from terrain morphology. Valleys can be determined with -r.param.scale method=crosc +r.param.scale method=crosc (cross-sectional or tangential curvature). Curvature values < 0 indicate concave features, i.e. valleys. The size of the processing window determines whether narrow or broad valleys will be identified @@ -153,7 +153,7 @@

    Stream output

    attribute table for layer 1 holds information about the type of stream segment: start segment, or intermediate segment with tributaries, and about the stream network this stream or node belongs to. Columns are -cat int,stream_type varchar(),type_code int,network int. The +cat int,stream_type varchar(),type_code int,network int. The network attribute is the network ID of the stream/node. The encoding for type_code is 0 = start, 1 = intermediate. In layer 2, categories are identical to type_code in layer 1 with additional category 2 = @@ -253,8 +253,8 @@

    REFERENCES

    Data
    , Proceedings of International Geographic Information Systems (IGIS) Symposium '89, pp 275-281 (Baltimore, MD, 18-19 March -1989). URL: -http://faculty.wiu.edu/CR-Ehlschlaeger2/older/IGIS/paper.html +1989). URL: +https://www.researchgate.net/publication/243781937_Using_the_AT_search_algorithm_to_develop_hydrologic_models_from_digital_elevation_data
  • Holmgren, P. (1994). Multiple flow direction algorithms for runoff modelling in grid based elevation models: An empirical evaluation. diff --git a/raster/r.sun/r.sun.html b/raster/r.sun/r.sun.html index ec43813d51c..db141bdf304 100644 --- a/raster/r.sun/r.sun.html +++ b/raster/r.sun/r.sun.html @@ -13,8 +13,8 @@

    DESCRIPTION

    For latitude-longitude coordinates it requires that the elevation map is in meters. The rules are:

      -
    • lat/lon coordinates: elevation in meters; -
    • Other coordinates: elevation in the same unit as the easting-northing coordinates. +
    • lat/lon coordinates: elevation in meters;
    • +
    • Other coordinates: elevation in the same unit as the easting-northing coordinates.
    The solar geometry of the model is based on the works of Krcho (1990), later @@ -104,7 +104,7 @@

    DESCRIPTION

    15° of the sky in an hour, the default step of half an hour will produce 7.5° steps in the data. For relatively smooth output with the sun placed for every degree of movement in the sky you should set the -step to 4 minutes or less. step=0.05 is equivalent +step to 4 minutes or less. step=0.05 is equivalent to every 3 minutes. Of course setting the time step to be very fine proportionally increases the module's running time.
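
For example, requesting global radiation for day 172 (June 21) with a 3-minute time step (a sketch; only a few of the module's inputs are shown):

 r.sun elevation=elevation day=172 step=0.05 glob_rad=global_rad_172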

    The output units are in Wh per squared meter per given @@ -234,7 +234,7 @@

    Large maps and out of memory problems

    partitionable, the input raster maps are using the npartitions parameter. -In case of out of memory error (ERROR: G_malloc: out of memory), the +In case of out of memory error (ERROR: G_malloc: out of memory), the npartitions parameter can be used to run a segmented calculation which consumes less memory during the computations. @@ -294,8 +294,8 @@

    EXAMPLES

    We can compute the day of year from a specific date in Python:
    ->>> import datetime
    ->>> datetime.datetime(2014, 6, 21).timetuple().tm_yday
    +>>> import datetime
    +>>> datetime.datetime(2014, 6, 21).timetuple().tm_yday
     172
     
    @@ -317,53 +317,53 @@

    REFERENCES

  • Hofierka, J., Suri, M. (2002): The solar radiation model for Open source GIS: implementation and applications. International GRASS users conference in Trento, Italy, September 2002. -(PDF) +(PDF)
  • Hofierka, J. (1997). Direct solar radiation modelling within an open GIS environment. Proceedings of JEC-GI'97 conference in Vienna, Austria, IOS -Press Amsterdam, 575-584. +Press Amsterdam, 575-584.
  • Jenco, M. (1992). Distribution of direct solar radiation on georelief and its modelling by means of complex digital model of terrain (in Slovak). Geograficky -casopis, 44, 342-355. +casopis, 44, 342-355.
  • Kasten, F. (1996). The Linke turbidity factor based on improved values of -the integral Rayleigh optical thickness. Solar Energy, 56 (3), 239-244. +the integral Rayleigh optical thickness. Solar Energy, 56 (3), 239-244.
  • Kasten, F., Young, A. T. (1989). Revised optical air mass tables and approximation -formula. Applied Optics, 28, 4735-4738. +formula. Applied Optics, 28, 4735-4738.
  • Kittler, R., Mikler, J. (1986): Basis of the utilization of solar radiation -(in Slovak). VEDA, Bratislava, p. 150. +(in Slovak). VEDA, Bratislava, p. 150.
  • Krcho, J. (1990). Morfometrická analza a digitálne modely georeliéfu (Morphometric analysis and digital models of georelief, in Slovak). -VEDA, Bratislava. +VEDA, Bratislava.
  • Muneer, T. (1990). Solar radiation model for Europe. Building services engineering -research and technology, 11, 4, 153-163. +research and technology, 11, 4, 153-163.
  • Neteler, M., Mitasova, H. (2002): Open Source GIS: A GRASS GIS Approach, Kluwer Academic Publishers. (Appendix explains formula; -r.sun script download) +r.sun script download)
  • Page, J. ed. (1986). Prediction of solar radiation on inclined surfaces. Solar energy R&D in the European Community, series F - Solar radiation data, -Dordrecht (D. Reidel), 3, 71, 81-83. +Dordrecht (D. Reidel), 3, 71, 81-83.
  • Page, J., Albuisson, M., Wald, L. (2001). The European solar radiation atlas: -a valuable digital tool. Solar Energy, 71, 81-83. +a valuable digital tool. Solar Energy, 71, 81-83.
  • Rigollier, Ch., Bauer, O., Wald, L. (2000). On the clear sky model of the ESRA - European Solar radiation Atlas - with respect to the Heliosat method. -Solar energy, 68, 33-48. +Solar energy, 68, 33-48.
  • Scharmer, K., Greif, J., eds., (2000). The European solar radiation atlas, Vol. 2: Database and exploitation software. Paris (Les Presses de l'École -des Mines). +des Mines).
  • Joint Research Centre: GIS solar radiation database for Europe and -Solar radiation and GIS +Solar radiation and GIS
  • AUTHORS

    diff --git a/raster/r.sunmask/r.sunmask.html b/raster/r.sunmask/r.sunmask.html index 27a2ee35f7a..7b43ac502ef 100644 --- a/raster/r.sunmask/r.sunmask.html +++ b/raster/r.sunmask/r.sunmask.html @@ -80,7 +80,7 @@

    NOTES

    correction for atmosphere refraction. The output without -g flag contains related indications. -

    EXAMPLE

    +

    EXAMPLE

    Example for North Carolina sample data set for the calculation of sun position angles and more: diff --git a/raster/r.support/r.support.html b/raster/r.support/r.support.html index 13fdf2a26af..24a2671b72b 100644 --- a/raster/r.support/r.support.html +++ b/raster/r.support/r.support.html @@ -15,31 +15,45 @@

    Raster semantic labels and band management

    EXAMPLES

    -These examples are based on the North Carolina dataset, more specifically the landuse raster map. +These examples are based on the North Carolina dataset, more specifically +the landuse raster map. Copy the landuse map to the current mapset -
    g.copy raster=landuse,my_landuse
    +
    +
    +g.copy raster=landuse,my_landuse
     

    Update statistics

    -
    r.support -s map=my_landuse
    +
    +
    +r.support -s map=my_landuse
     

    Update Title

    -
    r.support map=my_landuse title="Landuse copied"
    +
    +
    +r.support map=my_landuse title="Landuse copied"
     

    Append to History Metadata

    -
    r.support map=my_landuse history="Copied from PERMANENT mapset"
    +
    +
    +r.support map=my_landuse history="Copied from PERMANENT mapset"
     

    Update Units Display

    -
    r.support map=my_landuse units=meter
    +
    +
    +r.support map=my_landuse units=meter
     

    Set semantic label

    + Note: landuse map doesn't conform to the CORINE specification. This is an example only. -
    r.support map=my_landuse semantic_label=CORINE_LULC
    +
    +
    +r.support map=my_landuse semantic_label=CORINE_LULC
     

    NOTES

    @@ -48,7 +62,7 @@

    NOTES

    module will run non-interactively. If only the map name is given, r.support will run interactively within a terminal shell and the user will be prompted for input. -

    Freeform metadata information is stored in a "hist" file which may be +

    Freeform metadata information is stored in a "hist" file which may be appended to by using the history option. Currently this is limited to 50 lines of text with a maximum line length of 78 characters. Any input larger than this will be wrapped to the next line. diff --git a/raster/r.surf.area/r.surf.area.html b/raster/r.surf.area/r.surf.area.html index 3cc1f6cd823..3b27470f9d8 100644 --- a/raster/r.surf.area/r.surf.area.html +++ b/raster/r.surf.area/r.surf.area.html @@ -2,8 +2,8 @@

    DESCRIPTION

    r.surf.area calculates area of regular 3D triangulated points (centers of cells) in current region by adding areas of triangles. -Therefore, area of a flat surface will be reported as (rows + cols --1) * (area of cell) less than area of flat region due to a half +Therefore, area of a flat surface will be reported as (rows + cols +-1) * (area of cell) less than area of flat region due to a half row and half column missing around the perimeter.
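    A worked example of the edge effect described above, for a perfectly flat region (illustrative arithmetic only):

    # For a flat surface the reported area misses half a row and half a column
    rows, cols = 100, 200
    cell_area = 10 * 10                                  # e.g. 10 m resolution
    region_area = rows * cols * cell_area                # "plan" area of the region
    reported_area = region_area - (rows + cols - 1) * cell_area
    print(region_area, reported_area)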

    NOTES

    @@ -12,7 +12,7 @@

    NOTES

    as a fractal shoreline problem, the more resolution the more detail, the more area, etc). This module uses the current region settings, not the resolution of the raster map. This is especially -important for surfaces with NULL values and highly irregular +important for surfaces with NULL values and highly irregular edges. The module does not [currently] attempt to correct for the error introduced by this edge effect. @@ -22,14 +22,14 @@

    NOTES

    Reported totals are:

      -
    1. "Plan" area of NULL values within the current GRASS +
    2. "Plan" area of NULL values within the current GRASS region
    3. -
    4. "Plan" area within calculation region (rows-1 * cols-1 * -cellarea)
    5. +
    6. "Plan" area within calculation region (rows-1 * cols-1 * +cellarea)
    7. Average of the minimum and maximum calculated 3d triangle area within this region
    8. -
    9. "Plan" area within current computational region (rows * cols * -cellarea)
    10. +
    11. "Plan" area within current computational region (rows * cols * +cellarea)
    12. Scaling of calculated area to current region
    @@ -70,7 +70,7 @@

    AUTHOR

    Bill Brown, USACERL December 21, 1994
    -Modified for floating point rasters and NULL values by Eric +Modified for floating point rasters and NULL values by Eric G. Miller (October 17, 2000)
    Updated for GRASS 7, and units option by Martin Landa, Czech Technical diff --git a/raster/r.surf.gauss/r.surf.gauss.html b/raster/r.surf.gauss/r.surf.gauss.html index 35949fe9c22..9386f41107a 100644 --- a/raster/r.surf.gauss/r.surf.gauss.html +++ b/raster/r.surf.gauss/r.surf.gauss.html @@ -49,4 +49,4 @@

    SEE ALSO

    AUTHOR

    -Jo Wood, ASSIST's home +Jo Wood, ASSIST's home diff --git a/raster/r.surf.idw/r.surf.idw.html b/raster/r.surf.idw/r.surf.idw.html index dac9c6ead76..dd985fbc178 100644 --- a/raster/r.surf.idw/r.surf.idw.html +++ b/raster/r.surf.idw/r.surf.idw.html @@ -17,7 +17,7 @@

    DESCRIPTION

    The -e flag is the error analysis option that interpolates values only for those cells of the input raster map which have non-zero values and -outputs the difference (see NOTES below). +outputs the difference (see NOTES below).

    The npoints parameter defines the number of nearest data points used to determine the interpolated value of an output raster cell. @@ -66,8 +66,8 @@

    NOTES

    for the former may include unacceptable nonconformities in the surface pattern. - -

    +

    Surface-generation error analysis

    + The -e flag option provides a standard surface-generation error analysis facility. It produces an output raster map of the difference of interpolated values minus input values for those cells diff --git a/raster/r.terraflow/r.terraflow.html b/raster/r.terraflow/r.terraflow.html index 29fa310c5dc..80f7e5463dd 100644 --- a/raster/r.terraflow/r.terraflow.html +++ b/raster/r.terraflow/r.terraflow.html @@ -44,12 +44,12 @@

    DESCRIPTION

    • On plateaus (flat areas that spill out) r.terraflow routes flow so that globally the flow goes towards the spill cells of -the plateaus. +the plateaus.
    • On sinks (flat areas that do not spill out, including one-cell pits) r.terraflow assigns flow by flooding the terrain until all the sinks are filled and assigning flow directions on the filled -terrain. +terrain.

    In order to flood the terrain, r.terraflow identifies all @@ -163,7 +163,7 @@

    EXAMPLES

    -
    +
    Flow accumulation
      -
    • no-data (null), if the respective point in the elevation map is no-data (null) -
    • -1, if the point is not visible -
    • the difference in elevation between the point and the viewpoint, if the point is visible. +
    • no-data (null), if the respective point in the elevation map is no-data (null)
    • +
    • -1, if the point is not visible
    • +
    • the difference in elevation between the point and the viewpoint, if the point is visible.

    @@ -58,8 +58,6 @@

    NOTES

    r.mapcalc can be used to create a negative of the viewshed map. - -
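    One possible way to do this from Python, assuming the GRASS scripting library and a viewshed output map named viewshed (a placeholder name); note that the comparison with -1 relies on -1 not colliding with a real elevation difference of a visible cell:

    import grass.script as gs

    # Flag only the cells reported as not visible (-1 in the output described above)
    gs.mapcalc("not_visible = if(viewshed == -1, 1, null())")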

    By default the elevations are not adjusted for the curvature of the earth. The user can turn this on with flag @@ -163,10 +161,10 @@

    The algorithm

    - +
    - - + + @@ -187,7 +185,7 @@

    EXAMPLES

    -r.viewshed example
    +r.viewshed example
    Viewshed shown on shaded terrain (observer position in the north-east quadrant with white dot; 5m above ground)
    @@ -217,11 +215,11 @@

    REFERENCES

      -
    • Computing Visibility on Terrains in External Memory. Herman -Haverkort, Laura Toma and Yi Zhuang. In ACM Journal on Experimental +
    • Computing Visibility on Terrains in External Memory. +Herman Haverkort, Laura Toma and Yi Zhuang. In ACM Journal on Experimental Algorithmics (JEA) 13 (2009).
    • -
    • Computing +
    • Computing Visibility on Terrains in External Memory. Herman Haverkort, Laura Toma and Yi Zhuang. In the Proceedings of the 9th Workshop on Algorithm Engineering and Experiments / Workshop on Analytic Algorithms @@ -236,7 +234,7 @@

      SEE ALSO

      AUTHORS

      -

      Laura Toma (Bowdoin College): ltoma@bowdoin.edu -

      Yi Zhuang (Carnegie-Mellon University): yzhuang@andrew.cmu.edu -

      William Richard (Bowdoin College): willster3021@gmail.com +

      Laura Toma (Bowdoin College): ltoma@bowdoin.edu +

      Yi Zhuang (Carnegie-Mellon University): yzhuang@andrew.cmu.edu +

      William Richard (Bowdoin College): willster3021@gmail.com

      Markus Metz diff --git a/raster/r.volume/r.volume.html b/raster/r.volume/r.volume.html index f9c20e324c1..d8eb8cfc14c 100644 --- a/raster/r.volume/r.volume.html +++ b/raster/r.volume/r.volume.html @@ -43,11 +43,11 @@

      CENTROIDS

      Attribute table linked to the vector map with centroids contains several columns:
        -
      • cat - category value (integer)
      • -
      • volume - volume value (double precision)
      • -
      • average - average value in the clump (double precision)
      • -
      • sum - sum of cell values in the clump (double precision)
      • -
      • count - number of cells with the category (integer)
      • +
      • cat - category value (integer)
      • +
      • volume - volume value (double precision)
      • +
      • average - average value in the clump (double precision)
      • +
      • sum - sum of cell values in the clump (double precision)
      • +
      • count - number of cells with the category (integer)

      @@ -139,8 +139,8 @@

      Report of geological data

      # Total Volume = 22351026655.81 -The Data Total column is the sum of the elevations for each -in each of the fields. The Total Volume is the sum +The Data Total column is the sum of the elevations for each +in each of the fields. The Total Volume is the sum multiplied by the east-west resolution times the north-south resolution. Note that the units on the volume may be difficult if the units of cell values on the input raster map and the resolution diff --git a/raster/r.walk/r.walk.html b/raster/r.walk/r.walk.html index 2523b7fc0b6..8d1412aa665 100644 --- a/raster/r.walk/r.walk.html +++ b/raster/r.walk/r.walk.html @@ -34,9 +34,9 @@

      NOTES

      where:
        -
      • T is time of movement in seconds,
      • -
      • delta S is the horizontal distance covered in meters,
      • -
      • delta H is the altitude difference in meters.
      • +
      • T is time of movement in seconds,
      • +
      • delta S is the horizontal distance covered in meters,
      • +
      • delta H is the altitude difference in meters.

      @@ -166,13 +166,13 @@

      REFERENCES

      • Aitken, R. 1977. Wilderness areas in Scotland. Unpublished Ph.D. thesis. - University of Aberdeen. + University of Aberdeen.
      • Steno Fontanari, University of Trento, Italy, Ingegneria per l'Ambiente e - il Territorio, 2000-2001. + il Territorio, 2000-2001.
      • Svilluppo di metodologie GIS per la determinazione dell'accessibilità territoriale come supporto alle decisioni nella gestione ambientale.
      • Langmuir, E. 1984. Mountaincraft and leadership. The Scottish - Sports Council/MLTB. Cordee, Leicester. + Sports Council/MLTB. Cordee, Leicester.

      SEE ALSO

      diff --git a/raster/r.water.outlet/main.c b/raster/r.water.outlet/main.c index 12135f500b7..ad060205d2f 100644 --- a/raster/r.water.outlet/main.c +++ b/raster/r.water.outlet/main.c @@ -75,8 +75,14 @@ int main(int argc, char *argv[]) G_get_window(&window); - strcpy(drain_name, opt.input->answer); - strcpy(basin_name, opt.output->answer); + if (G_strlcpy(drain_name, opt.input->answer, sizeof(drain_name)) >= + sizeof(drain_name)) { + G_fatal_error(_("Drain name <%s> is too long"), opt.input->answer); + } + if (G_strlcpy(basin_name, opt.output->answer, sizeof(basin_name)) >= + sizeof(basin_name)) { + G_fatal_error(_("Basin name <%s> is too long"), opt.output->answer); + } if (!G_scan_easting(opt.coords->answers[0], &E, G_projection())) G_fatal_error(_("Illegal east coordinate '%s'"), diff --git a/raster/r.water.outlet/r.water.outlet.html b/raster/r.water.outlet/r.water.outlet.html index 7504ec1e722..2ce1c848e89 100644 --- a/raster/r.water.outlet/r.water.outlet.html +++ b/raster/r.water.outlet/r.water.outlet.html @@ -52,7 +52,7 @@

      EXAMPLE

      -
      +
      Figure: Watershed draped over flow accumulation
      diff --git a/raster/r.watershed/front/r.watershed.html b/raster/r.watershed/front/r.watershed.html index d9a226412af..5713f611ea3 100644 --- a/raster/r.watershed/front/r.watershed.html +++ b/raster/r.watershed/front/r.watershed.html @@ -121,23 +121,23 @@

      NOTES

      Output tci raster map contains topographic index TCI, computed as -ln(α / tan(β)) where α is the cumulative +ln(α / tan(β)) where α is the cumulative upslope area draining through a point per unit contour length and -tan(β) is the local slope angle. The TCI reflects the +tan(β) is the local slope angle. The TCI reflects the tendency of water to accumulate at any point in the catchment and the tendency for gravitational forces to move that water downslope (Quinn -et al. 1991). This value will be negative if α / -tan(β) < 1. +et al. 1991). This value will be negative if α / +tan(β) < 1.

      Output spi raster map contains stream power index SPI, computed as -α * tan(β) where α is the cumulative +α * tan(β) where α is the cumulative upslope area draining through a point per unit contour length and -tan(β) is the local slope angle. The SPI reflects the +tan(β) is the local slope angle. The SPI reflects the power of water flow at any point in the catchment and the tendency for gravitational forces to move that water downslope (Moore -et al. 1991). This value will be negative if α < 0, +et al. 1991). This value will be negative if α < 0, i.e. for cells with possible surface runoff from outside of the current geographic region. @@ -323,7 +323,7 @@
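    A hedged illustration of the two formulas quoted above (not the r.watershed implementation itself), assuming the GRASS Python scripting library and placeholder maps alpha (upslope area per unit contour length) and slope_deg (slope angle in degrees); r.mapcalc's log() is the natural logarithm and its tan() expects degrees:

    import grass.script as gs

    # TCI = ln(alpha / tan(beta)); undefined where the slope is exactly zero
    gs.mapcalc("tci_manual = log(alpha / tan(slope_deg))")
    # SPI = alpha * tan(beta)
    gs.mapcalc("spi_manual = alpha * tan(slope_deg)")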

      MASK and no data

      current geographic region filled with elevation values. Areas without elevation data (masked or NULL cells) are ignored. It is NOT necessary to create a raster map (or raster reclassification) -named MASK for NULL cells. Areas without elevation data will +named MASK for NULL cells. Areas without elevation data will be treated as if they are off the edge of the region. Such areas will reduce the memory necessary to run the program. Masking out unimportant areas can significantly reduce processing time if the @@ -360,11 +360,11 @@

      Further processing of output layers

    • Use a resample of the basins catchment raster map as a MASK.
      The equivalent vector map method is similar using v.select or - v.overlay. + v.overlay.
    • Use the r.cost module with a - point in the river as a starting point. + point in the river as a starting point.
    • Use the v.net.iso module - with a node in the river as a starting point. + with a node in the river as a starting point.
    • All individual river networks in the stream segments output can be @@ -398,21 +398,21 @@

      Further processing of output layers

      Given that the drainage is 8 directions numbered counter-clockwise starting from 1 in north-east direction, multiplying the output -by 45 (by 45. to get a double precision floating point raster +by 45 (by 45. to get a double precision floating point raster map in r.mapcalc) gives the directions in degrees. For most applications, zeros which indicate depressions specified by depression and negative values which indicate runoff leaving the region -should be replaced by NULL (null() in +should be replaced by NULL (null() in r.mapcalc). The following command performs these replacements:
      -r.mapcalc "drainage_degrees = if(drainage > 0, 45. * drainage, null())"
      +r.mapcalc "drainage_degrees = if(drainage > 0, 45. * drainage, null())"
       
      Alternatively, the user can use the -a flag or later the -abs() function in +abs() function in r.mapcalc if the runoff is leaving the region. @@ -424,7 +424,7 @@

      Convert r.watershed streams map output to a vector map

      If you want a detailed stream network, set the threshold option small to create lots of catchment basins, as only one stream is presented -per catchment. The r.to.vect -v flag preserves the catchment +per catchment. The r.to.vect -v flag preserves the catchment ID as the vector category number.
      @@ -471,7 +471,7 @@ 

      Convert r.watershed streams map output to a vector map

      r.mapcalc 'MASK = if(!isnull(elevation.dem))' r.mapcalc "rwater.course = \ - if( abs(rwater.accum) > $mean_of_abs, \ + if( abs(rwater.accum) > $mean_of_abs, \ abs(rwater.accum), \ null() )" r.colors -g rwater.course col=bcyr @@ -515,45 +515,44 @@

      REFERENCES

      Proceedings of International Geographic Information Systems (IGIS) Symposium '89, pp 275-281 (Baltimore, MD, 18-19 March 1989).
      URL: -http://chuck.ehlschlaeger.info/older/IGIS/paper.html +http://chuck.ehlschlaeger.info/older/IGIS/paper.html
    • Holmgren P. (1994). Multiple flow direction algorithms for runoff modelling in grid based elevation models: An empirical evaluation. Hydrological Processes Vol 8(4), 327-334.
      -DOI: 10.1002/hyp.3360080405 +DOI: 10.1002/hyp.3360080405
    • Kinner D., Mitasova H., Harmon R., Toma L., Stallard R. (2005). GIS-based Stream Network Analysis for The Chagres River Basin, Republic of Panama. The Rio Chagres: A Multidisciplinary Profile of a Tropical Watershed, R. Harmon (Ed.), Springer/Kluwer, p.83-95.
      URL: -http://fatra.cnr.ncsu.edu/~hmitaso/measwork/panama/panama.html +http://fatra.cnr.ncsu.edu/~hmitaso/measwork/panama/panama.html
    • McCool et al. (1987). Revised Slope Steepness Factor for the Universal -Soil Loss Equation, Transactions of the ASAE Vol 30(5). +Soil Loss Equation, Transactions of the ASAE Vol 30(5).
    • Metz M., Mitasova H., Harmon R. (2011). Efficient extraction of drainage networks from massive, radar-based elevation models with least cost path search, Hydrol. Earth Syst. Sci. Vol 15, 667-678.
      -DOI: 10.5194/hess-15-667-2011 +DOI: 10.5194/hess-15-667-2011
    • Moore I.D., Grayson R.B., Ladson A.R. (1991). Digital terrain modelling: a review of hydrogical, geomorphological, and biological applications, Hydrological Processes, Vol 5(1), 3-30
      -DOI: 10.1002/hyp.3360050103 +DOI: 10.1002/hyp.3360050103
    • Quinn P., K. Beven K., Chevallier P., Planchon O. (1991). The prediction of hillslope flow paths for distributed hydrological modelling using Digital Elevation Models, Hydrological Processes Vol 5(1), p.59-79.
      -DOI: 10.1002/hyp.3360050106 +DOI: 10.1002/hyp.3360050106
    • Weltz M. A., Renard K.G., Simanton J. R. (1987). Revised Universal Soil Loss Equation for Western Rangelands, U.S.A./Mexico Symposium of Strategies for Classification and Management of Native Vegetation for -Food Production In Arid Zones (Tucson, AZ, 12-16 Oct. 1987). -
    • +Food Production In Arid Zones (Tucson, AZ, 12-16 Oct. 1987).

    SEE ALSO

    diff --git a/raster/r.watershed/ram/do_cum.c b/raster/r.watershed/ram/do_cum.c index c882b82bb46..0bebc9dee46 100644 --- a/raster/r.watershed/ram/do_cum.c +++ b/raster/r.watershed/ram/do_cum.c @@ -218,6 +218,8 @@ int do_cum(void) } } G_free(astar_pts); + G_free(contour); + G_free(dist_to_nbr); return 0; } @@ -632,6 +634,7 @@ int do_cum_mfd(void) G_free(dist_to_nbr); G_free(weight); + G_free(contour); return 0; } diff --git a/raster/r.watershed/seg/do_cum.c b/raster/r.watershed/seg/do_cum.c index bd5bbebbed9..f7918ecdb5c 100644 --- a/raster/r.watershed/seg/do_cum.c +++ b/raster/r.watershed/seg/do_cum.c @@ -232,6 +232,8 @@ int do_cum(void) G_percent(do_points, do_points, 1); /* finish it */ seg_close(&astar_pts); + G_free(dist_to_nbr); + G_free(contour); return 0; } diff --git a/raster/r.what.color/r.what.color.html b/raster/r.what.color/r.what.color.html index 607c0732613..9113aa8b673 100644 --- a/raster/r.what.color/r.what.color.html +++ b/raster/r.what.color/r.what.color.html @@ -4,7 +4,7 @@

    DESCRIPTION

    category values in a raster input map.

    Values may be specified either using the values= option, or by specifying the -i flag and passing the values on -stdin, one per line. +stdin, one per line.

    For each value which is specified, a line of output will be generated consisting of the category value followed by the color, e.g.: @@ -53,8 +53,8 @@

    DESCRIPTION

    Common formats:

      -
    • Tcl/Tk: format="#%02x%02x%02x" -
    • WxPython: format='"#%02x%02x%02x"' or format='"(%d,%d,%d)"' +
    • Tcl/Tk: format="#%02x%02x%02x"
    • +
    • WxPython: format='"#%02x%02x%02x"' or format='"(%d,%d,%d)"'
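    The two format strings can be checked directly in Python (illustration only):

    # What the listed format strings produce for an example RGB triplet
    rgb = (255, 128, 0)
    print("#%02x%02x%02x" % rgb)   # Tcl/Tk style -> #ff8000
    print("(%d,%d,%d)" % rgb)      # WxPython tuple style -> (255,128,0)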

    SEE ALSO

    diff --git a/raster/r.what/r.what.html b/raster/r.what/r.what.html index 5d8d10913af..c0219d15cdf 100644 --- a/raster/r.what/r.what.html +++ b/raster/r.what/r.what.html @@ -7,7 +7,7 @@

    DESCRIPTION

    with each location.

    The input coordinates can be entered directly on the command line -via coordinates parameter, or redirected via stdin +via coordinates parameter, or redirected via stdin from an input text file, script, or piped from another program (like v.out.ascii). Coordinates can be given also as a vector points map (points). @@ -17,7 +17,7 @@

    DESCRIPTION

    locations and labels.

    Each line of the input consists of an easting, a northing, and an optional label, which are separated by spaces. In interactive mode, the word -"end" must be typed after the last pair of input coordinates. +"end" must be typed after the last pair of input coordinates.

    r.what output consists of the input geographic location and label, and, for each user-named raster map layer, the category value, and (if the -f label flag is specified) the category label associated with @@ -112,8 +112,8 @@

    Input from a text file containing coordinates

    Input from standard input on the command line

    -Input coordinates may be given directly from standard input (stdin), -for example (input data appears between the "EOF" markers): +Input coordinates may be given directly from standard input (stdin), +for example (input data appears between the "EOF" markers):
     r.what map=landuse96_28m,aspect << EOF
    @@ -134,7 +134,7 @@ 

    Input from standard input on the command line

    Input coordinates piped from another program

    The input coordinates may be "piped" from the standard output -(stdout) of another program. In the next example, vector +(stdout) of another program. In the next example, vector point coordinates are piped from the v.out.ascii module. diff --git a/raster/rasterintro.html b/raster/rasterintro.html index 93d05f35410..51770eceb00 100644 --- a/raster/rasterintro.html +++ b/raster/rasterintro.html @@ -18,17 +18,17 @@

    Raster maps in general

    As a general rule in GRASS GIS:
    1. Raster output maps have their bounds and resolution equal to those -of the current computational region. +of the current computational region.
    2. Raster input maps are automatically cropped/padded and rescaled -(using nearest-neighbour resampling) to match the current region. +(using nearest-neighbour resampling) to match the current region.
    3. Raster input maps are automatically masked if a raster map named MASK exists. The MASK is only applied when reading maps - from the disk. + from the disk.
    There are a few exceptions to this: -r.in.* programs read the data cell-for-cell, with no resampling. When +r.in.* programs read the data cell-for-cell, with no resampling. When reading non-georeferenced data, the imported map will usually have its lower-left corner at (0,0) in the project's coordinate system; the user needs to use r.region to "place" the imported map. @@ -198,8 +198,8 @@
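    Because outputs follow the current computational region, a typical script aligns the region to an input raster first; a minimal sketch using the GRASS Python scripting library (the map name elevation is a placeholder):

    import grass.script as gs

    # Set the computational region to match the input raster and print it
    gs.run_command("g.region", raster="elevation", flags="p")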

    Raster map statistics

    (d.polar). Univariate statistics (r.univar) and -reports are also available (r.report,r.stats, r.volume). +reports are also available (r.report, +r.stats, r.volume). Since r.univar may be slow for extended statistics these can be calculated using @@ -267,7 +267,7 @@

    2D raster maps

    • 32bit signed integer (CELL),
    • single-precision floating-point (FCELL), and
    • -
    • double-precision floating-point (DCELL). +
    • double-precision floating-point (DCELL).
    In most GRASS GIS resources, 2D raster maps are usually called "raster" maps. @@ -295,16 +295,16 @@

    Raster compression

    All GRASS GIS raster map types are by default ZSTD compressed if available, otherwise ZLIB compressed. Through the environment variable -GRASS_COMPRESSOR the compression method can be set to RLE, +GRASS_COMPRESSOR the compression method can be set to RLE, ZLIB, LZ4, BZIP2, or ZSTD.

    Important: the NULL file compression can be turned off with -export GRASS_COMPRESS_NULLS=0. Raster maps with NULL file +export GRASS_COMPRESS_NULLS=0. Raster maps with NULL file compression can only be opened with GRASS GIS 7.2.0 or later. NULL file compression for a particular raster map can be managed with r.null -z.

    Integer (CELL type) raster maps can be compressed with RLE if -the environment variable GRASS_COMPRESSOR exists and is set to +the environment variable GRASS_COMPRESSOR exists and is set to RLE. However, this is not recommended.
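    A minimal sketch of setting these environment variables from Python before new raster maps are written; the variable names come from the text above, the map name is a placeholder:

    import os
    import grass.script as gs

    os.environ["GRASS_COMPRESSOR"] = "ZSTD"    # or ZLIB, LZ4, BZIP2, RLE
    os.environ["GRASS_COMPRESS_NULLS"] = "1"   # 0 would disable NULL file compression

    # Any raster created from now on in this process uses the chosen settings
    gs.mapcalc("compression_demo = row() + col()")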

    Floating point (FCELL, DCELL) raster maps never use RLE compression; @@ -323,7 +323,7 @@

    Raster compression

    compression level which is the best compromise between speed and compression ratio, also when compared to other available compression methods. Valid levels are in the range [1, 9] and can be set with the -environment variable GRASS_ZLIB_LEVEL. +environment variable GRASS_ZLIB_LEVEL.
    LZ4
    LZ4 is a very fast compression method, about as fast as no compression. Decompression is also very fast. The compression ratio is @@ -361,4 +361,5 @@

    See also

  • Introduction into temporal data processing
  • Database management
  • Projections and spatial transformations
  • +
  • Graphical User Interface
  • diff --git a/raster3d/r3.cross.rast/r3.cross.rast.html b/raster3d/r3.cross.rast/r3.cross.rast.html index cd60934816a..71aad7f05a3 100644 --- a/raster3d/r3.cross.rast/r3.cross.rast.html +++ b/raster3d/r3.cross.rast/r3.cross.rast.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -This module creates a cross section 2D map from one 3D raster volume +r3.cross.rast creates a cross section 2D map from one 3D raster volume map based on a 2D elevation map. It checks if the value of the elevation map is located in the z-coordinate space of the 3D map. If so, the 3D voxel value for this position is transferred to the related diff --git a/raster3d/r3.flow/r3.flow.html b/raster3d/r3.flow/r3.flow.html index 7a7c60fd21d..96eff36813d 100644 --- a/raster3d/r3.flow/r3.flow.html +++ b/raster3d/r3.flow/r3.flow.html @@ -54,9 +54,9 @@

    EXAMPLES

    g.region res=25 res3=25 t=100 b=0 n=1000 s=0 w=0 e=1000 -p3 # now create the input raster maps for a confined aquifer -r3.mapcalc expression="phead = if(row() == 1 && depth() == 4, 50, 40)" -r3.mapcalc expression="status = if(row() == 1 && depth() == 4, 2, 1)" -r3.mapcalc expression="well = if(row() == 20 && col() == 20 && depth() == 2, -0.25, 0)" +r3.mapcalc expression="phead = if(row() == 1 && depth() == 4, 50, 40)" +r3.mapcalc expression="status = if(row() == 1 && depth() == 4, 2, 1)" +r3.mapcalc expression="well = if(row() == 20 && col() == 20 && depth() == 2, -0.25, 0)" r3.mapcalc expression="hydcond = 0.00025" r3.mapcalc expression="syield = 0.0001" r.mapcalc expression="recharge = 0.0" @@ -98,4 +98,4 @@

    SEE ALSO

    AUTHOR

    -Anna Petrasova, NCSU OSGeoREL, developed during GSoC 2014. +Anna Petrasova, NCSU GeoForAll Lab, developed during GSoC 2014. diff --git a/raster3d/r3.gradient/r3.gradient.html b/raster3d/r3.gradient/r3.gradient.html index a0fb431a620..e9f588ac103 100644 --- a/raster3d/r3.gradient/r3.gradient.html +++ b/raster3d/r3.gradient/r3.gradient.html @@ -24,5 +24,5 @@

    SEE ALSO

    AUTHOR

    -Anna Petrasova, NCSU OSGeoREL, +Anna Petrasova, NCSU GeoForAll Lab, developed during GSoC 2014. diff --git a/raster3d/r3.gwflow/r3.gwflow.html b/raster3d/r3.gwflow/r3.gwflow.html index 7ced5f01ba8..8df4a38140c 100644 --- a/raster3d/r3.gwflow/r3.gwflow.html +++ b/raster3d/r3.gwflow/r3.gwflow.html @@ -4,7 +4,7 @@

    DESCRIPTION

    confined groundwater flow in three dimensions based on volume maps and the current 3D region settings. All initial- and boundary-conditions must be provided as volume maps. -The unit of the current coordinate refernce system must be meters. +The unit of the current coordinate reference system must be meters.

    This module is sensitive to mask settings. All cells which are outside the mask are ignored and handled as no flow boundaries.

    The module calculates the piezometric head and optionally the water @@ -78,9 +78,9 @@

    EXAMPLE 1

    g.region res=25 res3=25 t=100 b=0 n=1000 s=0 w=0 e=1000 -p3 #now create the input raster maps for a confined aquifer -r3.mapcalc expression="phead = if(row() == 1 && depth() == 4, 50, 40)" -r3.mapcalc expression="status = if(row() == 1 && depth() == 4, 2, 1)" -r3.mapcalc expression="well = if(row() == 20 && col() == 20 && depth() == 2, -0.25, 0)" +r3.mapcalc expression="phead = if(row() == 1 && depth() == 4, 50, 40)" +r3.mapcalc expression="status = if(row() == 1 && depth() == 4, 2, 1)" +r3.mapcalc expression="well = if(row() == 20 && col() == 20 && depth() == 2, -0.25, 0)" r3.mapcalc expression="hydcond = 0.00025" r3.mapcalc expression="syield = 0.0001" r.mapcalc expression="recharge = 0.0" @@ -105,14 +105,14 @@

    EXAMPLE 2

    g.region res=15 res3=15 t=500 b=0 n=1000 s=0 w=0 e=1000 #now create the input raster maps for a confined aquifer -r3.mapcalc expression="phead = if(col() == 1 && depth() == 33, 50, 40)" -r3.mapcalc expression="status = if(col() == 1 && depth() == 33, 2, 1)" -r3.mapcalc expression="well = if(row() == 20 && col() == 20 && depth() == 3, -0.25, 0)" -r3.mapcalc expression="well = if(row() == 50 && col() == 50 && depth() == 3, -0.25, well)" +r3.mapcalc expression="phead = if(col() == 1 && depth() == 33, 50, 40)" +r3.mapcalc expression="status = if(col() == 1 && depth() == 33, 2, 1)" +r3.mapcalc expression="well = if(row() == 20 && col() == 20 && depth() == 3, -0.25, 0)" +r3.mapcalc expression="well = if(row() == 50 && col() == 50 && depth() == 3, -0.25, well)" r3.mapcalc expression="hydcond = 0.0025" -r3.mapcalc expression="hydcond = if(depth() < 30 && depth() > 23 && col() < 60, 0.000025, hydcond)" -r3.mapcalc expression="hydcond = if(depth() < 20 && depth() > 13 && col() > 7, 0.000025, hydcond)" -r3.mapcalc expression="hydcond = if(depth() < 10 && depth() > 7 && col() < 60, 0.000025, hydcond)" +r3.mapcalc expression="hydcond = if(depth() < 30 && depth() > 23 && col() < 60, 0.000025, hydcond)" +r3.mapcalc expression="hydcond = if(depth() < 20 && depth() > 13 && col() > 7, 0.000025, hydcond)" +r3.mapcalc expression="hydcond = if(depth() < 10 && depth() > 7 && col() < 60, 0.000025, hydcond)" r3.mapcalc expression="syield = 0.0001" r3.gwflow solver=cg phead=phead statuyield=status hc_x=hydcond hc_y=hydcond \ diff --git a/raster3d/r3.in.ascii/r3.in.ascii.html b/raster3d/r3.in.ascii/r3.in.ascii.html index 0316fa76029..6d20844e70c 100644 --- a/raster3d/r3.in.ascii/r3.in.ascii.html +++ b/raster3d/r3.in.ascii/r3.in.ascii.html @@ -16,8 +16,8 @@

    NOTES

    that is visualized in the following picture, independently from the specified ordering in the ASCII input file:
    -
    -
    [SDF][SDF][SDF][SDF]
    The sweep-line.
    +
    +
    @@ -47,7 +47,7 @@

    Format

    The supported row/depth ordering is documented in the r3.out.ascii manual page. The order of the data in the input file does not specify the data order in the generated output 3D raster map which is in any case -north -> south, west -> east, bottom -> top order. +north -> south, west -> east, bottom -> top order. So dependent on the order information the data is automatically imported into the correct internal coordinate system.

    The version and order options are not mandatory. In case no version and @@ -61,7 +61,7 @@

    EXAMPLES

    4x3x2 sample. Note in case no specific ordering is specified in the input file the upper-left (NW) corner of the bottom level comes first. The according -order option is: nsbt for north -> south, bottom -> top ordering. This is +order option is: nsbt for north -> south, bottom -> top ordering. This is identical with r.in.ascii for single level data. So the y coordinate is 0 at the northern edge. diff --git a/raster3d/r3.in.bin/r3.in.bin.html b/raster3d/r3.in.bin/r3.in.bin.html index a1735a49d6f..e3c58afc5e0 100644 --- a/raster3d/r3.in.bin/r3.in.bin.html +++ b/raster3d/r3.in.bin/r3.in.bin.html @@ -12,8 +12,8 @@

    DESCRIPTION

    NOTES

    -The write order of the rows (north->south to south->north) and -the write order of the depths (bottom->top to top->bottom) can be switched. +The write order of the rows (north->south to south->north) and +the write order of the depths (bottom->top to top->bottom) can be switched.

    Have a look at the r3.out.ascii manual page that describes the internal layout of the 3D raster maps and the supported diff --git a/raster3d/r3.in.lidar/r3.in.lidar.html index 2edddfceb9e..07d30268f6f 100644 --- a/raster3d/r3.in.lidar/r3.in.lidar.html +++ b/raster3d/r3.in.lidar/r3.in.lidar.html @@ -26,17 +26,17 @@

    NOTES

  • This module is new and partially experimental. Please don't rely on its interface and be critical towards its outputs. - Please report issues on the mailing list or in the bug tracker. + Please report issues on the mailing list or in the bug tracker
  • .
  • No reprojection is performed, you need to reproject ahead or - use a GRASS project with the coordinate system that matches that of the data. + use a GRASS project with the coordinate system that matches that of the data.
  • Some temporary maps are created but not cleaned up. Use of - --overwrite might be necessary even when not desired. + --overwrite might be necessary even when not desired.
  • Expects points to have intensity and causing random (undefined) result for related outputs (sum, mean, proportional_sum) - when the intensity is not present but the outputs were requested. + when the intensity is not present but the outputs were requested.
  • EXAMPLES

    @@ -100,7 +100,7 @@

    Complete workflow for vertical structure analysis

    The class_filter option should also be provided if only part of -the points is analyzed, for example class_filter=3,4,5 would be +the points is analyzed, for example class_filter=3,4,5 would be used for low, medium, and high vegetation if the LAS file follows the standard ASPRS class numbers. @@ -114,7 +114,7 @@

    Complete workflow for vertical structure analysis

    g.region raster=points_n -p3 -class_filter=2 is used to limit +class_filter=2 is used to limit
     r.in.lidar input=points.las output=ground_mean method=mean class_filter=2
    @@ -137,7 +137,7 @@ 

    Complete workflow for vertical structure analysis

    Now we need to determine upper vertical limit for the 3D raster -(the top value from g.region -p3). This can be potentially +(the top value from g.region -p3). This can be potentially done with lower resolution. The -d flag ensures that the ground raster will be used in its actual resolution regardless of the resolution of the output. @@ -174,14 +174,14 @@

    REFERENCES

    Processing UAV and lidar point clouds in GRASS GIS. XXIII ISPRS Congress 2016 [ISPRS Archives, -ResearchGate] +ResearchGate]
  • -ASPRS LAS format +ASPRS LAS format
  • -LAS library +LAS library
  • -LAS library C API documentation +LAS library C API documentation
  • SEE ALSO

    diff --git a/raster3d/r3.in.v5d/r3.in.v5d.html b/raster3d/r3.in.v5d/r3.in.v5d.html index be33aec5b32..36644159fa5 100644 --- a/raster3d/r3.in.v5d/r3.in.v5d.html +++ b/raster3d/r3.in.v5d/r3.in.v5d.html @@ -4,7 +4,7 @@

    DESCRIPTION

    one variable and one time step). Otherwise, only first variable and timestep from 4/5D V5D file will be imported. -

    Vis5D is a system +

    Vis5D is a system for interactive visualization of large 5D gridded data sets such as those produced by numerical weather models. The user can make isosurfaces, contour line slices, colored slices, volume renderings, etc. of data in a 3D raster map, diff --git a/raster3d/r3.mask/r3.mask.html b/raster3d/r3.mask/r3.mask.html index 3e182fc0b49..094d81bd89e 100644 --- a/raster3d/r3.mask/r3.mask.html +++ b/raster3d/r3.mask/r3.mask.html @@ -1,9 +1,16 @@

    DESCRIPTION

    -File map is used as reference file. +r3.mask facilitates the creation of a raster3d +"MASK" map to control raster3d operations. + +The raster3d map is used as the reference map. + Cells in the mask are marked as "mask out" if the corresponding cell in map contains a value in the range specified with maskvalues. -

    Before a new 3D-mask can be created the existing mask has to be removed + +

    NOTES

    + +Before a new 3D-mask can be created the existing mask has to be removed with g.remove.

    SEE ALSO

    diff --git a/raster3d/r3.mkdspf/r3.mkdspf.html b/raster3d/r3.mkdspf/r3.mkdspf.html index e73217df618..b8a4f584f60 100644 --- a/raster3d/r3.mkdspf/r3.mkdspf.html +++ b/raster3d/r3.mkdspf/r3.mkdspf.html @@ -3,10 +3,10 @@

    DESCRIPTION

    Creates a display file from an existing grid3 file according to specified threshold levels. The display file is a display list of polygons that represent isosurfaces of the data volume. If -specific levels are given, additional optional parameters -are ignored. Min or max may be used alone or together -to specify a sub-range of the data. The step -parameter is given precedence over tnum. +specific levels are given, additional optional parameters +are ignored. Min or max may be used alone or together +to specify a sub-range of the data. The step +parameter is given precedence over tnum.

    Flags:

    @@ -53,7 +53,7 @@

    NOTES

    EXAMPLES

    -With grid3 data (phdata) in the range 3-7, +With grid3 data (phdata) in the range 3-7, we only want to see isosurface values for the range 4-6. Any of these commands will produce the same results:
    diff --git a/raster3d/r3.null/r3.null.html b/raster3d/r3.null/r3.null.html
    index 237b8a785f8..f4ef294914d 100644
    --- a/raster3d/r3.null/r3.null.html
    +++ b/raster3d/r3.null/r3.null.html
    @@ -1,6 +1,6 @@
     

    DESCRIPTION

    -Modifies the NULL values of a 3D raster map. +r3.null modifies the NULL values of a 3D raster map.

    SEE ALSO

    diff --git a/raster3d/r3.out.ascii/r3.out.ascii.html b/raster3d/r3.out.ascii/r3.out.ascii.html index 8f5f0ed0f11..5cb54e7fef9 100644 --- a/raster3d/r3.out.ascii/r3.out.ascii.html +++ b/raster3d/r3.out.ascii/r3.out.ascii.html @@ -72,8 +72,8 @@

    NOTES

    The internal storage scheme of 3D raster maps is visualized in the following picture:

    -
    -
    The volume coordinate system and tile layout of the imported voxel map
    +
    +
    diff --git a/raster3d/r3.out.bin/r3.out.bin.html b/raster3d/r3.out.bin/r3.out.bin.html index e2c2ee3621c..16af9096bc2 100644 --- a/raster3d/r3.out.bin/r3.out.bin.html +++ b/raster3d/r3.out.bin/r3.out.bin.html @@ -7,8 +7,8 @@

    DESCRIPTION

    NOTES

    -The write order of the rows (north->south to south->north) and -the write order of the depths (bottom->top to top->bottom) can be switched. +The write order of the rows (north->south to south->north) and +the write order of the depths (bottom->top to top->bottom) can be switched.

    The region parameters are printed to stderr when setting the verbose flag. Export of little and big endian byte order is supported. diff --git a/raster3d/r3.out.netcdf/r3.out.netcdf.html b/raster3d/r3.out.netcdf/r3.out.netcdf.html index 2dc69701d4d..d4be2831068 100644 --- a/raster3d/r3.out.netcdf/r3.out.netcdf.html +++ b/raster3d/r3.out.netcdf/r3.out.netcdf.html @@ -22,9 +22,9 @@

    NOTES

    Spatial coordinates are exported as cell centered coordinates. The projection can be optionally stored in the metadata as crs attributes. The netCDF projection metadata storage follows the spatial_ref GDAL/netCDF suggestion -here +here and the netCDF CF 1.6 convention -here +here using WKT projection information. Additionally, a PROJ string is stored in the crs attribute section. The export of projection parameters is suppressed when the XY-projection is set. diff --git a/raster3d/r3.out.v5d/r3.out.v5d.html index 50c3280b55d..f541cb86225 100644 --- a/raster3d/r3.out.v5d/r3.out.v5d.html +++ b/raster3d/r3.out.v5d/r3.out.v5d.html @@ -5,7 +5,7 @@

    DESCRIPTION

    parameter is the name of a V5D file which will be written in the current working directory. -

    Vis5D is a system +

    Vis5D is a system for interactive visualization of large 5D gridded data sets such as those produced by numerical weather models. The user can make isosurfaces, contour line slices, colored slices, volume renderings, etc. of data in a 3D raster map, diff --git a/raster3d/r3.out.vtk/r3.out.vtk.html b/raster3d/r3.out.vtk/r3.out.vtk.html index d5ea5245651..4bcebada9d6 100644 --- a/raster3d/r3.out.vtk/r3.out.vtk.html +++ b/raster3d/r3.out.vtk/r3.out.vtk.html @@ -18,9 +18,9 @@

    NOTES

    map to be written in the VTK-ASCII file. Each celldata is named as the 3D raster map it represents. The user can visualize this file with the -VTK Toolkit, -ParaView and -MayaVi which are based on +VTK Toolkit, +ParaView and +MayaVi which are based on VTK. In case of 3D raster map with partially no data, the threshold filter in ParaView can be used to visualize the valid data. Just filter all data which is greater/lesser than the chosen null value in the VTK-ASCII file. diff --git a/raster3d/r3.showdspf/r3.showdspf.html b/raster3d/r3.showdspf/r3.showdspf.html index 97f1322a882..c1e3a43e338 100644 --- a/raster3d/r3.showdspf/r3.showdspf.html +++ b/raster3d/r3.showdspf/r3.showdspf.html @@ -42,7 +42,7 @@

    DESCRIPTION

    E(x,y,z)int# end display along (x,y,z)axis # S int# specular highlight control R resets display along axis to show all data - F grid3name colortablename load new color file + F grid3name colortablename load new color file C toggles the clear flag c clears the display (no thresholds) @@ -72,14 +72,14 @@

    Hints:

    the mouse pointer in the graphics window and drag with the left mouse to rotate the bounding box. To zoom in and out, drag right or left with the middle mouse. When satisfied with the new viewing -position, click with the right mouse. +position, click with the right mouse.
  • To quickly view a series of isosurfaces, enter a series of + or - -characters, i.e. +++++++ +characters, i.e. +++++++
  • Scripts using above commands on separate lines may be directed to r3.showdspf as standard input. -Use the # sign as the first character on a line to indicate a comment. +Use the # sign as the first character on a line to indicate a comment.
  • EXAMPLES

    @@ -90,23 +90,23 @@

    EXAMPLES

    List available thresholds with ?. Use l to select isosurfaces (available number can be adjusted with r3.mkdspf) and L to display:
    -l 1 2 3 4 5
    L
    +l 1 2 3 4 5
    L

    To select and display a single threshold (here: 2), use:
    -t 2 +t 2

    To select and display a range of thresholds (here: 3-5), use:
    -T 3 5
    -D
    +T 3 5
    +D

    To draw a box, enter
    -p
    +p
    then p# to plot a selected wall (here the top wall):
    -p1 +p1

    To draw a cut-off box, define its position
    -Ex20
    -p
    +Ex20
    +p
    Here Ex20 defines the x coordinate of the end of the box.

    In general - p draws a side of a box, E, B, define where that box starts or @@ -118,7 +118,7 @@

    EXAMPLES

    box).

    To draw a fence, a sequence like this would be needed
    -Ex10
    +Ex10
    p5
    Ex15
    p5
    @@ -126,15 +126,15 @@

    EXAMPLES

    p5
    Ex25
    p5
    -
    + or the same would be
    -Bx10
    +Bx10
    p6
    Bx15
    p6
    Bx20
    -p6
    +p6

    The p is needed for the fence diagram, solids and boxes. diff --git a/raster3d/r3.showdspf/r3.showdspf_opengl_mods.html b/raster3d/r3.showdspf/r3.showdspf_opengl_mods.html index 09f15f9b2ed..571787ae730 100644 --- a/raster3d/r3.showdspf/r3.showdspf_opengl_mods.html +++ b/raster3d/r3.showdspf/r3.showdspf_opengl_mods.html @@ -2,7 +2,7 @@ - modifications made to <em>r3.showdspf</em> + modifications made to r3.showdspf @@ -12,7 +12,7 @@ IrisGL (running only on SGI workstations) to OpenGL (running on any OpenGL-capable UNIX workstation)

    The following files were changed (and perhaps renamed during conversion)
    -do_color.c -> do_color_ogl.c
    +do_color.c -> do_color_ogl.c
    draw_cap.c -> draw_cap_ogl.c
    draw_polys.c -> draw_polys_ogl.c
    draw_cappolys.c -> draw_cappolys_ogl.c
    @@ -20,27 +20,27 @@ init_caps.c -> init_caps.c
    vizual.h -> vizual.h
    Gmakefile -> Gmakefile
    -

    +

    The following files are new files
    -
    new_init_graphics.c
    +new_init_graphics.c
    kns_defines.h
    kns_globals.h
    togif.c
    togif.h
    -

    +

    The following files are unchanged:
    -
    cap_data.c
    +cap_data.c
    init_caps.c
    debugf.c
    thresh_array.c
    make_header.c
    r3_data.c
    r3_find.c
    -

    +

    The following files removed since they were made unnecessary
    -
    init_graphics.c
    +init_graphics.c
    dump_rect.c -> functions moved to new_init_graphics.c
    -
    axes2.c
    +
    axes2.c

    *** Disclaimer ***
    The porting effort from IrisGL to OpenGL was performed with the goal of just getting things to run with diff --git a/raster3d/r3.stats/r3.stats.html b/raster3d/r3.stats/r3.stats.html index dd9978f67d2..1e68ab3af65 100644 --- a/raster3d/r3.stats/r3.stats.html +++ b/raster3d/r3.stats/r3.stats.html @@ -17,7 +17,7 @@

    NOTES

    equal value groups affect the memory consumption and the calculation time. The user can expect long computation times for the equal value groups (flag -e) if large region settings occur for maps which -have many equal value groups (> 100000). +have many equal value groups (> 100000).

    EXAMPLES

    @@ -61,7 +61,7 @@

    Generic example

    r3.stats input=volmap nsteps=10 #the result should look like this - num | minimum <= value | value < maximum | volume | perc | cell count + num | minimum <= value | value < maximum | volume | perc | cell count 1 1.000000000 1.900000000 60000000.000 10.00000 60 2 1.900000000 2.800000000 60000000.000 10.00000 60 3 2.800000000 3.700000000 60000000.000 10.00000 60 diff --git a/raster3d/r3.support/r3.support.html b/raster3d/r3.support/r3.support.html index 40f39398f67..6c7eec196f0 100644 --- a/raster3d/r3.support/r3.support.html +++ b/raster3d/r3.support/r3.support.html @@ -5,7 +5,7 @@

    DESCRIPTION

    NOTES

    -Freeform metadata information is stored in a "hist" file which may be +Freeform metadata information is stored in a "hist" file which may be appended to by using the history option. Currently this is limited to text with a maximum line length of 78 characters. Any input larger than this will be wrapped to the next line. diff --git a/raster3d/r3.to.rast/r3.to.rast.html b/raster3d/r3.to.rast/r3.to.rast.html index 4eb92d7a7b2..c6e52b922aa 100644 --- a/raster3d/r3.to.rast/r3.to.rast.html +++ b/raster3d/r3.to.rast/r3.to.rast.html @@ -26,7 +26,7 @@

    Map type conversions

    The type option is especially advantageous when the 3D raster map stores categories (which need to be stored as floating point numbers) and the 2D raster map should be also categorical, i.e. use integers. -The type is set to CELL in this case. +The type is set to CELL in this case.

    Modifying the values

    @@ -55,7 +55,7 @@

    NOTES

    The number of slices is equal to the number of depths.

    -To round floating point values to integers when using type=CELL, +To round floating point values to integers when using type=CELL, the add option should be set to 0.5.
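    A minimal sketch of such a conversion, assuming the GRASS Python scripting library and placeholder map names; the type and add options are the ones discussed above:

    import grass.script as gs

    # Slice a categorical 3D raster into integer 2D rasters, rounding instead of truncating
    gs.run_command(
        "r3.to.rast",
        input="categories_3d",   # placeholder 3D raster map
        output="categories",     # basename for the output slices
        type="CELL",
        add=0.5,
    )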

    SEE ALSO

    @@ -69,5 +69,5 @@

    SEE ALSO

    AUTHORS

    -Sören Gebbert -Vaclav Petras, NCSU GeoForAll Lab +Sören Gebbert
    +Vaclav Petras, NCSU GeoForAll Lab diff --git a/raster3d/raster3dintro.html b/raster3d/raster3dintro.html index b51748b4dd8..02fb3744e65 100644 --- a/raster3d/raster3dintro.html +++ b/raster3d/raster3dintro.html @@ -21,7 +21,7 @@

    3D raster maps in general

    r3.retile after import or creation.
    -
    +
    The 3D raster map coordinate system and the internal tile layout of the RASTER3D library @@ -174,9 +174,9 @@

    Working with 3D visualization software

    GRASS GIS 3D raster maps can be exported to VTK using r3.out.vtk. VTK files can be visualized with the -VTK Toolkit, -Paraview and -MayaVi. +VTK Toolkit, +Paraview and +MayaVi. Moreover, GRASS GIS 2D raster maps can be exported to VTK with r.out.vtk and GRASS GIS vector maps can be exported to VTK with @@ -184,7 +184,7 @@

    Working with 3D visualization software

    Alternatively, GRASS 3D raster maps can be imported and exported from/to -Vis5D +Vis5D (r3.in.v5d, r3.out.v5d). @@ -203,6 +203,7 @@

    See also

  • Introduction into image processing
  • Introduction into temporal data processing
  • Projections and spatial transformations
  • +
  • Graphical User Interface
  • wxGUI 3D View Mode
  • m.nviz.image
  • diff --git a/renovate.json5 b/renovate.json5 index b5c0aeba9b8..12451b04103 100644 --- a/renovate.json5 +++ b/renovate.json5 @@ -5,11 +5,11 @@ ":semanticCommits", ":semanticCommitTypeAll(CI)", - // allows to use comments starting with + // allows using comments starting with // "# renovate: " to update _VERSION // environment variables in GitHub Action files. "customManagers:githubActionsVersions", - // allows to use comments starting with + // allows using comments starting with // "# renovate: " to update _VERSION // ENV or ARG in a Dockerfile. "customManagers:dockerfileVersions", diff --git a/scripts/d.correlate/d.correlate.html b/scripts/d.correlate/d.correlate.html index bf6add70fca..76d00d6f5f4 100644 --- a/scripts/d.correlate/d.correlate.html +++ b/scripts/d.correlate/d.correlate.html @@ -8,7 +8,7 @@

    DESCRIPTION

    The results are displayed in the active display frame on the user's graphics monitor. d.correlate erases the active frame before displaying results. If no graphics monitor -is open, a file map.png is generated in the current directory. +is open, a file map.png is generated in the current directory.

    NOTES

    @@ -24,7 +24,7 @@

    EXAMPLE

    -Scatterplot of two LANDSAT TM7 channels
    +Scatterplot of two LANDSAT TM7 channels
    Scatterplot of two LANDSAT TM7 channels
    diff --git a/scripts/d.correlate/d.correlate.py b/scripts/d.correlate/d.correlate.py index 1068733a11b..af0f1840356 100755 --- a/scripts/d.correlate/d.correlate.py +++ b/scripts/d.correlate/d.correlate.py @@ -54,7 +54,7 @@ def main(): os.environ["GRASS_RENDER_FILE_READ"] = "TRUE" - colors = "red black blue green gray violet".split() + colors = ["red", "black", "blue", "green", "gray", "violet"] line = 2 iloop = 0 jloop = 0 diff --git a/scripts/d.out.file/d.out.file.html b/scripts/d.out.file/d.out.file.html index ca3516257e5..1e1429c5025 100644 --- a/scripts/d.out.file/d.out.file.html +++ b/scripts/d.out.file/d.out.file.html @@ -18,4 +18,4 @@

    SEE ALSO

    AUTHOR

    -Anna Petrasova, NCSU OSGeoREL +Anna Petrasova, NCSU GeoForAll Lab diff --git a/scripts/d.rast.edit/d.rast.edit.html b/scripts/d.rast.edit/d.rast.edit.html index 75233316e38..cde4f96947a 100644 --- a/scripts/d.rast.edit/d.rast.edit.html +++ b/scripts/d.rast.edit/d.rast.edit.html @@ -77,8 +77,8 @@

    EXAMPLE

    g.region raster=elev_lid792_1m -p # pan to area of interest and edit raster cells (I used "102" as value to modify cells -# Use: File > Save to save -# then: File > Exit +# Use: File > Save to save +# then: File > Exit d.rast.edit input=elev_lid792_1m output=elev_lid792_1m_modified # comparison of raster statistics diff --git a/scripts/d.rast.leg/d.rast.leg.html b/scripts/d.rast.leg/d.rast.leg.html index 49714803c2a..f92bdd75f2d 100644 --- a/scripts/d.rast.leg/d.rast.leg.html +++ b/scripts/d.rast.leg/d.rast.leg.html @@ -20,7 +20,7 @@

    NOTES

    which the legend is generated. This is useful to visualize (time) series of raster maps with a common static legend instead of the default dynamic legend.

    To remove all frames when clearing the display, use -d.erase -f. +d.erase -f.

    EXAMPLE

    diff --git a/scripts/d.shade/d.shade.html b/scripts/d.shade/d.shade.html index 34286919468..ee081872a5f 100644 --- a/scripts/d.shade/d.shade.html +++ b/scripts/d.shade/d.shade.html @@ -24,8 +24,8 @@

    NOTES

    EXAMPLES

    -In this example, the aspect map in the North Carolina sample -dataset is used to hillshade the elevation map: +In this example, the aspect map in the North Carolina sample +dataset is used to hillshade the elevation map:
     g.region raster=aspect -p
    @@ -64,7 +64,7 @@ 

    EXAMPLES

    Interesting visualizations can be created using different color tables for -elevation raster map, for example using haxby color table. +elevation raster map, for example using haxby color table.

    SEE ALSO

    diff --git a/scripts/d.to.rast/d.to.rast.html b/scripts/d.to.rast/d.to.rast.html index c0330e066b5..4efa1bb4f83 100644 --- a/scripts/d.to.rast/d.to.rast.html +++ b/scripts/d.to.rast/d.to.rast.html @@ -46,4 +46,4 @@

    SEE ALSO

    AUTHOR

    -Anna Petrasova, NCSU OSGeoREL +Anna Petrasova, NCSU GeoForAll Lab diff --git a/scripts/d.what.rast/d.what.rast.html b/scripts/d.what.rast/d.what.rast.html index ac5d314d11f..949835f8372 100644 --- a/scripts/d.what.rast/d.what.rast.html +++ b/scripts/d.what.rast/d.what.rast.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -d.what.rast d.what.rast outputs the category value(s) +d.what.rast outputs the category value(s) associated with user-specified location(s) in user-specified raster map layer(s). The active monitor can be selected with d.mon. @@ -18,4 +18,4 @@

    SEE ALSO

    AUTHOR

    -Anna Petrasova, NCSU OSGeoREL +Anna Petrasova, NCSU GeoForAll Lab diff --git a/scripts/d.what.vect/d.what.vect.html b/scripts/d.what.vect/d.what.vect.html index 4b6303e1ca0..d891b350927 100644 --- a/scripts/d.what.vect/d.what.vect.html +++ b/scripts/d.what.vect/d.what.vect.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -d.what.vect d.what.vect outputs the category value(s) +d.what.vect outputs the category value(s) associated with user-specified location(s) in user-specified vector map layer(s). The active monitor can be selected with d.mon. @@ -18,4 +18,4 @@

    SEE ALSO

    AUTHOR

    -Anna Petrasova, NCSU OSGeoREL +Anna Petrasova, NCSU GeoForAll Lab diff --git a/scripts/db.dropcolumn/db.dropcolumn.py b/scripts/db.dropcolumn/db.dropcolumn.py index 24681b18b23..e038deda38a 100755 --- a/scripts/db.dropcolumn/db.dropcolumn.py +++ b/scripts/db.dropcolumn/db.dropcolumn.py @@ -102,7 +102,8 @@ def main(): driver=driver, ).split(".")[0:2] - if [int(i) for i in sqlite3_version] >= [int(i) for i in "3.35".split(".")]: + # sqlite version 3.35 compared here + if [int(i) for i in sqlite3_version] >= [int(i) for i in ["3", "35"]]: sql = "ALTER TABLE %s DROP COLUMN %s" % (table, column) if column == "cat": sql = "DROP INDEX %s_%s; %s" % (table, column, sql) diff --git a/scripts/db.droptable/db.droptable.html b/scripts/db.droptable/db.droptable.html index 68cd12be937..bf5a7209e0a 100644 --- a/scripts/db.droptable/db.droptable.html +++ b/scripts/db.droptable/db.droptable.html @@ -33,7 +33,7 @@

    Removing an attribute table from given database

    db.droptable optionally allows defining driver and database options different from the default connection settings -(db.connect -p). +(db.connect -p).
     # drop the table from SQLite database
    diff --git a/scripts/db.in.ogr/db.in.ogr.html b/scripts/db.in.ogr/db.in.ogr.html
    index 70e7dddc1b1..14c46cd6c06 100644
    --- a/scripts/db.in.ogr/db.in.ogr.html
    +++ b/scripts/db.in.ogr/db.in.ogr.html
    @@ -12,11 +12,12 @@ 

    Import CSV file

    Limited type recognition can be done for Integer, Real, String, Date, Time and DateTime columns through a descriptive file with same name as the CSV file, but .csvt extension -(see details here). +(see details here).
     # NOTE: create koeppen_gridcode.csvt first for automated type recognition
    -db.in.ogr input=koeppen_gridcode.csv output=koeppen_gridcode
    +db.in.ogr input=koeppen_gridcode.csv output=koeppen_gridcode gdal_doo="AUTODETECT_TYPE=YES"
    +db.describe koeppen_gridcode -c
     db.select table=koeppen_gridcode
     
    @@ -50,7 +51,7 @@

    Import of a PostgreSQL table

    Import XLS file

    To force reading headers, define environmental -variable OGR_XLS_HEADERS='FORCE'. Parameter db_table +variable OGR_XLS_HEADERS='FORCE'. Parameter db_table refers to the list within XLS file.
    diff --git a/scripts/db.out.ogr/db.out.ogr.html b/scripts/db.out.ogr/db.out.ogr.html
    index 5a2727dd566..cf07de25e96 100644
    --- a/scripts/db.out.ogr/db.out.ogr.html
    +++ b/scripts/db.out.ogr/db.out.ogr.html
    @@ -30,7 +30,7 @@ 

    Export of GRASS GIS attribute table into a PostgreSQL table

     db.out.ogr input=precip_30ynormals \
    -	   output="PG:host=localhost dbname=meteo user=neteler" \
    +           output="PG:host=localhost dbname=meteo user=neteler" \
                format=PostgreSQL
     # verify
     echo "SELECT * FROM precip_30ynormals" | psql meteo
    diff --git a/scripts/db.test/db.test.html b/scripts/db.test/db.test.html
    index fb7d6271ab2..5484d91fd5b 100644
    --- a/scripts/db.test/db.test.html
    +++ b/scripts/db.test/db.test.html
    @@ -2,11 +2,12 @@ 

    DESCRIPTION

    db.test tests database driver and database server running set of SQL queries. Database must exist and connection must be set -by db.connect. +by db.connect.

    EXAMPLE

    Test current SQL backend driver: +

     db.connect -p
     db.test test=test1
    diff --git a/scripts/db.univar/db.univar.py b/scripts/db.univar/db.univar.py
    index 4654e3011a0..859e0fd831c 100755
    --- a/scripts/db.univar/db.univar.py
    +++ b/scripts/db.univar/db.univar.py
    @@ -277,7 +277,8 @@ def main():
                 sys.stdout.write("coeff_var=0\n")
             sys.stdout.write("sum=%.15g\n" % sum)
         else:
    -        raise ValueError(f"Unknown output format {output_format}")
    +        msg = f"Unknown output format {output_format}"
    +        raise ValueError(msg)
     
         if not extend:
             return
    diff --git a/scripts/g.extension.all/g.extension.all.html b/scripts/g.extension.all/g.extension.all.html
    index 5b0cb992e3e..e06872d2f16 100644
    --- a/scripts/g.extension.all/g.extension.all.html
    +++ b/scripts/g.extension.all/g.extension.all.html
    @@ -4,13 +4,13 @@ 

    DESCRIPTION

    installed GRASS Addons extensions in local GRASS installation. The extensions can be installed by g.extension. List of -locally installed extensions can be printed by g.extension --a. +locally installed extensions can be printed by g.extension +-a.

    EXAMPLES

    Rebuild locally installed extensions which were built against -different GIS Library (see g.version -r) +different GIS Library (see g.version -r)
     g.extension.rebuild.all
    diff --git a/scripts/g.extension/g.extension.html b/scripts/g.extension/g.extension.html
    index 06e24cb4e43..1fc8a5a96b2 100644
    --- a/scripts/g.extension/g.extension.html
    +++ b/scripts/g.extension/g.extension.html
    @@ -36,19 +36,19 @@ 

    Where the extensions are installed

    directory. The default is a directory for application data and settings inside the user's home directory. -On GNU/Linux it is $HOME/.grass8/addons, -on MS-Windows it is %APPDATA%\Roaming\GRASS8\addons. -The name of the directory is stored in the GRASS_ADDON_BASE +On GNU/Linux it is $HOME/.grass8/addons, +on MS-Windows it is %APPDATA%\Roaming\GRASS8\addons. +The name of the directory is stored in the GRASS_ADDON_BASE environmental variable.

    The flag -s changes this install target directory to the GRASS GIS installation directory -(determined by GISBASE environmental variable, e.g. /usr/) -rather than the default directory defined as per GRASS_ADDON_BASE +(determined by GISBASE environmental variable, e.g. /usr/) +rather than the default directory defined as per GRASS_ADDON_BASE (see also documentation for variables). g.extension checks if the user has permission to write to -GISBASE or GRASS_ADDON_BASE. +GISBASE or GRASS_ADDON_BASE.

    The place where the extensions are installed can be customized by @@ -77,7 +77,7 @@

    Local source code directory

    Source code in a ZIP or TAR archive

    In addition, new extension can be also installed from a ZIP file -or an archive file from the TAR family (e.g., .tar.gz or .bz2). +or an archive file from the TAR family (e.g., .tar.gz or .bz2). The file can be on disk (specified with a path), or on the web (specified by an URL). @@ -85,7 +85,7 @@

    Online repositories: GitHub, GitLab and Bitbucket

    For well known general hosting services, namely GitHub, GitLab and Bitbucket, g.extension supports the download of a repository. Here the user only needs to provide a base URL to the repository web page -(with or without the https:// part). +(with or without the https:// part). For GitHub, GitLab and Bitbucket, the latest source code in the default branch is downloaded, unless a specific branch is requested in the branch option. @@ -276,4 +276,4 @@
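A minimal Python sketch of the repository install described above, assuming the extension, url and branch options named on this page; the addon name and repository URL are placeholders, not real addons.

import grass.script as gs

# install a hypothetical addon from a hosted repository (base URL only),
# optionally requesting a specific branch as mentioned above
gs.run_command(
    "g.extension",
    extension="r.example.addon",          # placeholder addon name
    url="https://github.com/user/repo",   # placeholder base URL of the repository page
    branch="main",                        # optional: pick a branch other than the default
)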

    AUTHORS

    Markus Neteler (original shell script)
    Martin Landa, Czech Technical University in Prague, Czech Republic (Python rewrite)
    -Vaclav Petras, NCSU OSGeoREL (support for general sources, partial refactoring) +Vaclav Petras, NCSU GeoForAll Lab (support for general sources, partial refactoring) diff --git a/scripts/g.extension/g.extension.py b/scripts/g.extension/g.extension.py index f14a742ca06..01f5b0a969f 100644 --- a/scripts/g.extension/g.extension.py +++ b/scripts/g.extension/g.extension.py @@ -1076,12 +1076,16 @@ def write_xml_extensions(name, tree=None): # extension name file_.write('%s\n' % (" " * indent, tnode.get("name"))) indent += 4 - """ - file_.write('%s%s\n' % - (' ' * indent, tnode.find('description').text)) - file_.write('%s%s\n' % - (' ' * indent, tnode.find('keywords').text)) - """ + + # file_.write( + # "%s%s\n" + # % (" " * indent, tnode.find("description").text) + # ) + # file_.write( + # "%s%s\n" + # % (" " * indent, tnode.find("keywords").text) + # ) + # extension files bnode = tnode.find("binary") if bnode is not None: @@ -1392,16 +1396,16 @@ def install_extension_xml(edict): for name in edict: # so far extensions do not have description or keywords # only modules have - """ - try: - desc = gtask.parse_interface(name).description - # mname = gtask.parse_interface(name).name - keywords = gtask.parse_interface(name).keywords - except Exception as e: - gs.warning(_("No addons metadata available." - " Addons metadata file not updated.")) - return [] - """ + + # try: + # desc = gtask.parse_interface(name).description + # # mname = gtask.parse_interface(name).name + # keywords = gtask.parse_interface(name).keywords + # except Exception as e: + # gs.warning( + # _("No addons metadata available. Addons metadata file not updated.") + # ) + # return [] tnode = None for node in tree.findall("task"): @@ -1412,14 +1416,13 @@ def install_extension_xml(edict): if tnode is None: # create new node for task tnode = ET.Element("task", attrib={"name": name}) - """ - dnode = etree.Element('description') - dnode.text = desc - tnode.append(dnode) - knode = etree.Element('keywords') - knode.text = (',').join(keywords) - tnode.append(knode) - """ + + # dnode = ET.Element("description") + # dnode.text = desc + # tnode.append(dnode) + # knode = ET.Element("keywords") + # knode.text = (",").join(keywords) + # tnode.append(knode) # create binary bnode = ET.Element("binary") @@ -1549,37 +1552,36 @@ def install_module_xml(mlist): # binary files installed with an extension are now # listed in extensions.xml - """ - # create binary - bnode = etree.Element('binary') - list_of_binary_files = [] - for file_name in os.listdir(url): - file_type = os.path.splitext(file_name)[-1] - file_n = os.path.splitext(file_name)[0] - html_path = os.path.join(options['prefix'], 'docs', 'html') - c_path = os.path.join(options['prefix'], 'bin') - py_path = os.path.join(options['prefix'], 'scripts') - # html or image file - if file_type in ['.html', '.jpg', '.png'] \ - and file_n in os.listdir(html_path): - list_of_binary_files.append(os.path.join(html_path, file_name)) - # c file - elif file_type in ['.c'] and file_name in os.listdir(c_path): - list_of_binary_files.append(os.path.join(c_path, file_n)) - # python file - elif file_type in ['.py'] and file_name in os.listdir(py_path): - list_of_binary_files.append(os.path.join(py_path, file_n)) - # man file - man_path = os.path.join(options['prefix'], 'docs', 'man', 'man1') - if name + '.1' in os.listdir(man_path): - list_of_binary_files.append(os.path.join(man_path, name + '.1')) - # add binaries to xml file - for binary_file_name in list_of_binary_files: - fnode = etree.Element('file') - 
fnode.text = binary_file_name - bnode.append(fnode) - tnode.append(bnode) - """ + # # create binary + # bnode = etree.Element("binary") + # list_of_binary_files = [] + # for file_name in os.listdir(url): + # file_type = os.path.splitext(file_name)[-1] + # file_n = os.path.splitext(file_name)[0] + # html_path = os.path.join(options["prefix"], "docs", "html") + # c_path = os.path.join(options["prefix"], "bin") + # py_path = os.path.join(options["prefix"], "scripts") + # # html or image file + # if file_type in [".html", ".jpg", ".png"] and file_n in os.listdir( + # html_path + # ): + # list_of_binary_files.append(os.path.join(html_path, file_name)) + # # c file + # elif file_type in [".c"] and file_name in os.listdir(c_path): + # list_of_binary_files.append(os.path.join(c_path, file_n)) + # # python file + # elif file_type in [".py"] and file_name in os.listdir(py_path): + # list_of_binary_files.append(os.path.join(py_path, file_n)) + # # man file + # man_path = os.path.join(options["prefix"], "docs", "man", "man1") + # if name + ".1" in os.listdir(man_path): + # list_of_binary_files.append(os.path.join(man_path, name + ".1")) + # # add binaries to xml file + # for binary_file_name in list_of_binary_files: + # fnode = etree.Element("file") + # fnode.text = binary_file_name + # bnode.append(fnode) + # tnode.append(bnode) tree.append(tnode) else: gs.verbose( @@ -2589,7 +2591,6 @@ def resolve_known_host_service(url, name, branch): def validate_url(url): - """""" if not os.path.exists(url): url_validated = False message = None @@ -2835,10 +2836,8 @@ def main(): install_extension() else: if original_url == "" or flags["o"]: - """ - Query GitHub API only if extension will be downloaded - from official GRASS GIS addon repository - """ + # Query GitHub API only if extension will be downloaded + # from official GRASS GIS addons repository get_addons_paths(gg_addons_base_dir=options["prefix"]) source, url = resolve_source_code( name=options["extension"], diff --git a/scripts/g.extension/testsuite/data/sample_modules/r.plus.example/r.plus.example.html b/scripts/g.extension/testsuite/data/sample_modules/r.plus.example/r.plus.example.html index 394878481e4..d746306326e 100644 --- a/scripts/g.extension/testsuite/data/sample_modules/r.plus.example/r.plus.example.html +++ b/scripts/g.extension/testsuite/data/sample_modules/r.plus.example/r.plus.example.html @@ -18,4 +18,4 @@

    SEE ALSO

    AUTHOR

    -Vaclav Petras, NCSU OSGeoREL
    +Vaclav Petras, NCSU GeoForAll Lab diff --git a/scripts/g.extension/testsuite/test_addons_download.py b/scripts/g.extension/testsuite/test_addons_download.py index 07be93cf878..111e591f90d 100644 --- a/scripts/g.extension/testsuite/test_addons_download.py +++ b/scripts/g.extension/testsuite/test_addons_download.py @@ -51,10 +51,9 @@ def setUp(self): if self.install_prefix.exists(): files = [path.name for path in self.install_prefix.iterdir()] if files: - raise RuntimeError( - f"Install prefix path '{self.install_prefix}' \ + msg = f"Install prefix path '{self.install_prefix}' \ contains files {','.join(files)}" - ) + raise RuntimeError(msg) def tearDown(self): """Remove created files""" diff --git a/scripts/g.extension/testsuite/test_addons_modules.py b/scripts/g.extension/testsuite/test_addons_modules.py index 9b99e7ac2ae..956b092bce4 100644 --- a/scripts/g.extension/testsuite/test_addons_modules.py +++ b/scripts/g.extension/testsuite/test_addons_modules.py @@ -81,11 +81,10 @@ def setUp(self): if os.path.exists(self.install_prefix): files = os.listdir(self.install_prefix) if files: - raise RuntimeError( - "Install prefix path '{}' contains files {}".format( - self.install_prefix, files - ) + msg = "Install prefix path '{}' contains files {}".format( + self.install_prefix, files ) + raise RuntimeError(msg) def tearDown(self): """Remove created files""" diff --git a/scripts/g.manual/g.manual.html b/scripts/g.manual/g.manual.html index 877497d58f3..2c46f7765a2 100644 --- a/scripts/g.manual/g.manual.html +++ b/scripts/g.manual/g.manual.html @@ -6,7 +6,7 @@

    DESCRIPTION

    NOTES

    The name of the browser is defined in the environment variable -GRASS_HTML_BROWSER. For most platforms this should be an +GRASS_HTML_BROWSER. For most platforms this should be an executable in your PATH, or the full path to an executable. See variables for details. diff --git a/scripts/i.band.library/i.band.library.html b/scripts/i.band.library/i.band.library.html index 196f1319e3a..8b1975b53f2 100644 --- a/scripts/i.band.library/i.band.library.html +++ b/scripts/i.band.library/i.band.library.html @@ -159,15 +159,15 @@

    Band reference registry files

    Each series starts with an unique identifier ("Sentinel2" in example above). Required attributes are only two: -a shortcut and bands. Note that a shortcut +a shortcut and bands. Note that a shortcut must be unique in all band reference registry files. Number of other attributes is not defined or even limited (in example above -only description and instruments attributes are -defined). List of bands is defined by a bands attribute. Each +only description and instruments attributes are +defined). List of bands is defined by a bands attribute. Each band is defined by an identifier ("1", "2" in example above). List of attributes describing each band is not pre-defined or limited. In -example above each band is described by a central wavelength -(nm), bandwidth (nm), and a tag. +example above each band is described by a central wavelength +(nm), bandwidth (nm), and a tag.

    Band reference identifier defined by pattern option is given by @@ -177,7 +177,7 @@

    Band reference registry files

    System-defined registry files are located in GRASS GIS installation -directory ($GISBASE/etc/i.band.library). Note that +directory ($GISBASE/etc/i.band.library). Note that currently i.band.library allows managing only system-defined registry files. Support for user-defined registry files is planned to be implemented, see KNOWN ISSUES section for diff --git a/scripts/i.in.spotvgt/i.in.spotvgt.html b/scripts/i.in.spotvgt/i.in.spotvgt.html index 38719f79fc2..ad4a4466294 100644 --- a/scripts/i.in.spotvgt/i.in.spotvgt.html +++ b/scripts/i.in.spotvgt/i.in.spotvgt.html @@ -13,6 +13,8 @@

    NOTES

    The SPOT VGT files are delivered in HDF4 (Hierarchical Data Format Release 4) format. It is required to have the GDAL libraries installed with HDF4 support. +

    EXAMPLE

    +

    Export of entire world SPOT VGT maps

    When working with SPOT VGT with entire world extent, it is recommended diff --git a/scripts/i.oif/i.oif.html b/scripts/i.oif/i.oif.html index e763154f201..6420c79daba 100644 --- a/scripts/i.oif/i.oif.html +++ b/scripts/i.oif/i.oif.html @@ -44,7 +44,7 @@

    NOTES

    By default the module will calculate standard deviations for all bands in -parallel. To run serially use the -s flag. If the WORKERS +parallel. To run serially use the -s flag. If the WORKERS environment variable is set, the number of concurrent processes will be limited to that number of jobs. diff --git a/scripts/i.pansharpen/i.pansharpen.html b/scripts/i.pansharpen/i.pansharpen.html index 0db14283be9..2ccbb6ad66a 100644 --- a/scripts/i.pansharpen/i.pansharpen.html +++ b/scripts/i.pansharpen/i.pansharpen.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -i.pansharpen uses a high resolution panchromatic band from a +i.pansharpen uses a high resolution panchromatic band from a multispectral image to sharpen 3 lower resolution bands. The 3 lower resolution bands can then be combined into an RGB color image at a higher (more detailed) resolution than is possible using the original 3 @@ -9,55 +9,62 @@

    DESCRIPTION

    and a high resolution panchromatic band 8 at 15m resolution. Pan sharpening allows bands 3-2-1 (or other combinations of 30m resolution bands like 4-3-2 or 5-4-2) to be combined into a 15m resolution color image. -

    -i.pansharpen offers a choice of three different 'pan sharpening' + +

    +i.pansharpen offers a choice of three different 'pan sharpening' algorithms: IHS, Brovey, and PCA. -

    + +

    For IHS pan sharpening, the original 3 lower resolution bands, selected as red, green and blue channels for creating an RGB composite image, are transformed into IHS (intensity, hue, and saturation) color space. The panchromatic band is then substituted for the intensity channel (I), combined with the original hue (H) and saturation (S) channels, and transformed back to RGB color space at the higher resolution of the panchromatic band. The -algorithm for this can be represented as: RGB -> IHS -> [pan]HS -> RGB. -

    +algorithm for this can be represented as: RGB -> IHS -> [pan]HS -> RGB. + +

    With a Brovey pan sharpening, each of the 3 lower resolution bands and panchromatic band are combined using the following algorithm to calculate 3 new bands at the higher resolution (example for band 1): +

                              band1
         new band1 = ----------------------- * panband
                      band1 + band2 + band3
     
    + In PCA pan sharpening, a principal component analysis is performed on the original 3 lower resolution bands to create 3 principal component images (PC1, PC2, and PC3) and their associated eigenvectors (EV), such that: -
     
    +
          band1  band2  band3
     PC1: EV1-1  EV1-2  EV1-3
     PC2: EV2-1  EV2-2  EV2-3
     PC3: EV3-1  EV3-2  EV3-3
    +
    and +
     PC1 = EV1-1 * band1 + EV1-2 * band2 + EV1-3 * band3 - mean(bands 1,2,3)
    -
     
    + An inverse PCA is then performed, substituting the panchromatic band for PC1. To do this, the eigenvectors matrix is inverted (in this case transposed), the PC images are multiplied by the eigenvectors with the panchromatic band substituted for PC1, and mean of each band is added to each transformed image band using the following algorithm (example for band 1): -
     
    +
     band1 = pan * EV1-1 + PC2 * EV1-2 + PC3 * EV1-3 + mean(band1)
    -
     
    + The assignment of the channels depends on the satellite. Examples of satellite imagery with high resolution panchromatic bands, and lower resolution spectral bands include Landsat 7 ETM, QuickBird, and SPOT. -
    +
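As a rough illustration of the Brovey weighting given above (a sketch only, not the i.pansharpen implementation itself): each low resolution band is scaled by its share of the three-band sum and multiplied by the panchromatic band.

import numpy as np

def brovey_sharpen(band1, band2, band3, pan):
    # Brovey: new band_i = band_i / (band1 + band2 + band3) * pan
    total = band1 + band2 + band3
    total = np.where(total == 0, np.finfo(float).eps, total)  # avoid division by zero
    return (band1 / total * pan,
            band2 / total * pan,
            band3 / total * pan)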

    NOTES

    The module works for 2-bit to 30-bit images. All images are rescaled to 8-bit @@ -69,7 +76,8 @@

    NOTES

    be rescaled to 0-255. This can give better visual distinction to features, especially when the range of actual values in an image only occupies a relatively limited portion of the possible range. -

    + +

    i.pansharpen temporarily changes the computational region to the high resolution of the panchromatic band during sharpening calculations, then restores the previous region settings. The current region coordinates (and @@ -77,11 +85,13 @@

    NOTES

    histogram matched to the band it is replaces prior to substitution (i.e., the intensity channel for IHS sharpening, the low res band selected for each color channel with Brovey sharpening, and the PC1 image for PCA sharpening). -

    + +

    By default, the command will attempt to employ parallel processing, using up to 3 cores simultaneously. The -s flag will disable parallel processing, but does use an optimized r.mapcalc expression to reduce disk I/O. -

    + +

    The three pan-sharpened output channels may be combined with d.rgb or r.composite. Colors may be optionally optimized with i.colors.enhance. While the resulting color image will be at the higher resolution in all cases, @@ -172,7 +182,7 @@

    Pan sharpening comparison example

    Results:

    -
    The coordinate system and tile layout of a voxel map in GRASS
    +
     R, G, B composite of Landsat at 30m @@ -216,29 +226,29 @@

    REFERENCES

    Roller, N.E.G. and Cox, S., (1980). Comparison of Landsat MSS and merged MSS/RBV data for analysis of natural vegetation. Proc. of the 14th International Symposium on Remote Sensing - of Environment, San Jose, Costa Rica, 23-30 April, pp. 1001-1007 + of Environment, San Jose, Costa Rica, 23-30 April, pp. 1001-1007
  • Amarsaikhan, D., Douglas, T. (2004). Data fusion and multisource image - classification. International Journal of Remote Sensing, 25(17), 3529-3539. + classification. International Journal of Remote Sensing, 25(17), 3529-3539.
  • Behnia, P. (2005). Comparison between four methods for data fusion of ETM+ - multispectral and pan images. Geo-spatial Information Science, 8(2), 98-103. + multispectral and pan images. Geo-spatial Information Science, 8(2), 98-103.
  • Du, Q., Younan, N. H., King, R., Shah, V. P. (2007). On the Performance Evaluation of Pan-Sharpening Techniques. Geoscience and Remote Sensing - Letters, IEEE, 4(4), 518-522. + Letters, IEEE, 4(4), 518-522.
  • Karathanassi, V., Kolokousis, P., Ioannidou, S. (2007). A comparison study on fusion methods using evaluation indicators. International Journal - of Remote Sensing, 28(10), 2309-2341. + of Remote Sensing, 28(10), 2309-2341.
  • Neteler, M, D. Grasso, I. Michelazzi, L. Miori, S. Merler, and C. Furlanello (2005). An integrated toolbox for image registration, fusion and classification. International Journal of Geoinformatics, 1(1):51-61 - (PDF) + (PDF)
  • Pohl, C, and J.L van Genderen (1998). Multisensor image fusion in remote - sensing: concepts, methods and application. Int. J. of Rem. Sens., 19, 823-854. + sensing: concepts, methods and application. Int. J. of Rem. Sens., 19, 823-854.
  • SEE ALSO

    diff --git a/scripts/i.spectral/i.spectral.html b/scripts/i.spectral/i.spectral.html index 128a0679ec9..01d747e3836 100644 --- a/scripts/i.spectral/i.spectral.html +++ b/scripts/i.spectral/i.spectral.html @@ -18,7 +18,7 @@

    EXAMPLE

    -
    +
    Spectral plot of 3 different land cover types: (1) water, (2) green vegetation, and (3) highway
    diff --git a/scripts/i.tasscap/i.tasscap.html b/scripts/i.tasscap/i.tasscap.html index d197df7faf0..9281ef10243 100644 --- a/scripts/i.tasscap/i.tasscap.html +++ b/scripts/i.tasscap/i.tasscap.html @@ -15,10 +15,10 @@

    DESCRIPTION

    The following tasseled cap components are generated:
-• tasscap.1: corresponds to brightness,
-• tasscap.2: corresponds to greenness,
-• tasscap.3: corresponds to wetness,
-• tasscap.4: corresponds to atmospheric haze (only selected sensors: Landsat 5,7,8).
+• tasscap.1: corresponds to brightness,
+• tasscap.2: corresponds to greenness,
+• tasscap.3: corresponds to wetness,
+• tasscap.4: corresponds to atmospheric haze (only selected sensors: Landsat 5,7,8).

    EXAMPLE

    @@ -39,7 +39,7 @@

    EXAMPLE

    - +
     'Brightness' Tasseled Cap component 1 @@ -86,7 +86,8 @@

    REFERENCES

  • LANDSAT-7: TASSCAP factors cited from: DERIVATION OF A TASSELED CAP TRANSFORMATION BASED ON LANDSAT 7 AT-SATELLITE REFLECTANCE. Chengquan Huang, Bruce Wylie, Limin Yang, Collin Homer and Gregory Zylstra Raytheon ITSS, - USGS EROS Data Center Sioux Falls, SD 57198, USA (http://landcover.usgs.gov/pdf/tasseled.pdf). + USGS EROS Data Center Sioux Falls, SD 57198, USA + (PDF). This is published as well in INT. J. OF RS, 2002, VOL 23, NO. 8, 1741-1748.
  • MODIS Tasseled Cap coefficients - Ref: Lobser & Cohen (2007). MODIS tasseled cap: land cover characteristics expressed through transformed MODIS data. diff --git a/scripts/i.tasscap/i.tasscap.py b/scripts/i.tasscap/i.tasscap.py index dc767fd3f19..fb22c6048ed 100755 --- a/scripts/i.tasscap/i.tasscap.py +++ b/scripts/i.tasscap/i.tasscap.py @@ -27,7 +27,7 @@ # DERIVATION OF A TASSELED CAP TRANSFORMATION BASED ON LANDSAT 7 AT-SATELLITE REFLECTANCE # Chengquan Huang, Bruce Wylie, Limin Yang, Collin Homer and Gregory Zylstra Raytheon ITSS, # USGS EROS Data Center Sioux Falls, SD 57198, USA -# http://landcover.usgs.gov/pdf/tasseled.pdf +# https://digitalcommons.unl.edu/usgsstaffpub/621/ # # This is published as well in INT. J. OF RS, 2002, VOL 23, NO. 8, 1741-1748. # Compare discussion: diff --git a/scripts/m.proj/m.proj.html b/scripts/m.proj/m.proj.html index b67f6c1fa6f..b8015206b28 100644 --- a/scripts/m.proj/m.proj.html +++ b/scripts/m.proj/m.proj.html @@ -23,19 +23,19 @@

    DESCRIPTION

    NOTES

-cs2cs expects input data to formatted as x y, so if -working with latitude-longitude data be sure to send the x -value first, i.e., longitude latitude. Output data will +cs2cs expects input data to be formatted as x y, so if +working with latitude-longitude data be sure to send the x +value first, i.e., longitude latitude. Output data will be exported using the same convention.

    -cs2cs will treat a third data column as a z value +cs2cs will treat a third data column as a z value (elevation) and will modify the value accordingly. This usually translates into small but real differences in that data column.

    cs2cs does not expect the input stream to contain column headings, only numbers. If your data file has lines you wish to have passed through without being processed, they must start with the -'#' character. +'#' character.

    If sending m.proj data from standard input, be aware that the data is first stored to a temporary file before being processed @@ -58,7 +58,7 @@

    NOTES

    If output is to lat/long, it will be formatted using PROJ's Degree:Minute:Second (DMS) convention -of DDDdMM'SSS.SS"H. This can be handy if you wish to quickly +of DDDdMM'SSS.SS"H. This can be handy if you wish to quickly convert lat/long decimal degree data into its DMS equivalent.
    Alternatively, to have m.proj output data in decimal degrees, use the -d flag. This flag can also be used with non-lat/long @@ -66,7 +66,7 @@

    NOTES

    default is 2).

    Note that Lat/long output can be converted to GRASS's DMS convention -(DDD:MM:SSS.SSSH) by piping the results of m.proj +(DDD:MM:SSS.SSSH) by piping the results of m.proj through the sed stream editor as follows.

    @@ -105,7 +105,7 @@ 

    Reproject Long/Lat WGS84 coordinate pair to current map projection

    -The same, but load points from a file named waypoints.txt and +The same, but load points from a file named waypoints.txt and continue on to import the results into a GRASS vector points map in the current map projection:

    @@ -141,25 +141,25 @@ 

    Custom projection parameter usage

    -Projection parameters provided in the above case: +proj -(projection type), +name (projection name), +a -(ellipsoid: equatorial radius), +es (ellipsoid: -eccentricity squared), +zone (zone for the area), -+unfact (conversion factor from meters to other units, -e.g. feet), +lat_0 (standard parallel), +lon_0 -(central meridian), +k (scale factor) and +x_0 +Projection parameters provided in the above case: +proj +(projection type), +name (projection name), +a +(ellipsoid: equatorial radius), +es (ellipsoid: +eccentricity squared), +zone (zone for the area), ++unfact (conversion factor from meters to other units, +e.g. feet), +lat_0 (standard parallel), +lon_0 +(central meridian), +k (scale factor) and +x_0 (false easting). Sometimes false northing is needed which is coded as -+y_0. Internally, the underlying ++y_0. Internally, the underlying PROJ projection library performs an inverse projection to latitude-longitude and then projects the coordinate list to the target projection.

    Datum conversions are automatically handled by the PROJ library if -+datum settings are specified on both the input and output -projections on the command line. The +towgs84 parameter can be used to ++datum settings are specified on both the input and output +projections on the command line. The +towgs84 parameter can be used to define either 3 or 7 term datum transform coefficients, satisfying this requirement. -

    If a datum is specified there is no need for the +ellps= or underlying -parameters, +a=, +es=, etc. +

    If a datum is specified there is no need for the +ellps= or underlying +parameters, +a=, +es=, etc.

    Another custom parameter usage example: diff --git a/scripts/r.fillnulls/r.fillnulls.html b/scripts/r.fillnulls/r.fillnulls.html index 269b07631b2..7d29777face 100644 --- a/scripts/r.fillnulls/r.fillnulls.html +++ b/scripts/r.fillnulls/r.fillnulls.html @@ -16,10 +16,10 @@

    NOTES

    is generated in a hole. The width of edge area can be adjusted by changing the edge parameter.

    During the interpolation following warning may occur when using the RST method:

    - + Warning: strip exists with insufficient data
    Warning: taking too long to find points for interpolation--please change -the region to area where your points are
    +the region to area where your points are

    This warning is generated if large data holes exist within the surface. As the idea of r.fillnulls is to fill such holes, the user may @@ -71,7 +71,7 @@

    EXAMPLE

    d.histogram elev_srtm_30m # remove SRTM outliers, i.e. SRTM below 50m (esp. lakes), leading to no data areas -r.mapcalc "elev_srtm_30m_filt = if(elev_srtm_30m < 50.0, null(), elev_srtm_30m)" +r.mapcalc "elev_srtm_30m_filt = if(elev_srtm_30m < 50.0, null(), elev_srtm_30m)" d.histogram elev_srtm_30m_filt d.rast elev_srtm_30m_filt @@ -103,22 +103,22 @@

    REFERENCES

  • Mitas, L., Mitasova, H., 1999, Spatial Interpolation. In: P.Longley, M.F. Goodchild, D.J. Maguire, D.W.Rhind (Eds.), Geographical Information Systems: Principles, Techniques, Management and Applications, Wiley, -pp.481-492 +pp.481-492
  • Mitasova H., Mitas L.,  Brown W.M.,  D.P. Gerdes, I. Kosinovsky, Baker, T.1995, Modeling spatially and temporally distributed phenomena: New methods and tools for GRASS GIS. International Journal of GIS, 9 (4), special issue on Integrating GIS and Environmental modeling, -433-446. +433-446.
  • Mitasova H. and Mitas L. 1993: Interpolation by Regularized Spline with Tension: I. -Theory and Implementation, Mathematical Geology 25, 641-655. +Theory and Implementation, Mathematical Geology 25, 641-655.
  • Mitasova H. and Hofierka L. 1993: Interpolation by Regularized Spline with Tension: II. Application to Terrain Modeling and Surface Geometry Analysis, -Mathematical Geology 25, 657-667. +Mathematical Geology 25, 657-667.
  • SEE ALSO

    diff --git a/scripts/r.grow/testsuite/test_grow.py b/scripts/r.grow/testsuite/test_grow.py new file mode 100644 index 00000000000..80421da2fec --- /dev/null +++ b/scripts/r.grow/testsuite/test_grow.py @@ -0,0 +1,92 @@ +from grass.gunittest.case import TestCase +from grass.gunittest.main import test +from grass.gunittest.gmodules import SimpleModule + + +class TestRGrow(TestCase): + + @classmethod + def setUpClass(cls): + """Set up a small region and test map.""" + cls.output = "test_grow" + cls.runModule("g.region", n=10, s=0, e=10, w=0, res=1) + + # Create a test map with a centered 3x3 block of 1s + cls.runModule( + "r.mapcalc", + expression="test_map = if(row() > 3 && row() < 7 && col() > 3 && col() < 7, 1, null())", + overwrite=True, + ) + + @classmethod + def tearDownClass(cls): + """Clean up test maps after all tests.""" + cls.runModule("g.remove", type="raster", name="test_map,test_grow", flags="f") + + def tearDown(self): + """Remove output map after each test to prevent conflicts.""" + self.runModule("g.remove", type="raster", name=self.output, flags="f") + + def test_default_growth(self): + """Test default growth using Euclidean metric.""" + module = SimpleModule( + "r.grow", input="test_map", output=self.output, overwrite=True + ) + self.assertModule(module) + + self.assertRasterFitsUnivar(raster=self.output, reference="n=21") + + def test_grow_with_manhattan_metric(self): + """Test growth with Manhattan metric and float radius of 2""" + module = SimpleModule( + "r.grow", + input="test_map", + output=self.output, + radius=2, + metric="manhattan", + overwrite=True, + ) + self.assertModule(module) + + self.assertRasterFitsUnivar(raster=self.output, reference="n=21") + + def test_grow_with_maximum_metric(self): + """Test growth with Maximum metric.""" + module = SimpleModule( + "r.grow", + input="test_map", + output=self.output, + metric="maximum", + overwrite=True, + ) + self.assertModule(module) + + self.assertRasterFitsUnivar(raster=self.output, reference="n=25") + + def test_shrink_with_negative_radius(self): + """Test shrinking with a negative radius of -2 to reduce area size.""" + module = SimpleModule( + "r.grow", input="test_map", radius="-2", output=self.output, overwrite=True + ) + self.assertModule(module) + + self.assertRasterFitsUnivar(raster=self.output, reference="n=1") + + def test_old_value_replacement(self): + """Test replacing the original cells with -1 and new ones with 2.""" + module = SimpleModule( + "r.grow", + input="test_map", + output=self.output, + old=-1, + new="2", + overwrite=True, + ) + self.assertModule(module) + + expected_values = {"n": 21, "sum": 15, "min": -1, "max": 2} + self.assertRasterFitsUnivar(raster=self.output, reference=expected_values) + + +if __name__ == "__main__": + test() diff --git a/scripts/r.import/r.import.html b/scripts/r.import/r.import.html index 6858bdd85d3..6cd0aea6875 100644 --- a/scripts/r.import/r.import.html +++ b/scripts/r.import/r.import.html @@ -84,7 +84,7 @@

    EXAMPLES

    Import of SRTM V3 global data at 1 arc-seconds resolution

    The SRTM V3 1 arc-second global data (~30 meters resolution) are available -from EarthExplorer (http://earthexplorer.usgs.gov/). +from EarthExplorer (https://earthexplorer.usgs.gov/). The SRTM collections are located under the "Digital Elevation" category.

    Example for North Carolina sample dataset (the tile name is "n35_w079_1arc_v3.tif"): @@ -106,7 +106,7 @@

    Import of SRTM V3 global data at 1 arc-seconds resolution

    Import of WorldClim data

    -Import of a subset from WorldClim Bioclim data set, +Import of a subset from WorldClim Bioclim data set, to be reprojected to current project CRS (North Carolina sample dataset). Different resolutions are available, in this example we use the 2.5 arc-minutes resolution data. During import, we spatially subset the world data to the @@ -115,7 +115,7 @@

    Import of WorldClim data

     # download selected Bioclim data (2.5 arc-minutes resolution)
     # optionally tiles are available for the 30 arc-sec resolution
    -wget http://biogeo.ucdavis.edu/data/climate/worldclim/1_4/grid/cur/bio_2-5m_bil.zip
    +wget https://geodata.ucdavis.edu/climate/worldclim/1_4/grid/cur/bio_2-5m_bil.zip
     
     # extract BIO1 from package (BIO1 = Annual Mean Temperature):
     unzip bio_2-5m_bil.zip bio1.bil bio1.hdr
    diff --git a/scripts/r.in.srtm/r.in.srtm.html b/scripts/r.in.srtm/r.in.srtm.html
    index 6532b93523d..bd3832fc8ee 100644
    --- a/scripts/r.in.srtm/r.in.srtm.html
    +++ b/scripts/r.in.srtm/r.in.srtm.html
    @@ -4,7 +4,8 @@ 

    DESCRIPTION

    SRTM Version 1 and improved Version 2 data sets can be downloaded from NASA at this site:
    -http://dds.cr.usgs.gov/srtm/ +http://dds.cr.usgs.gov/srtm/ (archived)
    +https://earthexplorer.usgs.gov/

    Gap-filled SRTM Version 3 data can be downloaded from USGS at this site:
    @@ -35,10 +36,10 @@

    SEE ALSO

    r.in.nasadem (Addon) -

    The Shuttle Radar Topography Mission -homepage at NASA's JPL. +

    The Shuttle Radar Topography Mission +homepage at NASA's JPL. (archived)
    -The SRTM Web Forum +The SRTM Web Forum (archived)

    AUTHORS

    diff --git a/scripts/r.in.wms/r.in.wms.html b/scripts/r.in.wms/r.in.wms.html index 0031627991c..5fbc8d032e5 100644 --- a/scripts/r.in.wms/r.in.wms.html +++ b/scripts/r.in.wms/r.in.wms.html @@ -1,8 +1,8 @@

    DESCRIPTION

    r.in.wms handles all of downloading and importing raster data -from OGC -WMS and OGC +from OGC +WMS and OGC WMTS web mapping servers. It only needs be told the desired data to collect (bounds and resolution) via a region, the server to get the data from, and the layer or layers to get. It @@ -22,7 +22,7 @@

    NOTES

    When using GDAL WMS driver (driver=WMS_GDAL), the GDAL library needs to be built with WMS support, -see GDAL WMS manual page +see GDAL WMS manual page for details.

    Tiled WMS

    @@ -30,7 +30,7 @@

    Tiled WMS

    Into the parameter layers the name of the TiledGroup need to be inserted from Tile Service file. Time variable can be specified in urlparams parameter, -e.g: urlparams='time=2012-1-1'. +e.g: urlparams='time=2012-1-1'.

    EXAMPLES

    @@ -38,11 +38,11 @@

    General Get Capabilities Request

     # Topographic WMS with OpenStreetMap by mundialis
    -r.in.wms -c url="http://ows.mundialis.de/services/service?"
    -r.in.wms -c url="http://ows.mundialis.de/services/service?" | grep Name
    +r.in.wms -c url="https://ows.mundialis.de/services/service?"
    +r.in.wms -c url="https://ows.mundialis.de/services/service?" | grep Name
     
     # Czech WMS
    -r.in.wms -c url="http://wms.cuzk.cz/wms.asp"
    +r.in.wms -c url="https://wms.cuzk.cz/wms.asp"
     

    Download raster data from WMS server (GetMap request)

    @@ -70,8 +70,8 @@

    Open Street Map

    More OSM WMS servers can be found online, e.g. on the OSM wiki in a -OSM WMS Servers list -and on the OSM-WMS-EUROPE page. +OSM WMS Servers list +and on the WORLD_OSM_WMS page.

    Countries and coastlines

    @@ -154,14 +154,14 @@

    Satellite data covering Europe

    REQUIREMENTS

    -r.in.wms requires the gdalwarp +r.in.wms requires the gdalwarp utility from the GDAL/OGR library.

    REFERENCES

    SEE ALSO

    diff --git a/scripts/r.in.wms/wms_drv.py b/scripts/r.in.wms/wms_drv.py index f5e8cbeb914..e46eace4b72 100644 --- a/scripts/r.in.wms/wms_drv.py +++ b/scripts/r.in.wms/wms_drv.py @@ -329,18 +329,18 @@ def _computeRequestData(self, bbox, tl_corner, tile_span, tile_size, mat_num_bbo # request data bbox specified in row and col number self.t_num_bbox = {} - self.t_num_bbox["min_col"] = int( - floor((bbox["minx"] - tl_corner["minx"]) / tile_span["x"] + epsilon) + self.t_num_bbox["min_col"] = floor( + (bbox["minx"] - tl_corner["minx"]) / tile_span["x"] + epsilon ) - self.t_num_bbox["max_col"] = int( - floor((bbox["maxx"] - tl_corner["minx"]) / tile_span["x"] - epsilon) + self.t_num_bbox["max_col"] = floor( + (bbox["maxx"] - tl_corner["minx"]) / tile_span["x"] - epsilon ) - self.t_num_bbox["min_row"] = int( - floor((tl_corner["maxy"] - bbox["maxy"]) / tile_span["y"] + epsilon) + self.t_num_bbox["min_row"] = floor( + (tl_corner["maxy"] - bbox["maxy"]) / tile_span["y"] + epsilon ) - self.t_num_bbox["max_row"] = int( - floor((tl_corner["maxy"] - bbox["miny"]) / tile_span["y"] - epsilon) + self.t_num_bbox["max_row"] = floor( + (tl_corner["maxy"] - bbox["miny"]) / tile_span["y"] - epsilon ) # Does required bbox intersects bbox of data available on server? diff --git a/scripts/r.mapcalc.simple/r.mapcalc.simple.html b/scripts/r.mapcalc.simple/r.mapcalc.simple.html index 84d2aeeb0a9..1277602f1a7 100644 --- a/scripts/r.mapcalc.simple/r.mapcalc.simple.html +++ b/scripts/r.mapcalc.simple/r.mapcalc.simple.html @@ -6,7 +6,7 @@

    DESCRIPTION

    The general syntax for the expression follows r.mapcalc expression format, -for example, A + B or exp(A + B) are valid. +for example, A + B or exp(A + B) are valid. The variables A, B, ..., F represent raster maps which are provided as options a, b, ..., f. @@ -27,10 +27,10 @@

    NOTES

    • The input raster map names and the output map raster name are separate from the expression (formula) which uses generic - variable names (A, B, C, ...). -
    • The output raster name is not included in the expression. + variable names (A, B, C, ...).
    • +
    • The output raster name is not included in the expression.
    • The expression is expected to be a single short one liner - without the function eval(). + without the function eval().
    Differences to r.mapcalc.simple module in GRASS GIS 5 and 6: @@ -38,23 +38,23 @@

    NOTES

    • The primary purpose is not being a GUI front end to r.mapcalc, but a wrapper which allows easy building of - interfaces to r.mapcalc (including GUIs). + interfaces to r.mapcalc (including GUIs).
    • Whitespace (most notably spaces) are allowed - (in the same way as for r.mapcalc). + (in the same way as for r.mapcalc).
    • The variable names are case-insensitive to allow the original uppercase as well as lowercase as in option names - (unless the -c flag is used). -
    • Option names for each map are just one letter (not amap, etc.). + (unless the -c flag is used).
    • +
    • Option names for each map are just one letter (not amap, etc.).
    • Output option name is output as for other modules - (not outfile). -
    • Raster map names can be optionally quoted (the -q flag). + (not outfile).
    • +
    • Raster map names can be optionally quoted (the -q flag).
    • There is no expert mode - (which was just running r.mapcalc). + (which was just running r.mapcalc).
    • The expression option is first, so it is possible to omit its name in the command line - (just like with r.mapcalc). + (just like with r.mapcalc).
    • Overwriting of outputs is done in the same way as with other - modules, so there is no flag to not overwrite outputs. + modules, so there is no flag to not overwrite outputs.
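To make the option mapping described above concrete, a minimal Python sketch (raster names are placeholders): the expression uses the generic variables A and B, which are bound to real maps through the a and b options.

import grass.script as gs

gs.run_command(
    "r.mapcalc.simple",
    expression="A + B",    # generic variables A..F in the expression
    a="elevation",         # placeholder raster bound to A
    b="slope",             # placeholder raster bound to B
    output="result",
)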

    EXAMPLES

    diff --git a/scripts/r.mask/r.mask.html b/scripts/r.mask/r.mask.html index c520bbea353..312de990c47 100644 --- a/scripts/r.mask/r.mask.html +++ b/scripts/r.mask/r.mask.html @@ -1,6 +1,6 @@

    DESCRIPTION

    -r.mask - Facilitates creation of a raster "MASK" map to +r.mask facilitates the creation of a raster "MASK" map to control raster operations.

    @@ -23,7 +23,7 @@

    DESCRIPTION

    The user should be aware that a MASK remains in place until a user renames it to something other than "MASK", or removes it. To remove a mask and restore raster operations to normal (i.e., all cells of the current region), remove the -MASK by setting the -r remove MASK flag (r.mask -r). +MASK by setting the -r remove MASK flag (r.mask -r). Alternatively, a mask can be removed using g.remove or by renaming it to any other name with g.rename. @@ -34,7 +34,7 @@

    NOTES

    the MASK file is removed.

    r.mask uses r.reclass to create a reclassification of an -existing raster map and name it MASK. A reclass map takes up less +existing raster map and name it MASK. A reclass map takes up less space, but is affected by any changes to the underlying map from which it was created. The user can select category values from the input raster to use in the MASK with the maskcats parameter; if r.mask is run from the @@ -48,7 +48,7 @@

    Different ways to create a MASK

    that a MASK can also be created using other functions that have a raster as output, by naming the output raster 'MASK'. Such layers could have other values than 1 and NULL. The user should therefore be aware that grid cells -in the MASK map containing NULL or 0 will replace data with +in the MASK map containing NULL or 0 will replace data with NULL, while cells containing other values will allow data to pass through unaltered. This means that:

    @@ -136,7 +136,7 @@
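A hedged Python sketch of the mask life cycle described above (map names and categories are placeholders): create the MASK from selected categories with maskcats, run a masked computation, then remove it with the -r flag.

import grass.script as gs

gs.run_command("r.mask", raster="landuse", maskcats="1 2")  # create MASK from selected categories
gs.run_command("r.univar", map="elevation")                 # this computation honors the MASK
gs.run_command("r.mask", flags="r")                         # remove the MASK again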

    SEE ALSO

    r.mapcalc, r.reclass, g.remove, -g.rename +g.rename, r.quant diff --git a/scripts/r.out.xyz/r.out.xyz.html b/scripts/r.out.xyz/r.out.xyz.html index e603c592e32..8e40d2c9399 100644 --- a/scripts/r.out.xyz/r.out.xyz.html +++ b/scripts/r.out.xyz/r.out.xyz.html @@ -19,7 +19,7 @@

    NOTES

    r.out.xyz can combine several input raster maps, which can be convenient when it comes to e.g. produce ASCII point cloud files.

    -r.out.xyz is simply a front-end to "r.stats -1g[n]". +r.out.xyz is simply a front-end to "r.stats -1g[n]".
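A small sketch of the equivalence noted above (the map name is a placeholder); both calls should yield essentially the same x,y,value stream.

import grass.script as gs

xyz = gs.read_command("r.out.xyz", input="elevation", separator=",")
# roughly what r.out.xyz runs underneath, per the note above:
raw = gs.read_command("r.stats", flags="1gn", input="elevation", separator=",")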

    EXAMPLES

    diff --git a/scripts/r.pack/r.pack.py b/scripts/r.pack/r.pack.py index 5029f93c6d2..1f517a427af 100644 --- a/scripts/r.pack/r.pack.py +++ b/scripts/r.pack/r.pack.py @@ -63,8 +63,8 @@ def main(): global tmp tmp = grass.tempdir() - tmp_dir = os.path.join(tmp, infile) - os.mkdir(tmp_dir) + tmp_dir = Path(tmp, infile) + tmp_dir.mkdir(exist_ok=True) grass.debug("tmp_dir = %s" % tmp_dir) gfile = grass.find_file(name=infile, element="cell", mapset=mapset) @@ -150,7 +150,7 @@ def main(): os.path.join(f_tmp_dir, element), ) - if not os.listdir(tmp_dir): + if not any(tmp_dir.iterdir()): grass.fatal(_("No raster map components found")) # copy projection info diff --git a/scripts/r.plane/r.plane.html b/scripts/r.plane/r.plane.html index 714272038e9..385c6a1fb11 100644 --- a/scripts/r.plane/r.plane.html +++ b/scripts/r.plane/r.plane.html @@ -12,7 +12,7 @@

    DESCRIPTION

    NOTES

    -g.region -c provides the easting and northing coordinates for +g.region -c provides the easting and northing coordinates for the center of the current region.

    CELL (integer) maps take less disk space than FCELLs (floating point), diff --git a/scripts/r.semantic.label/r.semantic.label.html b/scripts/r.semantic.label/r.semantic.label.html index 45ac7b9373e..f36b93eb6b1 100644 --- a/scripts/r.semantic.label/r.semantic.label.html +++ b/scripts/r.semantic.label/r.semantic.label.html @@ -79,7 +79,7 @@

    SEE ALSO

    i.band.library, r.info, - r.support + r.support

    AUTHORS

    diff --git a/scripts/r.shade/r.shade.html b/scripts/r.shade/r.shade.html index 9004b13f26a..1583ecb3e32 100644 --- a/scripts/r.shade/r.shade.html +++ b/scripts/r.shade/r.shade.html @@ -39,8 +39,8 @@

    NOTES

    EXAMPLES

    -In this example, the aspect map in the North Carolina sample -dataset is used to hillshade the elevation map: +In this example, the aspect map in the North Carolina sample +dataset is used to hillshade the elevation map:
     g.region raster=aspect -p
    @@ -66,7 +66,7 @@ 

    EXAMPLES

    Interesting visualizations can be created using different color tables for -elevation raster map, for example using haxby color table. +elevation raster map, for example using haxby color table. diff --git a/scripts/v.in.geonames/v.in.geonames.html b/scripts/v.in.geonames/v.in.geonames.html index a38aa770cc5..c8383c4bc3f 100644 --- a/scripts/v.in.geonames/v.in.geonames.html +++ b/scripts/v.in.geonames/v.in.geonames.html @@ -3,7 +3,7 @@

    DESCRIPTION

    v.in.geonames imports Geonames.org country files (Gazetteer data) into a GRASS vector points map. The country files can be downloaded from the -GeoNames Data Dump +GeoNames Data Dump Server. Only original files can be processed (unzip compressed file first). These Geonames files are encoded in UTF-8 which is maintained in the GRASS database. @@ -35,8 +35,8 @@

    NOTES

    alternatenames : alternatenames, comma separated varchar(4000) latitude : latitude in decimal degrees (wgs84) longitude : longitude in decimal degrees (wgs84) -feature class : see http://www.geonames.org/export/codes.html, char(1) -feature code : see http://www.geonames.org/export/codes.html, varchar(10) +feature class : see https://www.geonames.org/export/codes.html, char(1) +feature code : see https://www.geonames.org/export/codes.html, varchar(10) country code : ISO-3166 2-letter country code, 2 characters cc2 : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters admin1 code : fipscode (subject to change to iso code), isocode for the us and ch, see file admin1Codes.txt for display names of this code; varchar(20) @@ -46,7 +46,7 @@

    NOTES

    population : integer elevation : in meters, integer gtopo30 : average elevation of 30'x30' (ca 900mx900m) area in meters, integer -timezone : the timezone id (see file http://download.geonames.org/export/dump/timeZones.txt) +timezone : the timezone id (see file https://download.geonames.org/export/dump/timeZones.txt) modification date : date of last modification in yyyy-MM-dd format @@ -55,7 +55,7 @@

    EXAMPLE

    Download and import geonames for Czech Republic.
    -wget http://download.geonames.org/export/dump/CZ.zip
    +wget https://download.geonames.org/export/dump/CZ.zip
     unzip CZ.zip
     
     v.in.geonames input=CZ.txt output=geonames_cz
    @@ -64,8 +64,8 @@ 

    EXAMPLE

    REFERENCES

    SEE ALSO

    diff --git a/scripts/v.in.geonames/v.in.geonames.py b/scripts/v.in.geonames/v.in.geonames.py index b6f5f47ace0..cf2e8c99705 100755 --- a/scripts/v.in.geonames/v.in.geonames.py +++ b/scripts/v.in.geonames/v.in.geonames.py @@ -8,9 +8,9 @@ # Converted to Python by Glynn Clements # # PURPOSE: Import geonames.org dumps -# http://download.geonames.org/export/dump/ +# https://download.geonames.org/export/dump/ # -# Feature Codes: http://www.geonames.org/export/codes.html +# Feature Codes: https://www.geonames.org/export/codes.html # # COPYRIGHT: (c) 2008-2014 Markus Neteler, GRASS Development Team # @@ -69,7 +69,7 @@ def main(): gs.message(_("Converting %d place names...") % num_places) # pump data into GRASS: - # http://download.geonames.org/export/dump/readme.txt + # https://download.geonames.org/export/dump/readme.txt # The main 'geoname' table has the following fields : # --------------------------------------------------- # geonameid : integer id of record in geonames database @@ -78,8 +78,8 @@ def main(): # alternatenames : alternatenames, comma separated varchar(4000) # latitude : latitude in decimal degrees (wgs84) # longitude : longitude in decimal degrees (wgs84) - # feature class : see http://www.geonames.org/export/codes.html, char(1) - # feature code : see http://www.geonames.org/export/codes.html, varchar(10) + # feature class : see https://www.geonames.org/export/codes.html, char(1) + # feature code : see https://www.geonames.org/export/codes.html, varchar(10) # country code : ISO-3166 2-letter country code, 2 characters # cc2 : alternate country codes, comma separated, ISO-3166 2-letter country code, 60 characters # admin1 code : fipscode (subject to change to iso code), isocode for the us and ch, see file admin1Codes.txt for display names of this code; varchar(20) @@ -89,7 +89,7 @@ def main(): # population : integer # elevation : in meters, integer # gtopo30 : average elevation of 30'x30' (ca 900mx900m) area in meters, integer - # timezone : the timezone id (see file http://download.geonames.org/export/dump/timeZones.txt) + # timezone : the timezone id (see file https://download.geonames.org/export/dump/timeZones.txt) # modification date : date of last modification in yyyy-MM-dd format # geonameid|name|asciiname|alternatenames|latitude|longitude|featureclass|featurecode|countrycode|cc2|admin1code|admin2code|admin3code|admin4code|population|elevation|gtopo30|timezone|modificationdate diff --git a/scripts/v.in.lines/v.in.lines.html b/scripts/v.in.lines/v.in.lines.html index 2dff0a8263f..86a87c6dbcb 100644 --- a/scripts/v.in.lines/v.in.lines.html +++ b/scripts/v.in.lines/v.in.lines.html @@ -5,7 +5,7 @@

    DESCRIPTION

    NOTES

    Input ASCII coordinates are simply a series of "x y" data points. -Lines are separated by a row containing "NaN NaN". +Lines are separated by a row containing "NaN NaN".

    The user can import 3D lines by providing 3 columns of data in the input stream and using the -z flag. diff --git a/scripts/v.in.mapgen/v.in.mapgen.html b/scripts/v.in.mapgen/v.in.mapgen.html index af0b4a51c06..52f874cfe77 100644 --- a/scripts/v.in.mapgen/v.in.mapgen.html +++ b/scripts/v.in.mapgen/v.in.mapgen.html @@ -7,9 +7,9 @@

    NOTES

    This module only imports data into vector lines.

    The user can get coastline data in Mapgen or Matlab format from NOAA's Coastline -Extractor at http://www.ngdc.noaa.gov/mgg/shorelines/shorelines.html. +Extractor at https://www.ngdc.noaa.gov/mgg/shorelines/shorelines.html.

    Matlab vector line maps are simply a series of "x y" data points. Lines -are separated by a row containing NaN NaN. +are separated by a row containing NaN NaN. Output from Matlab with this command:
    diff --git a/scripts/v.in.wfs/v.in.wfs.html b/scripts/v.in.wfs/v.in.wfs.html index eaa96c13780..fbee44ea6eb 100644 --- a/scripts/v.in.wfs/v.in.wfs.html +++ b/scripts/v.in.wfs/v.in.wfs.html @@ -35,7 +35,7 @@

    WFS import with API key

    v.in.wfs -l url="$URL"
    -From that file we learn that the shipwreck layer is called "v:x633" +From that file we learn that the shipwreck layer is called "v:x633" and that EPSG code 4326 (LatLong WGS84) is a supported SRS for this data layer.
    diff --git a/scripts/v.in.wfs/v.in.wfs.py b/scripts/v.in.wfs/v.in.wfs.py
    index 9f53840edcf..ead40698fbd 100755
    --- a/scripts/v.in.wfs/v.in.wfs.py
    +++ b/scripts/v.in.wfs/v.in.wfs.py
    @@ -154,12 +154,10 @@ def main():
         if flags["l"]:
             wfs_url = options["url"] + "REQUEST=GetCapabilities&SERVICE=WFS"
     
    -    print(wfs_url)
    -
         tmp = grass.tempfile()
         tmpxml = tmp + ".xml"
     
    -    grass.debug(wfs_url)
+    grass.debug(f"The request URL: {wfs_url}")
     
         # Set user and password if given
         if options["username"] and options["password"]:
    @@ -229,7 +227,14 @@ def main():
                 grass.run_command("v.in.ogr", flags="o", input=tmpxml, output=out)
             grass.message(_("Vector map <%s> imported from WFS.") % out)
         except Exception:
    +        import xml.etree.ElementTree as ET
    +
             grass.message(_("WFS import failed"))
    +
    +        root = ET.parse(tmpxml).getroot()
    +        if "ServiceExceptionReport" in root.tag:
    +            se = root.find(root.tag[:-6])  # strip "Report" from the tag
    +            grass.message(se.text.strip())
         finally:
             try_remove(tmpxml)
     
    diff --git a/scripts/v.rast.stats/v.rast.stats.html b/scripts/v.rast.stats/v.rast.stats.html
    index 966f7ae1a00..f9093158757 100644
    --- a/scripts/v.rast.stats/v.rast.stats.html
    +++ b/scripts/v.rast.stats/v.rast.stats.html
    @@ -24,8 +24,8 @@ 

    NOTES

    The script stops if a (prefixed) upload column is already present in the vector map attribute table, unless otherwise instructed with the -c continue flag. The column prefix will be separated from the statistic name -with an underscore. For example with a prefix of "elev" the sum -column will be named elev_sum. +with an underscore. For example with a prefix of "elev" the sum +column will be named elev_sum.

    If a DBF database is being used, note that column names are restricted by the DBF specification to 10 characters. Therefore it is advised to be economical in the use of the column prefix when using DBF as any additional characters @@ -81,4 +81,4 @@
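A hedged sketch of the naming rule above (map and raster names are placeholders, and the option names are assumptions of the current interface): with a column prefix "elev" and method "sum", the uploaded column becomes elev_sum; the -c flag continues even if prefixed columns already exist.

import grass.script as gs

gs.run_command(
    "v.rast.stats",
    map="zipcodes",            # placeholder vector map
    raster="elevation",        # placeholder raster map
    column_prefix="elev",
    method="sum",              # uploaded column will be named elev_sum
    flags="c",                 # continue if prefixed columns already exist
)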

    SEE ALSO

    AUTHOR

    -Markus Neteler, CEA (for the EDEN EU/FP6 Project) +Markus Neteler, CEA (for the EDEN EU/FP6 Project) diff --git a/scripts/v.what.vect/v.what.vect.html b/scripts/v.what.vect/v.what.vect.html index 0a4a88cca49..353431e3ce6 100644 --- a/scripts/v.what.vect/v.what.vect.html +++ b/scripts/v.what.vect/v.what.vect.html @@ -41,7 +41,7 @@

    EXAMPLES

    In this example, city names, population data and others from -Geonames.org country files are +Geonames.org country files are transferred to selected EU CORINE landuse/landcover classes ("Continuous urban fabric", 111, and "Discontinuous urban fabric", 112). Note: The example is in UTM projection to which the input maps have been diff --git a/scripts/wxpyimgview/wxpyimgview_gui.py b/scripts/wxpyimgview/wxpyimgview_gui.py index b0f89deee80..b5338dc0db8 100644 --- a/scripts/wxpyimgview/wxpyimgview_gui.py +++ b/scripts/wxpyimgview/wxpyimgview_gui.py @@ -117,12 +117,14 @@ def read_bmp_header(self, header): magic, bmfh, bmih = struct.unpack("2s12s40s10x", header) if gs.decode(magic) != "BM": - raise SyntaxError("Invalid magic number") + msg = "Invalid magic number" + raise SyntaxError(msg) size, res1, res2, hsize = struct.unpack("NOTES Setting the connection with t.connect will not test the connection for validity. Hence a database connection will not be established.

    -The connection values are stored in the mapset's VAR file. +The connection values are stored in the mapset's VAR file. The -d flag will set the default TGIS connection parameters. A SQLite database "tgis/sqlite.db" will be created in the current mapset directory. diff --git a/temporal/t.copy/t.copy.html b/temporal/t.copy/t.copy.html index 9be8115a1e2..290f7661e3a 100644 --- a/temporal/t.copy/t.copy.html +++ b/temporal/t.copy/t.copy.html @@ -37,9 +37,4 @@

    SEE ALSO

    AUTHOR

    -Markus Metz, mundialis - - +Markus Metz, mundialis diff --git a/temporal/t.list/t.list.html b/temporal/t.list/t.list.html index 7d88ecb8fdd..38e11b2d3e7 100644 --- a/temporal/t.list/t.list.html +++ b/temporal/t.list/t.list.html @@ -50,8 +50,8 @@

    EXAMPLES

    The where option can also be used to list the stds with a -certain pattern in their name, i.e. as the pattern option in g.list. +certain pattern in their name, i.e. as the pattern option in +g.list.
     # strds whose name start with "precip"
    diff --git a/temporal/t.merge/t.merge.html b/temporal/t.merge/t.merge.html
    index b694d1a3203..ff24d1603f9 100644
    --- a/temporal/t.merge/t.merge.html
    +++ b/temporal/t.merge/t.merge.html
    @@ -1,7 +1,7 @@
     

    DESCRIPTION

-This module is designed to register the maps of several input space
-time datasets in a single output dataset. The datasets to merge can be
+The t.merge module is designed to register the maps of several input
+space time datasets in a single output dataset. The datasets to merge can be
either space time raster, 3D raster or vector datasets and must have the same temporal type (absolute or relative).

    diff --git a/temporal/t.rast.accdetect/t.rast.accdetect.py b/temporal/t.rast.accdetect/t.rast.accdetect.py index 17433e06f3f..8d70bf255f2 100644 --- a/temporal/t.rast.accdetect/t.rast.accdetect.py +++ b/temporal/t.rast.accdetect/t.rast.accdetect.py @@ -402,7 +402,7 @@ def main(): prev_map = curr_map subexpr1 = "null()" subexpr3 = "%i" % (indicator_start) - elif i > 0 and i < num_maps - 1: + elif 0 < i < num_maps - 1: prev_map = occurrence_maps[map.next().get_id()].get_name() next_map = occurrence_maps[map.prev().get_id()].get_name() # In case the previous map is null() set null() or the start @@ -444,7 +444,7 @@ def main(): prev_map = curr_map subexpr1 = "null()" subexpr3 = "%i" % (indicator_start) - elif i > 0 and i < num_maps - 1: + elif 0 < i < num_maps - 1: prev_map = occurrence_maps[map.prev().get_id()].get_name() next_map = occurrence_maps[map.next().get_id()].get_name() # In case the previous map is null() set null() or the start diff --git a/temporal/t.rast.accumulate/t.rast.accumulate.html b/temporal/t.rast.accumulate/t.rast.accumulate.html index e36d6a79c57..52c37b5e9f0 100644 --- a/temporal/t.rast.accumulate/t.rast.accumulate.html +++ b/temporal/t.rast.accumulate/t.rast.accumulate.html @@ -70,7 +70,7 @@

    EXAMPLE

    # Import the temperature data t.rast.import input=temperature_mean_1990_2000_daily_celsius.tar.gz \ - output=temperature_mean_1990_2000_daily_celsius directory=/tmp + output=temperature_mean_1990_2000_daily_celsius directory=/tmp # We need to set the region correctly g.region -p raster=`t.rast.list input=temperature_mean_1990_2000_daily_celsius column=name | tail -1` @@ -93,9 +93,9 @@

    EXAMPLE

    # a granularity of one day. Base temperature is 10°C, upper limit is 30°C. # Hence the accumulation starts at 10°C and does not accumulate values above 30°C. t.rast.accumulate input="temperature_mean_1990_2000_daily_celsius" \ - output="temperature_mean_1990_2000_daily_celsius_accumulated_10_30" \ - limits="10,30" start="1990-01-01" stop="2000-01-01" cycle="12 months" \ - basename="temp_acc_daily_10_30" method="bedd" + output="temperature_mean_1990_2000_daily_celsius_accumulated_10_30" \ + limits="10,30" start="1990-01-01" stop="2000-01-01" cycle="12 months" \ + basename="temp_acc_daily_10_30" method="bedd" ############################################################################# #### ACCUMULATION PATTERN DETECTION ######################################### @@ -104,18 +104,18 @@

    EXAMPLE

    # First cycle at 325°C - 427°C GDD t.rast.accdetect input=temperature_mean_1990_2000_daily_celsius_accumulated_10_30@PERMANENT \ - occ=leafhopper_occurrence_c1_1990_2000 start="1990-01-01" stop="2000-01-01" \ - cycle="12 months" range=325,427 basename=lh_c1 indicator=leafhopper_indicator_c1_1990_2000 + occ=leafhopper_occurrence_c1_1990_2000 start="1990-01-01" stop="2000-01-01" \ + cycle="12 months" range=325,427 basename=lh_c1 indicator=leafhopper_indicator_c1_1990_2000 # Second cycle at 685°C - 813°C GDD t.rast.accdetect input=temperature_mean_1990_2000_daily_celsius_accumulated_10_30@PERMANENT \ - occ=leafhopper_occurrence_c2_1990_2000 start="1990-01-01" stop="2000-01-01" \ - cycle="12 months" range=685,813 basename=lh_c2 indicator=leafhopper_indicator_c2_1990_2000 + occ=leafhopper_occurrence_c2_1990_2000 start="1990-01-01" stop="2000-01-01" \ + cycle="12 months" range=685,813 basename=lh_c2 indicator=leafhopper_indicator_c2_1990_2000 # Third cycle at 1047°C - 1179°C GDD t.rast.accdetect input=temperature_mean_1990_2000_daily_celsius_accumulated_10_30@PERMANENT \ - occ=leafhopper_occurrence_c3_1990_2000 start="1990-01-01" stop="2000-01-01" \ - cycle="12 months" range=1047,1179 basename=lh_c3 indicator=leafhopper_indicator_c3_1990_2000 + occ=leafhopper_occurrence_c3_1990_2000 start="1990-01-01" stop="2000-01-01" \ + cycle="12 months" range=1047,1179 basename=lh_c3 indicator=leafhopper_indicator_c3_1990_2000 ############################################################################# @@ -124,36 +124,36 @@

    EXAMPLE

    # Extract the areas that have full cycles t.rast.aggregate input=leafhopper_indicator_c1_1990_2000 gran="1 year" \ - output=leafhopper_cycle_1_1990_2000_yearly method=maximum basename=li_c1 + output=leafhopper_cycle_1_1990_2000_yearly method=maximum basename=li_c1 t.rast.mapcalc input=leafhopper_cycle_1_1990_2000_yearly basename=lh_clean_c1 \ - output=leafhopper_cycle_1_1990_2000_yearly_clean \ - expression="if(leafhopper_cycle_1_1990_2000_yearly == 3, 1, null())" + output=leafhopper_cycle_1_1990_2000_yearly_clean \ + expression="if(leafhopper_cycle_1_1990_2000_yearly == 3, 1, null())" t.rast.aggregate input=leafhopper_indicator_c2_1990_2000 gran="1 year" \ - output=leafhopper_cycle_2_1990_2000_yearly method=maximum basename=li_c2 + output=leafhopper_cycle_2_1990_2000_yearly method=maximum basename=li_c2 t.rast.mapcalc input=leafhopper_cycle_2_1990_2000_yearly basename=lh_clean_c2 \ - output=leafhopper_cycle_2_1990_2000_yearly_clean \ - expression="if(leafhopper_cycle_2_1990_2000_yearly == 3, 2, null())" + output=leafhopper_cycle_2_1990_2000_yearly_clean \ + expression="if(leafhopper_cycle_2_1990_2000_yearly == 3, 2, null())" t.rast.aggregate input=leafhopper_indicator_c3_1990_2000 gran="1 year" \ - output=leafhopper_cycle_3_1990_2000_yearly method=maximum basename=li_c3 + output=leafhopper_cycle_3_1990_2000_yearly method=maximum basename=li_c3 t.rast.mapcalc input=leafhopper_cycle_3_1990_2000_yearly basename=lh_clean_c3 \ - output=leafhopper_cycle_3_1990_2000_yearly_clean \ - expression="if(leafhopper_cycle_3_1990_2000_yearly == 3, 3, null())" + output=leafhopper_cycle_3_1990_2000_yearly_clean \ + expression="if(leafhopper_cycle_3_1990_2000_yearly == 3, 3, null())" t.rast.mapcalc input=leafhopper_cycle_1_1990_2000_yearly_clean,leafhopper_cycle_2_1990_2000_yearly_clean,leafhopper_cycle_3_1990_2000_yearly_clean \ - basename=lh_cleann_all_cycles \ - output=leafhopper_all_cycles_1990_2000_yearly_clean \ - expression="if(isnull(leafhopper_cycle_3_1990_2000_yearly_clean), \ - if(isnull(leafhopper_cycle_2_1990_2000_yearly_clean), \ - if(isnull(leafhopper_cycle_1_1990_2000_yearly_clean), \ - null() ,1),2),3)" - -cat > color.table << EOF + basename=lh_cleann_all_cycles \ + output=leafhopper_all_cycles_1990_2000_yearly_clean \ + expression="if(isnull(leafhopper_cycle_3_1990_2000_yearly_clean), \ + if(isnull(leafhopper_cycle_2_1990_2000_yearly_clean), \ + if(isnull(leafhopper_cycle_1_1990_2000_yearly_clean), \ + null() ,1),2),3)" + +cat > color.table << EOF 3 yellow 2 blue 1 red @@ -170,35 +170,35 @@

    EXAMPLE

    # Extract the duration in days of the first cycle t.rast.aggregate input=leafhopper_occurrence_c1_1990_2000 gran="1 year" \ - output=leafhopper_min_day_c1_1990_2000 method=minimum basename=occ_min_day_c1 + output=leafhopper_min_day_c1_1990_2000 method=minimum basename=occ_min_day_c1 t.rast.aggregate input=leafhopper_occurrence_c1_1990_2000 gran="1 year" \ - output=leafhopper_max_day_c1_1990_2000 method=maximum basename=occ_max_day_c1 + output=leafhopper_max_day_c1_1990_2000 method=maximum basename=occ_max_day_c1 t.rast.mapcalc input=leafhopper_min_day_c1_1990_2000,leafhopper_max_day_c1_1990_2000 \ - basename=occ_duration_c1 \ - output=leafhopper_duration_c1_1990_2000 \ - expression="leafhopper_max_day_c1_1990_2000 - leafhopper_min_day_c1_1990_2000" + basename=occ_duration_c1 \ + output=leafhopper_duration_c1_1990_2000 \ + expression="leafhopper_max_day_c1_1990_2000 - leafhopper_min_day_c1_1990_2000" # Extract the duration in days of the second cycle t.rast.aggregate input=leafhopper_occurrence_c2_1990_2000 gran="1 year" \ - output=leafhopper_min_day_c2_1990_2000 method=minimum basename=occ_min_day_c2 + output=leafhopper_min_day_c2_1990_2000 method=minimum basename=occ_min_day_c2 t.rast.aggregate input=leafhopper_occurrence_c2_1990_2000 gran="1 year" \ - output=leafhopper_max_day_c2_1990_2000 method=maximum basename=occ_max_day_c2 + output=leafhopper_max_day_c2_1990_2000 method=maximum basename=occ_max_day_c2 t.rast.mapcalc input=leafhopper_min_day_c2_1990_2000,leafhopper_max_day_c2_1990_2000 \ - basename=occ_duration_c2 \ - output=leafhopper_duration_c2_1990_2000 \ - expression="leafhopper_max_day_c2_1990_2000 - leafhopper_min_day_c2_1990_2000" + basename=occ_duration_c2 \ + output=leafhopper_duration_c2_1990_2000 \ + expression="leafhopper_max_day_c2_1990_2000 - leafhopper_min_day_c2_1990_2000" # Extract the duration in days of the third cycle t.rast.aggregate input=leafhopper_occurrence_c3_1990_2000 gran="1 year" \ - output=leafhopper_min_day_c3_1990_2000 method=minimum basename=occ_min_day_c3 + output=leafhopper_min_day_c3_1990_2000 method=minimum basename=occ_min_day_c3 t.rast.aggregate input=leafhopper_occurrence_c3_1990_2000 gran="1 year" \ - output=leafhopper_max_day_c3_1990_2000 method=maximum basename=occ_max_day_c3 + output=leafhopper_max_day_c3_1990_2000 method=maximum basename=occ_max_day_c3 t.rast.mapcalc input=leafhopper_min_day_c3_1990_2000,leafhopper_max_day_c3_1990_2000 \ - basename=occ_duration_c3 \ - output=leafhopper_duration_c3_1990_2000 \ - expression="leafhopper_max_day_c3_1990_2000 - leafhopper_min_day_c3_1990_2000" + basename=occ_duration_c3 \ + output=leafhopper_duration_c3_1990_2000 \ + expression="leafhopper_max_day_c3_1990_2000 - leafhopper_min_day_c3_1990_2000" t.rast.colors input=leafhopper_duration_c1_1990_2000 color=rainbow t.rast.colors input=leafhopper_duration_c2_1990_2000 color=rainbow @@ -212,35 +212,35 @@

    EXAMPLE

    # First cycle t.rast.aggregate input=leafhopper_indicator_c1_1990_2000 gran="1 month" \ - output=leafhopper_indi_min_month_c1_1990_2000 method=minimum basename=occ_indi_min_month_c1 + output=leafhopper_indi_min_month_c1_1990_2000 method=minimum basename=occ_indi_min_month_c1 t.rast.aggregate input=leafhopper_indicator_c1_1990_2000 gran="1 month" \ - output=leafhopper_indi_max_month_c1_1990_2000 method=maximum basename=occ_indi_max_month_c1 + output=leafhopper_indi_max_month_c1_1990_2000 method=maximum basename=occ_indi_max_month_c1 t.rast.mapcalc input=leafhopper_indi_min_month_c1_1990_2000,leafhopper_indi_max_month_c1_1990_2000 \ - basename=indicator_monthly_c1 \ - output=leafhopper_monthly_indicator_c1_1990_2000 \ - expression="if(leafhopper_indi_min_month_c1_1990_2000 == 1, 1, if(leafhopper_indi_max_month_c1_1990_2000 == 3, 3, 2))" + basename=indicator_monthly_c1 \ + output=leafhopper_monthly_indicator_c1_1990_2000 \ + expression="if(leafhopper_indi_min_month_c1_1990_2000 == 1, 1, if(leafhopper_indi_max_month_c1_1990_2000 == 3, 3, 2))" # Second cycle t.rast.aggregate input=leafhopper_indicator_c2_1990_2000 gran="1 month" \ - output=leafhopper_indi_min_month_c2_1990_2000 method=minimum basename=occ_indi_min_month_c2 + output=leafhopper_indi_min_month_c2_1990_2000 method=minimum basename=occ_indi_min_month_c2 t.rast.aggregate input=leafhopper_indicator_c2_1990_2000 gran="1 month" \ - output=leafhopper_indi_max_month_c2_1990_2000 method=maximum basename=occ_indi_max_month_c2 + output=leafhopper_indi_max_month_c2_1990_2000 method=maximum basename=occ_indi_max_month_c2 t.rast.mapcalc input=leafhopper_indi_min_month_c2_1990_2000,leafhopper_indi_max_month_c2_1990_2000 \ - basename=indicator_monthly_c2 \ - output=leafhopper_monthly_indicator_c2_1990_2000 \ - expression="if(leafhopper_indi_min_month_c2_1990_2000 == 1, 1, if(leafhopper_indi_max_month_c2_1990_2000 == 3, 3, 2))" + basename=indicator_monthly_c2 \ + output=leafhopper_monthly_indicator_c2_1990_2000 \ + expression="if(leafhopper_indi_min_month_c2_1990_2000 == 1, 1, if(leafhopper_indi_max_month_c2_1990_2000 == 3, 3, 2))" # Third cycle t.rast.aggregate input=leafhopper_indicator_c3_1990_2000 gran="1 month" \ - output=leafhopper_indi_min_month_c3_1990_2000 method=minimum basename=occ_indi_min_month_c3 + output=leafhopper_indi_min_month_c3_1990_2000 method=minimum basename=occ_indi_min_month_c3 t.rast.aggregate input=leafhopper_indicator_c3_1990_2000 gran="1 month" \ - output=leafhopper_indi_max_month_c3_1990_2000 method=maximum basename=occ_indi_max_month_c3 + output=leafhopper_indi_max_month_c3_1990_2000 method=maximum basename=occ_indi_max_month_c3 t.rast.mapcalc input=leafhopper_indi_min_month_c3_1990_2000,leafhopper_indi_max_month_c3_1990_2000 \ - basename=indicator_monthly_c3 \ - output=leafhopper_monthly_indicator_c3_1990_2000 \ - expression="if(leafhopper_indi_min_month_c3_1990_2000 == 1, 1, if(leafhopper_indi_max_month_c3_1990_2000 == 3, 3, 2))" + basename=indicator_monthly_c3 \ + output=leafhopper_monthly_indicator_c3_1990_2000 \ + expression="if(leafhopper_indi_min_month_c3_1990_2000 == 1, 1, if(leafhopper_indi_max_month_c3_1990_2000 == 3, 3, 2))" -cat > color.table << EOF +cat > color.table << EOF 3 red 2 yellow 1 green diff --git a/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.html b/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.html index d7efc4a5bf0..b76274c76d3 100644 --- a/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.html +++ b/temporal/t.rast.aggregate.ds/t.rast.aggregate.ds.html @@ -42,7 +42,7 @@

    Precipitation aggregation

    for map in ${MAPS} ; do r.mapcalc expression="${map} = 1" - echo ${map} >> map_list.txt + echo ${map} >> map_list.txt done t.create type=strds temporaltype=absolute \ @@ -262,7 +262,7 @@

    MODIS satellite sensor daily data aggregation to 8 days

    # to a YYYY-MM-DD date for start and end, and create a file with # mapnames, start date and end date -g.list type=raster pattern=8day_20??_* > names_list +g.list type=raster pattern=8day_20??_* > names_list for NAME in `cat names_list` ; do @@ -277,10 +277,10 @@

    MODIS satellite sensor daily data aggregation to 8 days

    if [ $DOY -le "353" ] ; then doy_end=$(( $DOY + 8 )) elif [ $DOY -eq "361" ] ; then - if [ $[$YEAR % 4] -eq 0 ] && [ $[$YEAR % 100] -ne 0 ] || [ $[$YEAR % 400] -eq 0 ] ; then + if [ $[$YEAR % 4] -eq 0 ] && [ $[$YEAR % 100] -ne 0 ] || [ $[$YEAR % 400] -eq 0 ] ; then doy_end=$(( $DOY + 6 )) else - doy_end=$(( $DOY + 5 )) + doy_end=$(( $DOY + 5 )) fi fi @@ -288,7 +288,7 @@

    MODIS satellite sensor daily data aggregation to 8 days

    DATE_END=`date -d "${YEAR}-01-01 +$(( ${doy_end} -1 ))days" +%Y-%m-%d` # text file with mapnames, start date and end date - echo "$NAME|$DATE_START|$DATE_END" >> list_map_start_end_time.txt + echo "$NAME|$DATE_START|$DATE_END" >> list_map_start_end_time.txt done diff --git a/temporal/t.rast.algebra/t.rast.algebra.html b/temporal/t.rast.algebra/t.rast.algebra.html index b7fda851224..e5786d9578c 100644 --- a/temporal/t.rast.algebra/t.rast.algebra.html +++ b/temporal/t.rast.algebra/t.rast.algebra.html @@ -230,11 +230,11 @@

    Logical operators

    == equal != not equal - > greater than - >= greater than or equal - < less than - <= less than or equal - && and + > greater than + >= greater than or equal + < less than + <= less than or equal + && and || or
    @@ -291,26 +291,26 @@

    Comparison operator

    aggregation operator: {"comparison operator", "topological relations", aggregation operator, "temporal operator"}
-This aggregation operator (| or &) defines the behaviour when a map is
+This aggregation operator (| or &) defines the behaviour when a map is
related to more than one map, e.g. for the topological relation 'contains'.
-Should all (&) conditions for the related maps be true or is it sufficient
+Should all (&) conditions for the related maps be true or is it sufficient
to have any (|) condition that is true. The resulting boolean value is
-then compared to the first condition by the comparison operator (|| or &&).
+then compared to the first condition by the comparison operator (|| or &&).
By default, the aggregation operator is related to the comparison operator:
-comparison operator -> aggregation operator:
+comparison operator -> aggregation operator:
    -|| -> | and && -> &
    +|| -> | and && -> &
     
    Examples:
     Condition 1 {||, equal, r} Condition 2
    -Condition 1 {&&, equal|during, l} Condition 2
    -Condition 1 {&&, equal|contains, |, l} Condition 2
    -Condition 1 {&&, equal|during, l} Condition 2 && Condition 3
    -Condition 1 {&&, equal|during, l} Condition 2 {&&,contains, |, r} Condition 3
    +Condition 1 {&&, equal|during, l} Condition 2
    +Condition 1 {&&, equal|contains, |, l} Condition 2
    +Condition 1 {&&, equal|during, l} Condition 2 && Condition 3
    +Condition 1 {&&, equal|during, l} Condition 2 {&&,contains, |, r} Condition 3
     

    Hash operator

    @@ -328,7 +328,7 @@

    Hash operator

    maps from B will be returned.
    -C = if({equal}, A {#, contains} B > 2, A {:, contains} B)
    +C = if({equal}, A {#, contains} B > 2, A {:, contains} B)
     
    This expression selects all maps from A that temporally contain at least 2 @@ -434,13 +434,13 @@

    Combinations of temporal, raster and select operators

    a1 of A:
    - C = A {+, contains} B --> c1 = a1 + b1 + b2 + b3
    + C = A {+, contains} B --> c1 = a1 + b1 + b2 + b3
     

    Important: the aggregation behaviour is not symmetric

    - C = B {+, during} A --> c1 = b1 + a1
    + C = B {+, during} A --> c1 = b1 + a1
                              c2 = b2 + a1
                              c3 = b3 + a1
     
    @@ -497,7 +497,7 @@

    Sum of space-time raster datasets

    Sum maps from STRDS A with maps from STRDS B which have equal time stamps and are temporally before Jan. 1. 2005 and store them in STRDS D:
    -D = if(start_date(A) < "2005-01-01", A + B)
    +D = if(start_date(A) < "2005-01-01", A + B)
     
    Create the sum of all maps from STRDS A and B that have equal time stamps @@ -520,7 +520,7 @@

    Selection of raster cells

    the cells of A are in the range [100.0, 1600] of time intervals that have more than 30 days (Jan, Mar, May, Jul, Aug, Oct, Dec):
    -C = if(A > 100 && A < 1600 && td(A) > 30, B)
    +C = if(A > 100 && A < 1600 && td(A) > 30, B)
     

    Selection of raster cells with temporal topology relation

    @@ -528,7 +528,7 @@

    Selection of raster cells with temporal topology relation

    Same expression with explicit definition of the temporal topology relation and temporal operators:
    -C = if({equal}, A > 100 && A < 1600 {&&,equal} td(A) > 30, B)
    +C = if({equal}, A > 100 && A < 1600 {&&,equal} td(A) > 30, B)
     
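When scripting, the same expression can be passed to t.rast.algebra from Python. The snippet below is only a sketch: it assumes STRDS named A and B exist in the current mapset, as in the manual examples, and uses grass.script to run the module:

    import grass.script as gs

    # Expression taken verbatim from the example above; the maps of C are
    # created with names derived from the basename option.
    gs.run_command(
        "t.rast.algebra",
        expression='C = if({equal}, A > 100 && A < 1600 {&&,equal} td(A) > 30, B)',
        basename="c_map",
    )
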

    Conditional computation

    @@ -539,7 +539,7 @@

    Conditional computation

    equal time stamps. The number of days or fraction of days per interval is computed using the td() function that has as argument the STRDS "Prec":
    -C = if(Temp > 10.0, Prec / 3600.0 / 24.0 / td(Prec))
    +C = if(Temp > 10.0, Prec / 3600.0 / 24.0 / td(Prec))
     

    Conditional computation with temporal topology relation

    @@ -547,7 +547,7 @@

    Conditional computation with temporal topology relation

    Same expression with explicit definition of the temporal topology relation and temporal operators:
    -C = if({equal}, Temp > 10.0, Prec / 3600.0 / 24.0 {/,equal,l} td(Prec))
    +C = if({equal}, Temp > 10.0, Prec / 3600.0 / 24.0 {/,equal,l} td(Prec))
     

    Computation with time intervals

    @@ -555,7 +555,7 @@

    Computation with time intervals

    intervals of STRDS B if more than one map of A is contained in an interval of B, use A otherwise. The resulting time intervals are either from B or A:
    -C = if(B {#,contain} A > 1, (B {+,contain,l} A - B) / (B {#,contain} A), A)
    +C = if(B {#,contain} A > 1, (B {+,contain,l} A - B) / (B {#,contain} A), A)
     

    Computation with time intervals with temporal topology relation

    @@ -563,14 +563,14 @@

    Computation with time intervals with temporal topology relation

    Same expression with explicit definition of the temporal topology relation and temporal operators:
    -C = if({equal}, B {#,contain} A > 1, (B {+,contain,l} A {-,equal,l} B) {equal,=/} (B {#,contain} A), A)
    +C = if({equal}, B {#,contain} A > 1, (B {+,contain,l} A {-,equal,l} B) {equal,=/} (B {#,contain} A), A)
     

    Compute DOY for spatio-temporal conditions

Compute the DOY for all maps from STRDS A where conditions are met at three
-consecutive time intervals (e.g. temperature > 0):
+consecutive time intervals (e.g. temperature > 0):
    -B = if(A > 0.0 && A[-1] > 0.0 && A[-2] > 0.0, start_doy(A, -1), 0)"
    +B = if(A > 0.0 && A[-1] > 0.0 && A[-2] > 0.0, start_doy(A, -1), 0)"
     
    @@ -590,7 +590,7 @@

    SEE ALSO

    REFERENCES

The use of this module requires the following software to be installed:
-PLY(Python-Lex-Yacc)
+PLY(Python-Lex-Yacc)

    diff --git a/temporal/t.rast.export/t.rast.export.py b/temporal/t.rast.export/t.rast.export.py
    index fdf19541ee4..48e0960ae25 100755
    --- a/temporal/t.rast.export/t.rast.export.py
    +++ b/temporal/t.rast.export/t.rast.export.py
    @@ -115,13 +115,13 @@ def main():
         import grass.temporal as tgis
     
         # Get the options
    -    _input = options["input"]
    +    input_ = options["input"]
         output = options["output"]
         compression = options["compression"]
         directory = options["directory"]
         where = options["where"]
    -    _format = options["format"]
    -    _type = options["type"]
    +    format_ = options["format"]
    +    type_ = options["type"]
         kws = {
             key: options[key] for key in ("createopt", "metaopt", "nodata") if options[key]
         }
    @@ -132,7 +132,7 @@ def main():
         if not os.access(directory, os.W_OK):
             gs.fatal(_("Directory {} is not writable").format(directory))
     
    -    if _type and _format in {"pack", "AAIGrid"}:
    +    if type_ and format_ in {"pack", "AAIGrid"}:
             gs.warning(
                 _("Type options is not working with pack format, it will be skipped")
             )
    @@ -148,7 +148,7 @@ def main():
         tgis.init()
         # Export the space time raster dataset
         tgis.export_stds(
    -        _input, output, compression, directory, where, _format, "strds", _type, **kws
    +        input_, output, compression, directory, where, format_, "strds", type_, **kws
         )
     
     
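The option-variable renames in this hunk (and the matching ones in t.rast.gapfill.py and t.vect.export.py further below) swap a leading underscore for a trailing one. A leading underscore conventionally marks a private name and, in these scripts, the bare name _ is already bound to the translation function, so _input-style names are easy to misread. A tiny illustrative snippet, with a hypothetical stand-in for the translation function:

    def _(message):
        # Stand-in for the gettext translation function used in GRASS scripts.
        return message

    # A trailing underscore keeps the intended word readable while avoiding
    # both the private-name convention and clashes with reserved words.
    input_ = "tempmean_monthly"   # instead of _input
    format_ = "GTiff"             # instead of _format
    print(_("Exporting <%s> as %s") % (input_, format_))
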
    diff --git a/temporal/t.rast.extract/t.rast.extract.html b/temporal/t.rast.extract/t.rast.extract.html
    index bc3b0b30f7c..e7dcd7897c8 100644
    --- a/temporal/t.rast.extract/t.rast.extract.html
    +++ b/temporal/t.rast.extract/t.rast.extract.html
    @@ -22,21 +22,18 @@ 

    DESCRIPTION

    NOTES

The r.mapcalc sub-expression should not contain the left side
-"map =" of a full r.mapcalc expression, only the right
+"map =" of a full r.mapcalc expression, only the right
side, eg.:
    -t.rast.extract input=tempmean_monthly where="start_time > '2010-01-05'" \
    -               output=selected_tempmean_monthly basename=new_tmean_month \
    -               expression="if(tempmean_monthly < 0, null(), tempmean_monthly)"
    -
+t.rast.extract input=tempmean_monthly where="start_time > '2010-01-05'" output=selected_tempmean_monthly basename=new_tmean_month expression="if(tempmean_monthly < 0, null(), tempmean_monthly)"
+

    EXAMPLE

    -t.rast.extract input=tempmean_monthly output=tempmean_monthly_later_2012 \
    -               where="start_time >= '2012-01-01'"
    +t.rast.extract input=tempmean_monthly output=tempmean_monthly_later_2012 where="start_time >= '2012-01-01'"
     
     t.rast.list tempmean_monthly_later_2012
     name|mapset|start_time|end_time
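The same extraction can be driven from Python when it is part of a larger script. This is only a sketch using grass.script with the dataset names from the example above:

    import grass.script as gs

    # Equivalent of the shell call above.
    gs.run_command(
        "t.rast.extract",
        input="tempmean_monthly",
        output="tempmean_monthly_later_2012",
        where="start_time >= '2012-01-01'",
    )

    # List the maps registered in the new dataset.
    print(gs.read_command("t.rast.list", input="tempmean_monthly_later_2012"))
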
    diff --git a/temporal/t.rast.gapfill/t.rast.gapfill.py b/temporal/t.rast.gapfill/t.rast.gapfill.py
    index 84395da6295..a4d3b3a8c95 100755
    --- a/temporal/t.rast.gapfill/t.rast.gapfill.py
    +++ b/temporal/t.rast.gapfill/t.rast.gapfill.py
    @@ -119,17 +119,17 @@ def main():
     
         # Identify all gaps and create new names
         count = 0
    -    for _map in maps:
    -        if _map.get_id() is None:
    +    for map_ in maps:
    +        if map_.get_id() is None:
                 count += 1
                 if sp.get_temporal_type() == "absolute" and tsuffix in {"gran", "time"}:
    -                _id = "{ba}@{ma}".format(ba=base, ma=mapset)
    +                id_ = "{ba}@{ma}".format(ba=base, ma=mapset)
                 else:
                     map_name = tgis.create_numeric_suffix(base, num + count, tsuffix)
    -                _id = "{name}@{ma}".format(name=map_name, ma=mapset)
    -            _map.set_id(_id)
    +                id_ = "{name}@{ma}".format(name=map_name, ma=mapset)
    +            map_.set_id(id_)
     
    -            gap_list.append(_map)
    +            gap_list.append(map_)
     
         if len(gap_list) == 0:
             gs.message(_("No gaps found"))
    @@ -140,16 +140,16 @@ def main():
         tb.build(maps)
     
         # Do some checks before computation
    -    for _map in gap_list:
    -        if not _map.get_precedes() or not _map.get_follows():
    +    for map_ in gap_list:
    +        if not map_.get_precedes() or not map_.get_follows():
                 gs.fatal(_("Unable to determine successor and predecessor of a gap."))
     
    -        if len(_map.get_precedes()) > 1:
    +        if len(map_.get_precedes()) > 1:
                 gs.warning(
                     _("More than one successor of the gap found. Using the first found.")
                 )
     
    -        if len(_map.get_follows()) > 1:
    +        if len(map_.get_follows()) > 1:
                 gs.warning(
                     _(
                         "More than one predecessor of the gap found. "
    @@ -160,9 +160,9 @@ def main():
         # Interpolate the maps using parallel processing
         result_list = []
     
    -    for _map in gap_list:
    -        predecessor = _map.get_follows()[0]
    -        successor = _map.get_precedes()[0]
    +    for map_ in gap_list:
    +        predecessor = map_.get_follows()[0]
    +        successor = map_.get_precedes()[0]
     
             gran = sp.get_granularity()
             tmpval, start = predecessor.get_temporal_extent_as_tuple()
    @@ -170,7 +170,7 @@ def main():
     
             # Now resample the gap
             map_matrix = tgis.AbstractSpaceTimeDataset.resample_maplist_by_granularity(
    -            (_map,), start, end, gran
    +            (map_,), start, end, gran
             )
     
             map_names = []
    @@ -210,7 +210,7 @@ def main():
                                 "Map with name <%s> already exists. "
                                 "Please use another base name."
                             )
    -                        % (_id)
    +                        % (id_)
                         )
                     elif new_map.is_in_db(dbif):
                         overwrite_flags[new_id] = True
    @@ -235,24 +235,24 @@ def main():
         process_queue.wait()
     
         # Insert new interpolated maps in temporal database and dataset
    -    for _map in result_list:
    -        id = _map.get_id()
    +    for map_ in result_list:
    +        id = map_.get_id()
             if overwrite_flags[id]:
    -            if _map.is_time_absolute():
    -                start, end = _map.get_absolute_time()
    -                if _map.is_in_db():
    -                    _map.delete(dbif)
    -                _map = sp.get_new_map_instance(id)
    -                _map.set_absolute_time(start, end)
    +            if map_.is_time_absolute():
    +                start, end = map_.get_absolute_time()
    +                if map_.is_in_db():
    +                    map_.delete(dbif)
    +                map_ = sp.get_new_map_instance(id)
    +                map_.set_absolute_time(start, end)
                 else:
    -                start, end, unit = _map.get_relative_time()
    -                if _map.is_in_db():
    -                    _map.delete(dbif)
    -                _map = sp.get_new_map_instance(id)
    -                _map.set_relative_time(start, end, unit)
    -        _map.load()
    -        _map.insert(dbif)
    -        sp.register_map(_map, dbif)
    +                start, end, unit = map_.get_relative_time()
    +                if map_.is_in_db():
    +                    map_.delete(dbif)
    +                map_ = sp.get_new_map_instance(id)
    +                map_.set_relative_time(start, end, unit)
    +        map_.load()
    +        map_.insert(dbif)
    +        sp.register_map(map_, dbif)
     
         sp.update_from_registered_maps(dbif)
         sp.update_command_string(dbif=dbif)
    diff --git a/temporal/t.rast.import/testsuite/test_temporal_rast_import.py b/temporal/t.rast.import/testsuite/test_temporal_rast_import.py
    index 15e66058a56..4a0decbf82d 100644
    --- a/temporal/t.rast.import/testsuite/test_temporal_rast_import.py
    +++ b/temporal/t.rast.import/testsuite/test_temporal_rast_import.py
    @@ -1,9 +1,3 @@
    -"""
    -Created on Fri Feb 26 14:46:06 2016
    -
    -@author: lucadelu
    -"""
    -
     """
     Test t.rast.import
     
    diff --git a/temporal/t.rast.mapcalc/t.rast.mapcalc.html b/temporal/t.rast.mapcalc/t.rast.mapcalc.html
    index c26f2e9c95a..dbef6373aff 100644
    --- a/temporal/t.rast.mapcalc/t.rast.mapcalc.html
    +++ b/temporal/t.rast.mapcalc/t.rast.mapcalc.html
    @@ -128,15 +128,15 @@ 

    NOTES

    EXAMPLES

The following command creates a new space time raster dataset
-january_under_0 that will set to null all cells with
+january_under_0 that will set to null all cells with
temperature above zero in the January maps while keeping all the rest as in the original time series. This will change the maximum values of all January maps in the new STRDS as compared to the original one,
-tempmean_monthly.
+tempmean_monthly.
     t.rast.mapcalc input=tempmean_monthly output=january_under_0 basename=january_under_0 \
    -    expression="if(start_month() == 1 && tempmean_monthly > 0, null(), tempmean_monthly)"
    +    expression="if(start_month() == 1 && tempmean_monthly > 0, null(), tempmean_monthly)"
     
     # print minimum and maximum only for January in the new strds
     t.rast.list january_under_0 columns=name,start_time,min,max | grep 01-01
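The grep filter at the end can also be expressed in Python when the check is part of a test or a longer workflow; a small sketch assuming the january_under_0 STRDS created above:

    import grass.script as gs

    # Same listing as above, filtered for January rows instead of piping to grep.
    listing = gs.read_command(
        "t.rast.list",
        input="january_under_0",
        columns="name,start_time,min,max",
    )
    for line in listing.splitlines():
        if "01-01" in line:
            print(line)
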
    diff --git a/temporal/t.rast.out.vtk/t.rast.out.vtk.html b/temporal/t.rast.out.vtk/t.rast.out.vtk.html
    index c0a000cf3e5..9746406f093 100644
    --- a/temporal/t.rast.out.vtk/t.rast.out.vtk.html
    +++ b/temporal/t.rast.out.vtk/t.rast.out.vtk.html
    @@ -1,6 +1,6 @@
     

    DESCRIPTION

-This module exports all maps registered in a space time raster datasets
+t.rast.out.vtk exports all maps registered in a space time raster datasets
as VTK legacy files using a numerical numbering scheme. The VTK files can be visualized with any VTK based visualize. Our preferred tool is ParaView. The VTK legacy files are created using r.out.vtk.
diff --git a/temporal/t.rast.series/t.rast.series.html b/temporal/t.rast.series/t.rast.series.html
index 494605cb294..9a1a09db512 100644
--- a/temporal/t.rast.series/t.rast.series.html
+++ b/temporal/t.rast.series/t.rast.series.html
@@ -20,8 +20,7 @@

    NOTES

    will slow down processing, the user can set a higher limit with the file_limit parameter. Note that file_limit limit should not exceed the user-specific limit on open files set by your operating system. See the -Wiki +Wiki for more information.

    Performance

    @@ -49,7 +48,7 @@

    Estimate the average temperature for a subset of the time series

     t.rast.series input=tempmean_daily output=tempmean_season method=average \
    -  where="start_time >= '2012-06' and start_time <= '2012-08'"
    +  where="start_time >= '2012-06' and start_time <= '2012-08'"
     

    Climatology: single month in a multi-annual time series

    diff --git a/temporal/t.rast.to.rast3/t.rast.to.rast3.html b/temporal/t.rast.to.rast3/t.rast.to.rast3.html index 778e4869c68..3ece18d6ef2 100644 --- a/temporal/t.rast.to.rast3/t.rast.to.rast3.html +++ b/temporal/t.rast.to.rast3/t.rast.to.rast3.html @@ -54,7 +54,7 @@

    EXAMPLE

     # create the subset for 2012 data
     t.rast.extract input=tempmean_monthly output=tempmean_monthly_later_2012 \
    -               where="start_time >= '2012-01-01'"
    +               where="start_time >= '2012-01-01'"
     
     # set the right 3D region
     g.region -p3 res3=500
    diff --git a/temporal/t.rast.univar/t.rast.univar.html b/temporal/t.rast.univar/t.rast.univar.html
    index 7ecc0ed3316..ee1f6940764 100644
    --- a/temporal/t.rast.univar/t.rast.univar.html
    +++ b/temporal/t.rast.univar/t.rast.univar.html
    @@ -56,5 +56,5 @@ 

    SEE ALSO

    AUTHOR

    -Sören Gebbert, Thünen Institute of Climate-Smart Agriculture -Stefan Blumentrath, (Support for zones, parallel processing, and spatial relations) +Sören Gebbert, Thünen Institute of Climate-Smart Agriculture
+Stefan Blumentrath (support for zones, parallel processing, and spatial relations)
diff --git a/temporal/t.rast.what/t.rast.what.html b/temporal/t.rast.what/t.rast.what.html
index a6c112ebbb0..3bd5a6ebd8e 100644
--- a/temporal/t.rast.what/t.rast.what.html
+++ b/temporal/t.rast.what/t.rast.what.html
@@ -109,7 +109,7 @@

    Example 2

    # using the where statement to select a subset of the STRDS # and stdout as output t.rast.what strds=A points=points \ - where="start_time >= '1990-03-01'" layout=timerow -n + where="start_time >= '1990-03-01'" layout=timerow -n x|y|1990-03-01 00:00:00;1990-04-01 00:00:00|1990-04-01 00:00:00;1990-05-01 00:00:00 115.004358627375|36.3593955782903|3|4 diff --git a/temporal/t.rast3d.algebra/t.rast3d.algebra.html b/temporal/t.rast3d.algebra/t.rast3d.algebra.html index 35680946f8d..29ec831cf68 100644 --- a/temporal/t.rast3d.algebra/t.rast3d.algebra.html +++ b/temporal/t.rast3d.algebra/t.rast3d.algebra.html @@ -8,7 +8,7 @@

    NOTES

    The module expects an expression as input parameter in the following form:

    -"result = expression" +"result = expression"

    The statement structure is exact the same as of t.rast.algebra, @@ -17,7 +17,7 @@

    NOTES

    REFERENCES

    -PLY(Python-Lex-Yacc) +PLY(Python-Lex-Yacc)

    SEE ALSO

    diff --git a/temporal/t.rast3d.univar/t.rast3d.univar.html b/temporal/t.rast3d.univar/t.rast3d.univar.html index d9ccd43e154..a46f90507fb 100644 --- a/temporal/t.rast3d.univar/t.rast3d.univar.html +++ b/temporal/t.rast3d.univar/t.rast3d.univar.html @@ -15,5 +15,5 @@

    SEE ALSO

    AUTHOR

    -Sören Gebbert, Thünen Institute of Climate-Smart Agriculture -Stefan Blumentrath, (Support for zones) +Sören Gebbert, Thünen Institute of Climate-Smart Agriculture
+Stefan Blumentrath (support for zones)
diff --git a/temporal/t.register/t.register.html b/temporal/t.register/t.register.html
index ca6c3339db3..31e11fb5962 100644
--- a/temporal/t.register/t.register.html
+++ b/temporal/t.register/t.register.html
@@ -275,9 +275,7 @@

    Importing and registering ECA&D climatic data

Europe based on ECA&D information. Download and decompress mean temperature data from:
-here
-by accepting their
-Terms of use.
+here.
     # import E-OBS V12 into a lat-long project (alternatively, use r.external)
    diff --git a/temporal/t.remove/t.remove.html b/temporal/t.remove/t.remove.html
    index 3d9a9be56f7..09aeb53cefc 100644
    --- a/temporal/t.remove/t.remove.html
    +++ b/temporal/t.remove/t.remove.html
    @@ -3,7 +3,7 @@ 

    DESCRIPTION

The module t.remove removes space time datasets (STRDS, STR3DS, STVDS) from the temporal database. In other words, by default it deletes the relevant database entries. It can also unregister maps from temporal
-database using the recursive mode -r (recursive)
+database using the recursive mode -r (recursive).

    Optionally, also the raster, 3D raster and vector maps of the space time datasets can be removed from the current mapset using the -d (delete) @@ -22,23 +22,23 @@

    EXAMPLE

    raster maps) are removed:
    -#Create new and empty STRDS
    +# Create new and empty STRDS
     t.create output=precip_months_sum semantictype=mean \
       title="Monthly sum of precipitation" \
       description="Monthly sum of precipitation for the \
       North Carolina sample data"
     
    -#Register maps from sample dataset (selecting a subset with g.list)
    +# Register maps from sample dataset (selecting a subset with g.list)
     t.register -i type=raster input=precip_months_sum \
       maps=$(g.list type=raster pattern="201*_precip" separator=comma) \
       start="2010-01-01" increment="1 months"
     
    -#Create some new data by aggregating with 1 years granularity
    +# Create some new data by aggregating with 1 years granularity
     t.rast.aggregate input=precip_months_sum \
    -  output=precip_years_sum basename=precip_years_sum granularity="1 \
    -  years" method=sum
    +  output=precip_years_sum basename=precip_years_sum \
    +  granularity="1 years" method=sum
     
    -#Remove all newly produced data:
    +# Remove all newly produced data:
     
     # a) the aggregated STRDS with 1 years granularity along with its raster maps
     t.remove -df type=strds input=precip_years_sum
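For scripted cleanups the same removal can be issued through grass.script; a sketch using the dataset name from case a) above (the d and f flags together delete the registered raster maps and skip the confirmation step, as in the shell example):

    import grass.script as gs

    # Remove the aggregated STRDS along with its raster maps, forcing removal.
    gs.run_command("t.remove", flags="df", type="strds", input="precip_years_sum")
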
    diff --git a/temporal/t.rename/t.rename.html b/temporal/t.rename/t.rename.html
    index f7270c0741c..98ec45270bd 100644
    --- a/temporal/t.rename/t.rename.html
    +++ b/temporal/t.rename/t.rename.html
    @@ -1,7 +1,8 @@
     

    DESCRIPTION

-This module renames space time datasets of different types (STRDS, STVDS, STR3DS)
-and updates the space time dataset register entries of the registered maps.
+The t.rename module renames space time datasets of different
+types (STRDS, STVDS, STR3DS) and updates the space time dataset register
+entries of the registered maps.

    NOTES

    @@ -12,7 +13,7 @@

    EXAMPLE

    A new vector space time dataset will be created, renamed and in the end removed
    -#Create new and empty STVDS
    +# Create new and empty STVDS
     t.create type=stvds output=toberenamed semantictype=mean \
       title="Example to rename" \
       description="This is an example just to show how rename"
    diff --git a/temporal/t.sample/t.sample.html b/temporal/t.sample/t.sample.html
    index 58cac201de9..0ae2adb1d84 100644
    --- a/temporal/t.sample/t.sample.html
    +++ b/temporal/t.sample/t.sample.html
    @@ -1,6 +1,6 @@
     

    DESCRIPTION

-The purpose of this module is to compute and to show spatio-temporal
+The purpose of t.sample is to compute and to show spatio-temporal
relations between space time datasets of different type. Several input space time datasets are sampled by a sample space time dataset using temporal topological relations. The types of the input space time
@@ -57,7 +57,7 @@

    EXAMPLE

    n1=`g.tempfile pid=1 -d` n2=`g.tempfile pid=2 -d` -cat > "${n1}" << EOF +cat > "${n1}" << EOF a1 a2 a3 @@ -66,7 +66,7 @@

    EXAMPLE

    a6 EOF -cat > "${n2}" << EOF +cat > "${n2}" << EOF pnts1|2001-01-01|2001-03-01 pnts2|2001-05-01|2001-07-01 EOF diff --git a/temporal/t.select/t.select.html b/temporal/t.select/t.select.html index 0d64ce40a6e..bef3af167e3 100644 --- a/temporal/t.select/t.select.html +++ b/temporal/t.select/t.select.html @@ -6,7 +6,7 @@

    DESCRIPTION

    PROGRAM USE

    The module expects an expression as input parameter in the following form:

    -"result = expression" +"result = expression"

    The statement structure is similar to r.mapcalc, see r.mapcalc. @@ -218,11 +218,11 @@

    Logical operators

    == equal != not equal - > greater than - >= greater than or equal - < less than - <= less than or equal - && and + > greater than + >= greater than or equal + < less than + <= less than or equal + && and || or
    @@ -272,23 +272,23 @@

    Comparison operator

    The structure is similar to the select operator with the extension of an aggregation operator: {"comparison operator", "topological relations", aggregation operator, "temporal operator"}
    -This aggregation operator (| or &) define the behaviour if a map is related the more +This aggregation operator (| or &) define the behaviour if a map is related the more than one map, e.g for the topological relations 'contains'. -Should all (&) conditions for the related maps be true or is it sufficient to +Should all (&) conditions for the related maps be true or is it sufficient to have any (|) condition that is true. The resulting boolean value is then compared -to the first condition by the comparison operator (|| or &&). +to the first condition by the comparison operator (|| or &&). As default the aggregation operator is related to the comparison operator:
    -Comparison operator -> aggregation operator: +Comparison operator -> aggregation operator:
    -|| -> | and && -> &
    +|| -> | and && -> &
     
    Examples:
     Condition 1 {||, equal, r} Condition 2
    -Condition 1 {&&, equal|during, l} Condition 2
    -Condition 1 {&&, equal|contains, |, l} Condition 2
    -Condition 1 {&&, equal|during, l} Condition 2 && Condition 3
    -Condition 1 {&&, equal|during, l} Condition 2 {&&,contains, |, r} Condition 3
    +Condition 1 {&&, equal|during, l} Condition 2
    +Condition 1 {&&, equal|contains, |, l} Condition 2
    +Condition 1 {&&, equal|during, l} Condition 2 && Condition 3
    +Condition 1 {&&, equal|during, l} Condition 2 {&&,contains, |, r} Condition 3
     

    Hash operator

    @@ -303,7 +303,7 @@

    Hash operator

    A list of integers (scalars) corresponding to the maps of A that contain maps from B will be returned.

    -C = if({equal}, A {#, contains} B > 2, A {:, contains} B)
    +C = if({equal}, A {#, contains} B > 2, A {:, contains} B)
     
    This expression selects all maps from A that temporally contains at least 2 maps from B and stores them in space time dataset C. The leading equal statement @@ -340,14 +340,14 @@

    EXAMPLES

    with space time dataset B and C and are earlier that Jan. 1. 2005 and store them in space time dataset D.
    -D = if(start_date(A) < "2005-01-01", A : B : C)
    +D = if(start_date(A) < "2005-01-01", A : B : C)
     
    Select all maps from space time dataset A which contains more than three maps of space time dataset B, else select maps from C with time stamps that are not equal to A and store them in space time dataset D.
    -D = if(A {#, contains} B > 3, A {:, contains} B, C)
    +D = if(A {#, contains} B > 3, A {:, contains} B, C)
     
    Select all maps from space time dataset B which are during the temporal @@ -359,7 +359,7 @@

    EXAMPLES

    REFERENCES

    -PLY(Python-Lex-Yacc) +PLY(Python-Lex-Yacc)

    Gebbert, S., Leppelt, T., Pebesma, E., 2019. A topology based spatio-temporal map algebra for big data analysis. Data 4, 86. https://doi.org/10.3390/data4020086 diff --git a/temporal/t.support/t.support.html b/temporal/t.support/t.support.html index be9e45b9d42..e7958797cb5 100644 --- a/temporal/t.support/t.support.html +++ b/temporal/t.support/t.support.html @@ -1,6 +1,7 @@

    DESCRIPTION

-This module is dedicated to modify and update the metadata of a space time dataset.
+The t.support module is dedicated to modify and update the metadata of
+a space time dataset.

    The title, description and the semantic type can be modified.

    diff --git a/temporal/t.unregister/t.unregister.html b/temporal/t.unregister/t.unregister.html index c6f59a9f1fc..b54782e592d 100644 --- a/temporal/t.unregister/t.unregister.html +++ b/temporal/t.unregister/t.unregister.html @@ -1,7 +1,7 @@

    DESCRIPTION

-This module is designed to unregister raster, 3D raster and vector map
-layers from space time datasets and the temporal database.
+The t.unregister module is designed to unregister raster, 3D raster
+and vector map layers from space time datasets and the temporal database.

    Map layer that should be unregistered from the temporal database can be diff --git a/temporal/t.unregister/t.unregister.py b/temporal/t.unregister/t.unregister.py index cacfe579a7b..a67ef91adb5 100755 --- a/temporal/t.unregister/t.unregister.py +++ b/temporal/t.unregister/t.unregister.py @@ -103,17 +103,16 @@ def main(): # Read the map list from file if file: - fd = open(file) - - line = True - while True: - line = fd.readline() - if not line: - break - - mapname = line.strip() - mapid = dummy.build_id(mapname, mapset) - maplist.append(mapid) + with open(file) as fd: + line = True + while True: + line = fd.readline() + if not line: + break + + mapname = line.strip() + mapid = dummy.build_id(mapname, mapset) + maplist.append(mapid) num_maps = len(maplist) update_dict = {} diff --git a/temporal/t.upgrade/t.upgrade.html b/temporal/t.upgrade/t.upgrade.html index 5b0517a001a..1fd6e4986ca 100644 --- a/temporal/t.upgrade/t.upgrade.html +++ b/temporal/t.upgrade/t.upgrade.html @@ -1,7 +1,7 @@

    DESCRIPTION

-This module upgrades the temporal database in the current mapset
-from version 2 (default in GRASS 7) to 3 (default in GRASS 8).
+The t.upgrade module upgrades the temporal database in the current
+mapset from version 2 (default in GRASS 7) to 3 (default in GRASS 8).
The version 3 introduces a semantic label support, see i.band.library for details.
diff --git a/temporal/t.vect.algebra/t.vect.algebra.html b/temporal/t.vect.algebra/t.vect.algebra.html
index 48931efa302..df91dfe0d62 100644
--- a/temporal/t.vect.algebra/t.vect.algebra.html
+++ b/temporal/t.vect.algebra/t.vect.algebra.html
@@ -221,11 +221,11 @@

    Logical operators

    == equal != not equal - > greater than - >= greater than or equal - < less than - <= less than or equal - && and + > greater than + >= greater than or equal + < less than + <= less than or equal + && and || or
    @@ -280,27 +280,27 @@

    Comparison operator


-This aggregation operator (| or &) define the behaviour if a map is related the more
+This aggregation operator (| or &) define the behaviour if a map is related the more
than one map, e.g for the topological relations 'contains'.
-Should all (&) conditions for the related maps be true or is it sufficient to
+Should all (&) conditions for the related maps be true or is it sufficient to
have any (|) condition that is true. The resulting boolean value is then compared
-to the first condition by the comparison operator (|| or &&).
+to the first condition by the comparison operator (|| or &&).
As default the aggregation operator is related to the comparison operator:
    Comparison operator -> aggregation operator:
    -|| -> | and && -> &
    +|| -> | and && -> &
     
    Examples:
     Condition 1 {||, equal, r} Condition 2
    -Condition 1 {&&, equal|during, l} Condition 2
    -Condition 1 {&&, equal|contains, |, l} Condition 2
    -Condition 1 {&&, equal|during, l} Condition 2 && Condition 3
    -Condition 1 {&&, equal|during, l} Condition 2 {&&,contains, |, r} Condition 3
    +Condition 1 {&&, equal|during, l} Condition 2
    +Condition 1 {&&, equal|contains, |, l} Condition 2
    +Condition 1 {&&, equal|during, l} Condition 2 && Condition 3
    +Condition 1 {&&, equal|during, l} Condition 2 {&&,contains, |, r} Condition 3
     

    Hash operator

    @@ -320,7 +320,7 @@

    Hash operator

    -C = if({equal}, A {#, contains} B > 2, A {:, contains} B)
    +C = if({equal}, A {#, contains} B > 2, A {:, contains} B)
     
    This expression selects all maps from A that temporally contains at least 2 @@ -365,7 +365,7 @@

    Spatial vector operators

      Boolean Name   Operator Meaning         Precedence   Correspondent function
     ----------------------------------------------------------------------------------
    - AND            &        Intersection          1      (v.overlay operator=and)
    + AND            &        Intersection          1      (v.overlay operator=and)
      OR             |        Union                 1      (v.overlay operator=or)
      DISJOINT OR    +        Disjoint union        1      (v.patch)
      XOR            ^        Symmetric difference  1      (v.overlay operator=xor)
    @@ -402,15 +402,15 @@ 

    Combinations of temporal, vector and select operators

    a1 of A:
    -C = A {&, contains} B --> c1 = a1 & b1 & b2 & b3
    +C = A {&, contains} B --> c1 = a1 & b1 & b2 & b3
     
    Keep attention that the aggregation behaviour is not symmetric:
    -C = B {&, during} A --> c1 = b1 & a1
    -                        c2 = b2 & a1
    -                        c3 = b3 & a1
    +C = B {&, during} A --> c1 = b1 & a1
    +                        c2 = b2 & a1
    +                        c3 = b3 & a1
     

    Examples:

    @@ -420,7 +420,7 @@

    Examples:

    temporary before Jan. 1. 2005 and store them in space time dataset D.
    -D = if(start_date(A) < "2005-01-01", A & B)
    +D = if(start_date(A) < "2005-01-01", A & B)
     
    Buffer all vector points from space time vector dataset A and B with a @@ -429,7 +429,7 @@

    Examples:

    vector dataset D with intersected time stamps.
    -D = buff_p(A, 1) {&,overlaps|overlapped|equal|during|contains,i} buff_p(B, 1)
    +D = buff_p(A, 1) {&,overlaps|overlapped|equal|during|contains,i} buff_p(B, 1)
     
    Select all maps from space time dataset B which are during the temporal @@ -442,7 +442,7 @@

    Examples:

    REFERENCES

    -PLY(Python-Lex-Yacc) +PLY(Python-Lex-Yacc)

    SEE ALSO

    diff --git a/temporal/t.vect.db.select/t.vect.db.select.html b/temporal/t.vect.db.select/t.vect.db.select.html index 939c0d49c41..17b12b861e7 100644 --- a/temporal/t.vect.db.select/t.vect.db.select.html +++ b/temporal/t.vect.db.select/t.vect.db.select.html @@ -26,7 +26,7 @@

    EXAMPLE

    before 1900-01-01.
    -t.vect.db.select input=shoreline column=DATE,SOURCE t_where="start_time < 1900"
    +t.vect.db.select input=shoreline column=DATE,SOURCE t_where="start_time < 1900"
     start_time|end_time|DATE|SOURCE
     1849|1873|01/01/1858|NOAA/USGS
     1849|1873|01/01/1857|NOAA/USGS
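The pipe-separated listing shown above is convenient to post-process from Python; a sketch (dataset and column names taken from this example) that splits the output into header and records:

    import csv
    import grass.script as gs

    out = gs.read_command(
        "t.vect.db.select",
        input="shoreline",
        column="DATE,SOURCE",
        t_where="start_time < 1900",
    )
    # First line is the header: start_time|end_time|DATE|SOURCE
    rows = list(csv.reader(out.splitlines(), delimiter="|"))
    header, records = rows[0], rows[1:]
    print(header, len(records))
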
    diff --git a/temporal/t.vect.export/t.vect.export.py b/temporal/t.vect.export/t.vect.export.py
    index d440f41d184..72004125af6 100755
    --- a/temporal/t.vect.export/t.vect.export.py
    +++ b/temporal/t.vect.export/t.vect.export.py
    @@ -75,17 +75,17 @@ def main():
         import grass.temporal as tgis
     
         # Get the options
    -    _input = options["input"]
    +    input_ = options["input"]
         output = options["output"]
         compression = options["compression"]
         directory = options["directory"]
         where = options["where"]
    -    _format = options["format"]
    +    format_ = options["format"]
     
         # Make sure the temporal database exists
         tgis.init()
         # Export the space time vector dataset
    -    tgis.export_stds(_input, output, compression, directory, where, _format, "stvds")
    +    tgis.export_stds(input_, output, compression, directory, where, format_, "stvds")
     
     
     ############################################################################
    diff --git a/temporal/t.vect.extract/t.vect.extract.html b/temporal/t.vect.extract/t.vect.extract.html
    index 6de3552d67d..3904cd6800b 100644
    --- a/temporal/t.vect.extract/t.vect.extract.html
    +++ b/temporal/t.vect.extract/t.vect.extract.html
    @@ -7,10 +7,10 @@ 

    DESCRIPTION

    EXAMPLE

In the following example a new space time vector dataset will be create
-with all the data later than 2000:
+with all the data later than the year 2000:
    -t.vect.extract input=shoreline where="start_time > 2000" \
    +t.vect.extract input=shoreline where="start_time > 2000" \
                    output=shoreline_later_2000 basename=new_shoreline
     
     t.info shoreline_later_2000@shoreline type=stvds
    @@ -61,7 +61,7 @@ 

    EXAMPLE

    | Command history: | # 2014-11-29 08:43:50 | t.vect.extract input="shoreline" - | where="start_time > 2000" output="shoreline_later_2000" + | where="start_time > 2000" output="shoreline_later_2000" | basename="new_shoreline" | # 2014-11-29 08:44:14 | t.support type="stvds" diff --git a/temporal/t.vect.list/t.vect.list.html b/temporal/t.vect.list/t.vect.list.html index 0e7211671a6..4b33bbc3626 100644 --- a/temporal/t.vect.list/t.vect.list.html +++ b/temporal/t.vect.list/t.vect.list.html @@ -2,7 +2,7 @@

    DESCRIPTION

-This module provides the same functionality as
+The t.vect.list module provides the same functionality as
t.rast.list, the only difference is the vector map layer metadata.
diff --git a/temporal/t.vect.observe.strds/t.vect.observe.strds.html b/temporal/t.vect.observe.strds/t.vect.observe.strds.html
index e1ca0a6c6c3..c5e923af7e2 100644
--- a/temporal/t.vect.observe.strds/t.vect.observe.strds.html
+++ b/temporal/t.vect.observe.strds/t.vect.observe.strds.html
@@ -94,7 +94,7 @@

    EXAMPLE

    +----------------------------------------------------------------------------+ | Name: precip_stations_monthly | | Mapset: climate_2009_2012 | - | Project: nc_spm_temporal_workshop | + | Project: nc_spm_temporal_workshop | | Database: /grassdata | | Title: North Carolina 30 year precipitation normals (3D) | | Map scale: 1:1 | diff --git a/temporal/t.vect.univar/t.vect.univar.html b/temporal/t.vect.univar/t.vect.univar.html index 2e747567b0a..36015d9396c 100644 --- a/temporal/t.vect.univar/t.vect.univar.html +++ b/temporal/t.vect.univar/t.vect.univar.html @@ -5,7 +5,7 @@

    DESCRIPTION

    EXAMPLE

    -The example is based on the t.vect.observe.strds +The example is based on the t.vect.observe.strds example; so create the precip_stations space time vector dataset and after run the following command: diff --git a/temporal/temporalintro.html b/temporal/temporalintro.html index 2780efd8c85..c52b4e2057e 100644 --- a/temporal/temporalintro.html +++ b/temporal/temporalintro.html @@ -6,13 +6,13 @@
    • Space time raster datasets (strds) are designed to manage raster map time series. Modules that process strds have the - naming prefix t.rast. + naming prefix t.rast.
    • Space time 3D raster datasets (str3ds) are designed to manage 3D raster map time series. Modules that process str3ds have - the naming prefix t.rast3d. + the naming prefix t.rast3d.
    • Space time vector datasets (stvds) are designed to manage vector map time series. Modules that process stvds have the - naming prefix t.vect. + naming prefix t.vect.
    These new data types can be managed, analyzed and processed with @@ -35,9 +35,9 @@

    Temporal data management in general

    map. This is critical if:
    • The user has no write access to the maps from other mapsets - he/she wants to register
    • + he/she wants to register
    • If registered maps are removed from other mapsets, the temporal - database will not be updated and will contain ghost maps
    • + database will not be updated and will contain ghost maps
    SQLite3 or PostgreSQL are supported as temporal database backends. @@ -262,22 +262,32 @@

    See also

    + + diff --git a/utils/Makefile b/utils/Makefile index fdff52b8e3d..8137e1b7e63 100644 --- a/utils/Makefile +++ b/utils/Makefile @@ -5,13 +5,19 @@ SUBDIRS = timer g.html2man include $(MODULE_TOPDIR)/include/Make/Dir.make include $(MODULE_TOPDIR)/include/Make/Compile.make -default: parsubdirs $(UTILSDIR)/mkhtml.py \ +default: parsubdirs $(UTILSDIR)/mkdocs.py $(UTILSDIR)/mkhtml.py $(UTILSDIR)/mkmarkdown.py \ $(UTILSDIR)/generate_last_commit_file.py \ $(UTILSDIR)/g.echo$(EXE) +$(UTILSDIR)/mkdocs.py: mkdocs.py + $(INSTALL) $< $@ + $(UTILSDIR)/mkhtml.py: mkhtml.py $(INSTALL) $< $@ +$(UTILSDIR)/mkmarkdown.py: mkmarkdown.py + $(INSTALL) $< $@ + $(UTILSDIR)/generate_last_commit_file.py: generate_last_commit_file.py $(INSTALL) $< $@ diff --git a/utils/dep_tree2sql.sh b/utils/dep_tree2sql.sh index 176924fc382..99f061f86f4 100755 --- a/utils/dep_tree2sql.sh +++ b/utils/dep_tree2sql.sh @@ -36,28 +36,28 @@ # # You can then use simple queries such as: # -# grass=> SELECT object FROM obj_imp WHERE symbol = 'I_get_target' ; -# object -# -------------------------------------------------------------------- -# imagery/i.ortho.photo/photo.2image/OBJ.i686-pc-linux-gnu/target.o -# imagery/i.ortho.photo/photo.2target/OBJ.i686-pc-linux-gnu/target.o -# imagery/i.ortho.photo/photo.elev/OBJ.i686-pc-linux-gnu/main.o -# imagery/i.ortho.photo/photo.rectify/OBJ.i686-pc-linux-gnu/target.o -# imagery/i.ortho.photo/photo.target/OBJ.i686-pc-linux-gnu/main.o -# imagery/i.rectify/OBJ.i686-pc-linux-gnu/target.o +# grass=> SELECT object FROM obj_imp WHERE symbol = 'I_get_target' ; +# object +# -------------------------------------------------------------------- +# imagery/i.ortho.photo/photo.2image/OBJ.i686-pc-linux-gnu/target.o +# imagery/i.ortho.photo/photo.2target/OBJ.i686-pc-linux-gnu/target.o +# imagery/i.ortho.photo/photo.elev/OBJ.i686-pc-linux-gnu/main.o +# imagery/i.ortho.photo/photo.rectify/OBJ.i686-pc-linux-gnu/target.o +# imagery/i.ortho.photo/photo.target/OBJ.i686-pc-linux-gnu/main.o +# imagery/i.rectify/OBJ.i686-pc-linux-gnu/target.o # # to discover which files import a given symbol, or more complex queries # such as: # -# grass=> SELECT DISTINCT b.object FROM lib_exp a, obj_imp b -# grass-> WHERE a.library = 'libgrass_form.6.1.cvs.so' AND a.symbol = b.symbol ; -# object -# ----------------------------------------------------------- -# vector/v.digit/OBJ.i686-pc-linux-gnu/attr.o -# vector/v.digit/OBJ.i686-pc-linux-gnu/line.o -# vector/v.what/OBJ.i686-pc-linux-gnu/what.o -# visualization/nviz/src/OBJ.i686-pc-linux-gnu/query_vect.o -# (5 rows) +# grass=> SELECT DISTINCT b.object FROM lib_exp a, obj_imp b +# grass-> WHERE a.library = 'libgrass_form.6.1.cvs.so' AND a.symbol = b.symbol ; +# object +# ----------------------------------------------------------- +# vector/v.digit/OBJ.i686-pc-linux-gnu/attr.o +# vector/v.digit/OBJ.i686-pc-linux-gnu/line.o +# vector/v.what/OBJ.i686-pc-linux-gnu/what.o +# visualization/nviz/src/OBJ.i686-pc-linux-gnu/query_vect.o +# (5 rows) # # to discover which files import any symbol defined in a specific # library. And so on. 
@@ -71,111 +71,112 @@ # easiest way to figure out what is in a given table (apart from looking # at the name) is to just sample it, e.g.: # -# grass=> SELECT * FROM stlib_exp LIMIT 5 ; -# library | object | symbol -# -------------------+------------+--------------- -# libgrass_manage.a | add_elem.o | add_element -# libgrass_manage.a | ask.o | ask_in_mapset -# libgrass_manage.a | ask.o | ask_new -# libgrass_manage.a | ask.o | ask_old -# libgrass_manage.a | copyfile.o | copyfile -# (5 rows) +# grass=> SELECT * FROM stlib_exp LIMIT 5 ; +# library | object | symbol +# -------------------+------------+--------------- +# libgrass_manage.a | add_elem.o | add_element +# libgrass_manage.a | ask.o | ask_in_mapset +# libgrass_manage.a | ask.o | ask_new +# libgrass_manage.a | ask.o | ask_old +# libgrass_manage.a | copyfile.o | copyfile +# (5 rows) # tmpdir=/tmp/sql-grass dbname=grass if [ -n "$1" ]; then - builddir="$1" + builddir="$1" else - echo "Usage: del_tree2sql.sh " >&2 - exit 1 + echo "Usage: del_tree2sql.sh " >&2 + exit 1 fi rm -rf "$tmpdir" mkdir -m 711 "$tmpdir" || exit 1 -cd $builddir +cd "$builddir" || exit 1 -( cd dist.* +( + cd dist.* || exit 1 -#LD_LIBRARY_PATH=`pwd`/lib -#export LD_LIBRARY_PATH + #LD_LIBRARY_PATH=`pwd`/lib + #export LD_LIBRARY_PATH -find . -type f -perm +111 \! -name '*.so.*' \ - | while read file ; do ldd $file | sed 's!^!'$file'!' ; done 2>/dev/null \ - | sed -e 's/^\.\///' -e 's/ (0x.*)$//' -e 's/ => \(.*\)$/ \1/' -e 's/ => .*$//' \ - | fgrep -v 'not a dynamic executable' \ - | awk -vOFS='\t' '{print $1,$2,$3 ? $3 : $2}' \ - > "$tmpdir/ldd.lst" + find . -type f -perm /a+x \! -name '*.so.*' \ + | while read -r file; do ldd "$file" | sed 's!^!'"$file"'!'; done 2> /dev/null \ + | sed -e 's/^\.\///' -e 's/ (0x.*)$//' -e 's/ => \(.*\)$/ \1/' -e 's/ => .*$//' \ + | grep -F -v 'not a dynamic executable' \ + | awk -vOFS='\t' '{print $1,$2,$3 ? $3 : $2}' \ + > "$tmpdir/ldd.lst" -find . -type f -perm +111 \! -name '*.so' \ - | xargs nm -AD 2>/dev/null \ - | egrep ': {8}{1,2} U ' \ - | sed -e 's/:/ /g' -e 's/\.\///' \ - | awk -vOFS='\t' '{print $1,$3}' \ - > "$tmpdir/prog_imp.lst" + find . -type f -perm /a+x \! -name '*.so' -print0 \ + | xargs -0 nm -AD 2> /dev/null \ + | grep -E ': {8}{1,2} U ' \ + | sed -e 's/:/ /g' -e 's/\.\///' \ + | awk -vOFS='\t' '{print $1,$3}' \ + > "$tmpdir/prog_imp.lst" -find . -type f -perm +111 \! -name '*.so' \ - | xargs nm -AD 2>/dev/null \ - | egrep ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ - | sed -e 's/:/ /g' -e 's/\.\///' \ - | awk -vOFS='\t' '{print $1,$4}' \ - > "$tmpdir/prog_exp.lst" + find . -type f -perm /a+x \! 
-name '*.so' -print0 \ + | xargs -0 nm -AD 2> /dev/null \ + | grep -E ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ + | sed -e 's/:/ /g' -e 's/\.\///' \ + | awk -vOFS='\t' '{print $1,$4}' \ + > "$tmpdir/prog_exp.lst" ) -find * -type f -name 'lib?*.a' \ - | xargs nm -A \ - | egrep ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$2,$5}' \ - > "$tmpdir/stlib_exp.lst" - -find * -type f -name 'lib?*.so' \ - | xargs nm -AD \ - | egrep ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$4}' \ - > "$tmpdir/shlib_exp.lst" - -find * -type f -name '*.o' \ - | xargs nm -A \ - | egrep ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print $1,$4}' \ - > "$tmpdir/obj_exp.lst" - -find * -type f -name 'lib?*.a' \ - | xargs nm -A \ - | egrep ': {8}{1,2} U ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$2,$4}' \ - > "$tmpdir/stlib_imp.lst" - -find * -type f -name 'lib?*.so' \ - | xargs nm -AD \ - | egrep ': {8}{1,2} U ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$3}' \ - > "$tmpdir/shlib_imp.lst" - -find * -type f -name '*.o' \ - | xargs nm -A \ - | egrep ': {8}{1,2} U ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print $1,$3}' \ - > "$tmpdir/obj_imp.lst" - -libs=`awk '{print $3}' "$tmpdir/ldd.lst" | uniq | sort | uniq` - -nm -AD $libs \ - | egrep ':[0-9a-f]{8}{1,2} [TWDRC] ' \ - | sed 's/:/ /g' \ - | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$4}' \ - > "$tmpdir/libs.lst" - -cat > "$tmpdir/ansi.lst" < "$tmpdir/stlib_exp.lst" + +find ./* -type f -name 'lib?*.so' -print0 \ + | xargs -0 nm -AD \ + | grep -E ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ + | sed 's/:/ /g' \ + | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$4}' \ + > "$tmpdir/shlib_exp.lst" + +find ./* -type f -name '*.o' -print0 \ + | xargs -0 nm -A \ + | grep -E ':[0-9a-f]{8}{1,2} [BCDGRSTW] ' \ + | sed 's/:/ /g' \ + | awk -vOFS='\t' '{print $1,$4}' \ + > "$tmpdir/obj_exp.lst" + +find ./* -type f -name 'lib?*.a' -print0 \ + | xargs -0 nm -A \ + | grep -E ': {8}{1,2} U ' \ + | sed 's/:/ /g' \ + | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$2,$4}' \ + > "$tmpdir/stlib_imp.lst" + +find ./* -type f -name 'lib?*.so' -print0 \ + | xargs -0 nm -AD \ + | grep -E ': {8}{1,2} U ' \ + | sed 's/:/ /g' \ + | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$3}' \ + > "$tmpdir/shlib_imp.lst" + +find ./* -type f -name '*.o' -print0 \ + | xargs -0 nm -A \ + | grep -E ': {8}{1,2} U ' \ + | sed 's/:/ /g' \ + | awk -vOFS='\t' '{print $1,$3}' \ + > "$tmpdir/obj_imp.lst" + +mapfile -t libs < <(awk '{print $3}' "$tmpdir/ldd.lst" | uniq | sort | uniq) + +nm -AD "${libs[@]}" \ + | grep -E ':[0-9a-f]{8}{1,2} [TWDRC] ' \ + | sed 's/:/ /g' \ + | awk -vOFS='\t' '{print gensub("^[^ ]*/","",1,$1),$4}' \ + > "$tmpdir/libs.lst" + +cat > "$tmpdir/ansi.lst" << EOF abort asctime atexit @@ -278,199 +279,199 @@ fsetpos64 tmpfile64 EOF -dropdb "$dbname" +dropdb --if-exists "$dbname" createdb "$dbname" -psql -n -q -d "$dbname" < 1 ; + SELECT DISTINCT symbol + FROM lib_exp + GROUP BY symbol + HAVING COUNT(*) > 1 ; CREATE TABLE duplicates2 AS - SELECT * - FROM lib_exp - WHERE symbol IN ( - SELECT symbol - FROM duplicates - ) ; + SELECT * + FROM lib_exp + WHERE symbol IN ( + SELECT symbol + FROM duplicates + ) ; SELECT DISTINCT library, symbol - INTO TABLE lib_imp - FROM stlib_imp + INTO TABLE lib_imp + FROM stlib_imp UNION SELECT DISTINCT library, symbol - FROM shlib_imp ; + FROM shlib_imp ; 
CREATE TABLE imports AS - SELECT a.library, a.symbol - FROM lib_imp a - WHERE NOT EXISTS ( - SELECT b.library, b.symbol - FROM lib_exp b - WHERE b.symbol = a.symbol - AND b.library = a.library - ) ; + SELECT a.library, a.symbol + FROM lib_imp a + WHERE NOT EXISTS ( + SELECT b.library, b.symbol + FROM lib_exp b + WHERE b.symbol = a.symbol + AND b.library = a.library + ) ; CREATE TABLE defined AS - SELECT DISTINCT symbol - FROM lib_exp ; + SELECT DISTINCT symbol + FROM lib_exp ; CREATE TABLE used AS - SELECT DISTINCT symbol - FROM imports ; + SELECT DISTINCT symbol + FROM imports ; CREATE TABLE undefined AS - SELECT symbol - FROM used u - WHERE NOT EXISTS ( - SELECT * - FROM defined d - WHERE d.symbol = u.symbol - ) ; + SELECT symbol + FROM used u + WHERE NOT EXISTS ( + SELECT * + FROM defined d + WHERE d.symbol = u.symbol + ) ; SELECT symbol INTO TABLE undefined_1 - FROM undefined + FROM undefined EXCEPT SELECT b.symbol - FROM undefined a, libs b - WHERE a.symbol = b.symbol ; + FROM undefined a, libs b + WHERE a.symbol = b.symbol ; CREATE TABLE undefined_2 AS - SELECT i.symbol, i.object, i.library - FROM stlib_imp i, undefined_1 u - WHERE i.symbol = u.symbol ; + SELECT i.symbol, i.object, i.library + FROM stlib_imp i, undefined_1 u + WHERE i.symbol = u.symbol ; CREATE TABLE depends AS - SELECT i.library AS im_lib, - i.symbol AS symbol, - e.library AS ex_lib - FROM imports i, lib_exp e - WHERE i.symbol = e.symbol ; + SELECT i.library AS im_lib, + i.symbol AS symbol, + e.library AS ex_lib + FROM imports i, lib_exp e + WHERE i.symbol = e.symbol ; CREATE TABLE lib_deps AS - SELECT DISTINCT im_lib, ex_lib - FROM depends - WHERE im_lib <> ex_lib ; + SELECT DISTINCT im_lib, ex_lib + FROM depends + WHERE im_lib <> ex_lib ; CREATE TABLE lib_deps_1 AS - SELECT a.im_lib, - a.ex_lib AS in_lib, - b.ex_lib - FROM lib_deps a, lib_deps b - WHERE a.ex_lib = b.im_lib ; + SELECT a.im_lib, + a.ex_lib AS in_lib, + b.ex_lib + FROM lib_deps a, lib_deps b + WHERE a.ex_lib = b.im_lib ; CREATE TABLE lib_deps_2 AS - SELECT a.im_lib, - a.in_lib AS in1_lib, - a.ex_lib AS in2_lib, - b.ex_lib - FROM lib_deps_1 a, lib_deps b - WHERE a.ex_lib = b.im_lib - AND a.im_lib <> a.ex_lib ; + SELECT a.im_lib, + a.in_lib AS in1_lib, + a.ex_lib AS in2_lib, + b.ex_lib + FROM lib_deps_1 a, lib_deps b + WHERE a.ex_lib = b.im_lib + AND a.im_lib <> a.ex_lib ; SELECT im_lib, ex_lib INTO TABLE lib_deps_trans - FROM lib_deps + FROM lib_deps UNION SELECT im_lib, ex_lib - FROM lib_deps_1 + FROM lib_deps_1 UNION SELECT im_lib, ex_lib - FROM lib_deps_2 ; + FROM lib_deps_2 ; CREATE TABLE prog_libs AS SELECT DISTINCT a.program, b.library @@ -479,92 +480,92 @@ WHERE a.symbol = b.symbol ; SELECT DISTINCT a.symbol INTO TABLE libc - FROM prog_imp a, libs b - WHERE a.symbol = b.symbol - AND b.library = 'libc.so.6' + FROM prog_imp a, libs b + WHERE a.symbol = b.symbol + AND b.library = 'libc.so.6' UNION - SELECT DISTINCT a.symbol - FROM imports a, libs b - WHERE a.symbol = b.symbol - AND b.library = 'libc.so.6' ; + SELECT DISTINCT a.symbol + FROM imports a, libs b + WHERE a.symbol = b.symbol + AND b.library = 'libc.so.6' ; SELECT symbol INTO nonansi - FROM libc - WHERE symbol !~ '_.*' + FROM libc + WHERE symbol !~ '_.*' EXCEPT SELECT symbol - FROM ansi ; + FROM ansi ; CREATE TABLE nonansi_progs AS - SELECT a.program, a.symbol - FROM prog_imp a, nonansi b - WHERE a.symbol = b.symbol ; + SELECT a.program, a.symbol + FROM prog_imp a, nonansi b + WHERE a.symbol = b.symbol ; CREATE TABLE nonansi_libs AS - SELECT a.library, a.symbol - FROM imports a, nonansi 
b - WHERE a.symbol = b.symbol ; + SELECT a.library, a.symbol + FROM imports a, nonansi b + WHERE a.symbol = b.symbol ; CREATE TABLE nonansi_prog_counts AS - SELECT symbol, COUNT(*) - FROM nonansi_progs - GROUP BY symbol ; + SELECT symbol, COUNT(*) + FROM nonansi_progs + GROUP BY symbol ; CREATE TABLE nonansi_lib_counts AS - SELECT symbol, COUNT(*) - FROM nonansi_libs - GROUP BY symbol ; + SELECT symbol, COUNT(*) + FROM nonansi_libs + GROUP BY symbol ; SELECT symbol INTO TABLE nonansi_counts - FROM nonansi_prog_counts + FROM nonansi_prog_counts UNION SELECT symbol - FROM nonansi_lib_counts ; + FROM nonansi_lib_counts ; ALTER TABLE nonansi_counts - ADD COLUMN progs INTEGER ; + ADD COLUMN progs INTEGER ; ALTER TABLE nonansi_counts - ADD COLUMN libs INTEGER ; + ADD COLUMN libs INTEGER ; UPDATE nonansi_counts - SET progs = 0, libs = 0 ; + SET progs = 0, libs = 0 ; UPDATE nonansi_counts - SET progs = b.count - FROM nonansi_prog_counts b - WHERE nonansi_counts.symbol = b.symbol ; + SET progs = b.count + FROM nonansi_prog_counts b + WHERE nonansi_counts.symbol = b.symbol ; UPDATE nonansi_counts - SET libs = c.count - FROM nonansi_lib_counts c - WHERE nonansi_counts.symbol = c.symbol; + SET libs = c.count + FROM nonansi_lib_counts c + WHERE nonansi_counts.symbol = c.symbol; -- SELECT a.symbol, a.program --- FROM prog_imp a, nonansi_prog_counts b --- WHERE a.symbol = b.symbol --- AND a.program NOT LIKE 'bin/%' --- ORDER BY b.count DESC, b.symbol ; +-- FROM prog_imp a, nonansi_prog_counts b +-- WHERE a.symbol = b.symbol +-- AND a.program NOT LIKE 'bin/%' +-- ORDER BY b.count DESC, b.symbol ; -- SELECT symbol, library --- FROM duplicates2 --- ORDER BY symbol ; +-- FROM duplicates2 +-- ORDER BY symbol ; -- SELECT a.im_lib, a.ex_lib --- FROM lib_deps a, lib_deps b --- WHERE a.ex_lib = b.im_lib --- AND b.ex_lib = a.im_lib ; +-- FROM lib_deps a, lib_deps b +-- WHERE a.ex_lib = b.im_lib +-- AND b.ex_lib = a.im_lib ; -- SELECT * FROM lib_deps_2 --- WHERE im_lib = ex_lib ; +-- WHERE im_lib = ex_lib ; -- SELECT * FROM lib_deps_1 --- WHERE im_lib = ex_lib ; +-- WHERE im_lib = ex_lib ; -- SELECT im_lib FROM lib_deps_trans --- WHERE im_lib = ex_lib ; +-- WHERE im_lib = ex_lib ; -- SELECT a.program, a.library -- FROM ldd a @@ -578,3 +579,5 @@ UPDATE nonansi_counts -- ---------------------------------------------------------------------- EOF + +rm -rf "$tmpdir" diff --git a/utils/fix_typos.sh b/utils/fix_typos.sh index 68185b6b897..dab54f107a1 100755 --- a/utils/fix_typos.sh +++ b/utils/fix_typos.sh @@ -57,7 +57,7 @@ EXCLUDED_FILES="*/.svn*,configure,config.status,config.sub,*/autom4te.cache/*" EXCLUDED_FILES="$EXCLUDED_FILES,*/lib/cdhc/doc/goodness.ps,*/lib/cdhc/doc/goodness.tex,*/macosx/pkg/resources/ReadMe.rtf" EXCLUDED_FILES="$EXCLUDED_FILES,*/lib/gis/FIPS.code,*/lib/gis/projection,*/lib/proj/parms.table,*/lib/proj/units.table,*/lib/proj/desc.table" EXCLUDED_FILES="$EXCLUDED_FILES,*/locale/po/*.po" -EXCLUDED_FILES="$EXCLUDED_FILES,*/doc/notebooks/*.ipynb,*/*/*/*.ipynb" +EXCLUDED_FILES="$EXCLUDED_FILES,*/*/*/*.ipynb" EXCLUDED_FILES="$EXCLUDED_FILES,*/doc/*.svg,*/gui/icons/grass/*.svg,*/gui/images/*.svg,*/macosx/app/*.svg,*/man/*.svg,*/raster/*/*.svg" EXCLUDED_FILES="$EXCLUDED_FILES,*/fix_typos/*,fix_typos.sh,*.eps,geopackage_aspatial.html" EXCLUDED_FILES="$EXCLUDED_FILES,PROVENANCE.TXT,libtool,ltmain.sh,libtool.m4" diff --git a/utils/g.html2man/g.html2man.py b/utils/g.html2man/g.html2man.py index 58ffb2f7bca..46a749ae5d3 100755 --- a/utils/g.html2man/g.html2man.py +++ b/utils/g.html2man/g.html2man.py @@ -47,7 
+47,7 @@ def main(): sf.close() # strip excess whitespace - blank_re = re.compile("[ \t\n]*\n([ \t]*\n)*") + blank_re = re.compile(r"[ \t\n]*\n([ \t]*\n)*") s = blank_re.sub("\n", s) s = s.lstrip() diff --git a/utils/g.html2man/ggroff.py b/utils/g.html2man/ggroff.py index a0458dd0334..c96fec4eab7 100644 --- a/utils/g.html2man/ggroff.py +++ b/utils/g.html2man/ggroff.py @@ -69,7 +69,7 @@ def __init__(self, filename, stream=sys.stdout): "index": [], } self.stack = [] - self.strip_re = re.compile("^[ \t]+") + self.strip_re = re.compile(r"^[ \t]+") self.filename = filename self.at_bol = True diff --git a/utils/g.html2man/htmltags.txt b/utils/g.html2man/htmltags.txt deleted file mode 100644 index 1ecbb475c13..00000000000 --- a/utils/g.html2man/htmltags.txt +++ /dev/null @@ -1,28 +0,0 @@ -List of tags supported by g.html2man - - -<HEADER> -<HEAD> -<BODY> -<BLINK> -<A HREF="#toc"></A> -<PRE> -<IMG SRC=> -<A NAME="toc"> -<HR> -<H2> -<H3> -<BR> -<BR> -<DT> -<DD> -<LI> -<UL> -<OL> -<B> -<I> -<P> -  - -Hackish: -<table> diff --git a/utils/g.html2man/rest.py b/utils/g.html2man/rest.py index c320f18e397..9f828a78808 100644 --- a/utils/g.html2man/rest.py +++ b/utils/g.html2man/rest.py @@ -20,11 +20,13 @@ def find(node, tag, attr=None, val=None): if isinstance(node, tuple): node = node[2] if not isinstance(node, list): - raise ValueError("child not found") + msg = "child not found" + raise ValueError(msg) for child in node: if match(child, tag, attr, val): return child - raise ValueError("child not found") + msg = "child not found" + raise ValueError(msg) children = itemgetter(2) diff --git a/utils/generate_release_notes.py b/utils/generate_release_notes.py index c8a22b01d86..fb3fb7c2813 100755 --- a/utils/generate_release_notes.py +++ b/utils/generate_release_notes.py @@ -125,13 +125,13 @@ def print_by_category(changes, categories, file=None): def binder_badge(tag): """Get mybinder Binder badge from a given tag, hash, or branch""" binder_image_url = "https://mybinder.org/badge_logo.svg" - binder_url = f"https://mybinder.org/v2/gh/OSGeo/grass/{tag}?urlpath=lab%2Ftree%2Fdoc%2Fnotebooks%2Fjupyter_example.ipynb" # noqa + binder_url = f"https://mybinder.org/v2/gh/OSGeo/grass/{tag}?urlpath=lab%2Ftree%2Fdoc%2Fexamples%2Fnotebooks%2Fjupyter_example.ipynb" # noqa return f"[![Binder]({binder_image_url})]({binder_url})" def print_support(file=None): url = "https://opencollective.com/grass/tiers/supporter/all.json" - response = requests.get(url=url) + response = requests.get(url=url, timeout=7) data = response.json() if data: print_section_heading_3("Monthly Financial Supporters", file=file) @@ -263,7 +263,8 @@ def notes_from_git_log(start_tag, end_tag, categories, exclude): ).stdout commits = yaml.safe_load(text) if not commits: - raise RuntimeError("No commits retrieved from git log (try different tags)") + msg = "No commits retrieved from git log (try different tags)" + raise RuntimeError(msg) svn_name_by_git_author = csv_to_dict( CONFIG_DIRECTORY / "svn_name_git_author.csv", diff --git a/utils/gitlog2changelog.py b/utils/gitlog2changelog.py index 5ad459aa2b1..f878041ca83 100755 --- a/utils/gitlog2changelog.py +++ b/utils/gitlog2changelog.py @@ -61,7 +61,7 @@ # Match the author line and extract the part we want # (Don't use startswith to allow Author override inside commit message.) 
elif "Author:" in line: - authorList = re.split(": ", line, 1) + authorList = re.split(r": ", line, 1) try: author = authorList[1] author = author[0 : len(author) - 1] @@ -71,7 +71,7 @@ # Match the date line elif line.startswith("Date:"): - dateList = re.split(": ", line, 1) + dateList = re.split(r": ", line, 1) try: date = dateList[1] date = date[0 : len(date) - 1] @@ -100,7 +100,7 @@ else: message = message + " " + line.strip() # If this line is hit all of the files have been stored for this commit - elif re.search("files? changed", line): + elif re.search(r"files? changed", line): filesFound = True continue # Collect the files for this commit. FIXME: Still need to add +/- to files diff --git a/utils/mkdocs.py b/utils/mkdocs.py new file mode 100644 index 00000000000..1323fe06229 --- /dev/null +++ b/utils/mkdocs.py @@ -0,0 +1,435 @@ +# common functions used by mkmarkdown.py and mkhtml.py + +import sys +import os +import json +import subprocess +import re +import urllib.parse as urlparse +from http import HTTPStatus +from pathlib import Path +from datetime import datetime +from urllib import request as urlrequest +from urllib.error import HTTPError, URLError + +try: + import grass.script as gs +except ImportError: + # During compilation GRASS GIS + gs = None + +from generate_last_commit_file import COMMIT_DATE_FORMAT + +HEADERS = { + "User-Agent": "Mozilla/5.0", +} +HTTP_STATUS_CODES = list(HTTPStatus) + +top_dir = os.path.abspath(os.getenv("MODULE_TOPDIR")) + + +def read_file(name): + try: + return Path(name).read_text() + except OSError: + return "" + + +def get_version_branch(major_version, addons_git_repo_url): + """Check if version branch for the current GRASS version exists, + if not, take branch for the previous version + For the official repo we assume that at least one version branch is present + + :param major_version int: GRASS GIS major version + :param addons_git_repo_url str: Addons Git ropository URL + + :return version_branch str: version branch + """ + version_branch = f"grass{major_version}" + if gs: + branch = gs.Popen( + [ + "git", + "ls-remote", + "--heads", + addons_git_repo_url, + f"refs/heads/{version_branch}", + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + branch, stderr = branch.communicate() + if stderr: + gs.fatal( + _( + "Failed to get branch from the Git repository" + " <{repo_path}>.\n{error}" + ).format( + repo_path=addons_git_repo_url, + error=gs.decode(stderr), + ) + ) + if version_branch not in gs.decode(branch): + version_branch = "grass{}".format(int(major_version) - 1) + return version_branch + + +def has_src_code_git(src_dir): + """Has core module or addon source code Git + + :param str src_dir: core module or addon root directory + + :return subprocess.CompletedProcess or None: subprocess.CompletedProcess + if core module or addon + source code has Git + """ + actual_dir = Path.cwd() + os.chdir(src_dir) + try: + process_result = subprocess.run( + [ + "git", + "log", + "-1", + f"--format=%H,{COMMIT_DATE_FORMAT}", + src_dir, + ], + capture_output=True, + ) # --format=%H,COMMIT_DATE_FORMAT commit hash,author date + os.chdir(actual_dir) + return process_result if process_result.returncode == 0 else None + except FileNotFoundError: + os.chdir(actual_dir) + return None + + +def get_last_git_commit(src_dir, top_dir, pgm, addon_path, major_version): + """Get last module/addon git commit + :param str src_dir: module/addon source dir + :param str top_dir: top source dir + :param str pgm: program name + :param str addon_path: addon path + :param 
str major_version: major GRASS version + + :return dict git_log: dict with key commit and date, if not + possible download commit from GitHub REST API + server values of keys have "unknown" string + """ + process_result = has_src_code_git(src_dir=src_dir) + if process_result: + return parse_git_commit( + commit=process_result.stdout.decode(), + src_dir=src_dir, + ) + if gs: + # Addons installation + return get_git_commit_from_rest_api_for_addon_repo( + addon_path=addon_path, src_dir=src_dir, pgm=pgm, major_version=major_version + ) + # During GRASS GIS compilation from source code without Git + return get_git_commit_from_file(src_dir=src_dir, pgm=pgm) + + +def parse_git_commit( + commit, + src_dir, + git_log=None, +): + """Parse Git commit + + :param str commit: commit message + :param str src_dir: addon source dir + :param dict git_log: dict which store last commit and commnit + date + + :return dict git_log: dict which store last commit and commnit date + """ + if not git_log: + git_log = get_default_git_log(src_dir=src_dir) + if commit: + git_log["commit"], commit_date = commit.strip().split(",") + git_log["date"] = format_git_commit_date_from_local_git( + commit_datetime=commit_date, + ) + return git_log + + +def get_default_git_log(src_dir, datetime_format="%A %b %d %H:%M:%S %Y"): + """Get default Git commit and commit date, when getting commit from + local Git, local JSON file and remote GitHub REST API server wasn't + successful. + + :param str src_dir: addon source dir + :param str datetime_format: output commit datetime format + e.g. Sunday Jan 16 23:09:35 2022 + + :return dict: dict which store last commit and commnit date + """ + return { + "commit": "unknown", + "date": datetime.fromtimestamp(os.path.getmtime(src_dir)).strftime( + datetime_format + ), + } + + +def format_git_commit_date_from_local_git( + commit_datetime, datetime_format="%A %b %d %H:%M:%S %Y" +): + """Format datetime from local Git or JSON file + + :param str commit_datetime: commit datetime + :param str datetime_format: output commit datetime format + e.g. Sunday Jan 16 23:09:35 2022 + + :return str: output formatted commit datetime + """ + try: + date = datetime.fromisoformat( + commit_datetime, + ) + except ValueError: + if commit_datetime.endswith("Z"): + # Python 3.10 and older does not support Z in time, while recent versions + # of Git (2.45.1) use it. Try to help the parsing if Z is in the string. + date = datetime.fromisoformat(commit_datetime[:-1] + "+00:00") + else: + raise + return date.strftime(datetime_format) + + +def get_git_commit_from_rest_api_for_addon_repo( + addon_path, + src_dir, + pgm, + major_version, + git_log=None, +): + """Get Git commit from remote GitHub REST API for addon repository + + :param str addon_path: addon path + :param str src_dir: addon source dir + :param str pgm: program name + :param major_version int: GRASS GIS major version + :param dict git_log: dict which store last commit and commnit date + + :return dict git_log: dict which store last commit and commnit date + """ + # Accessed date time if getting commit from GitHub REST API wasn't successful + if not git_log: + git_log = get_default_git_log(src_dir=src_dir) + if addon_path is not None: + grass_addons_url = ( + "https://api.github.com/repos/osgeo/grass-addons/commits?" 
+ "path={path}&page=1&per_page=1&sha=grass{major}".format( + path=addon_path, + major=major_version, + ) + ) # sha=git_branch_name + + response = download_git_commit( + url=grass_addons_url, + pgm=pgm, + response_format="application/json", + ) + if response: + commit = json.loads(response.read()) + if commit: + git_log["commit"] = commit[0]["sha"] + git_log["date"] = format_git_commit_date_from_rest_api( + commit_datetime=commit[0]["commit"]["author"]["date"], + ) + return git_log + + +def get_git_commit_from_file( + src_dir, + pgm, + git_log=None, +): + """Get Git commit from JSON file + + :param str src_dir: addon source dir + :param str pgm: program name + :param dict git_log: dict which store last commit and commnit date + + :return dict git_log: dict which store last commit and commnit date + """ + # Accessed date time if getting commit from JSON file wasn't successful + if not git_log: + git_log = get_default_git_log(src_dir=src_dir) + json_file_path = os.path.join( + top_dir, + "core_modules_with_last_commit.json", + ) + if os.path.exists(json_file_path): + with open(json_file_path) as f: + core_modules_with_last_commit = json.load(f) + if pgm in core_modules_with_last_commit: + core_module = core_modules_with_last_commit[pgm] + git_log["commit"] = core_module["commit"] + git_log["date"] = format_git_commit_date_from_local_git( + commit_datetime=core_module["date"], + ) + return git_log + + +def download_git_commit(url, pgm, response_format, *args, **kwargs): + """Download module/addon last commit from GitHub API + + :param str url: url address + :param str pgm: program name + :param str response_format: content type + + :return urllib.request.urlopen or None response: response object or + None + """ + try: + response = urlopen(url, *args, **kwargs) + if response.code != 200: + index = HTTP_STATUS_CODES.index(response.code) + desc = HTTP_STATUS_CODES[index].description + gs.fatal( + _( + "Download commit from <{url}>, return status code {code}, {desc}" + ).format( + url=url, + code=response.code, + desc=desc, + ), + ) + if response_format not in response.getheader("Content-Type"): + gs.fatal( + _( + "Wrong downloaded commit file format. " + "Check url <{url}>. Allowed file format is " + "{response_format}." + ).format( + url=url, + response_format=response_format, + ), + ) + return response + except HTTPError as err: + gs.warning( + _( + "The download of the commit from the GitHub API " + "server wasn't successful, <{}>. Commit and commit " + "date will not be included in the <{}> addon html manual " + "page." + ).format(err.msg, pgm), + ) + except URLError: + gs.warning( + _( + "Download file from <{url}>, failed. Check internet " + "connection. Commit and commit date will not be included " + "in the <{pgm}> addon manual page." + ).format(url=url, pgm=pgm), + ) + + +def format_git_commit_date_from_rest_api( + commit_datetime, datetime_format="%A %b %d %H:%M:%S %Y" +): + """Format datetime from remote GitHub REST API + + :param str commit_datetime: commit datetime + :param str datetime_format: output commit datetime format + e.g. Sunday Jan 16 23:09:35 2022 + + :return str: output formatted commit datetime + """ + return datetime.strptime( + commit_datetime, + "%Y-%m-%dT%H:%M:%SZ", # ISO 8601 YYYY-MM-DDTHH:MM:SSZ + ).strftime(datetime_format) + + +def urlopen(url, *args, **kwargs): + """Wrapper around urlopen. Same function as 'urlopen', but with the + ability to define headers. 
+ """ + request = urlrequest.Request(url, headers=HEADERS) + return urlrequest.urlopen(request, *args, **kwargs) + + +def get_addon_path(base_url, pgm, major_version): + """Check if pgm is in the addons list and get addon path + + Make or update list of the official addons source + code paths g.extension prefix parameter plus /grass-addons directory + using Git repository + + :param str base_url: base URL + :param str pgm: program name + :param str major_version: GRASS major version + + :return str|None: pgm path if pgm is addon else None + """ + addons_base_dir = os.getenv("GRASS_ADDON_BASE") + if addons_base_dir and major_version: + grass_addons_dir = Path(addons_base_dir) / "grass-addons" + if gs: + call = gs.call + popen = gs.Popen + fatal = gs.fatal + else: + call = subprocess.call + popen = subprocess.Popen + fatal = sys.stderr.write + addons_branch = get_version_branch( + major_version=major_version, + addons_git_repo_url=urlparse.urljoin(base_url, "grass-addons/"), + ) + if not Path(addons_base_dir).exists(): + Path(addons_base_dir).mkdir(parents=True, exist_ok=True) + if not grass_addons_dir.exists(): + call( + [ + "git", + "clone", + "-q", + "--no-checkout", + f"--branch={addons_branch}", + "--filter=blob:none", + urlparse.urljoin(base_url, "grass-addons/"), + ], + cwd=addons_base_dir, + ) + addons_file_list = popen( + ["git", "ls-tree", "--name-only", "-r", addons_branch], + cwd=grass_addons_dir, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + addons_file_list, stderr = addons_file_list.communicate() + if stderr: + message = ( + "Failed to get addons files list from the" + " Git repository <{repo_path}>.\n{error}" + ) + if gs: + fatal( + _( + message, + ).format( + repo_path=grass_addons_dir, + error=gs.decode(stderr), + ) + ) + else: + message += "\n" + fatal( + message.format( + repo_path=grass_addons_dir, + error=stderr.decode(), + ) + ) + addon_paths = re.findall( + rf".*{pgm}*.", + gs.decode(addons_file_list) if gs else addons_file_list.decode(), + ) + for addon_path in addon_paths: + if pgm == Path(addon_path).name: + return addon_path diff --git a/utils/mkhtml.py b/utils/mkhtml.py index 39a5e6e26eb..0864356889c 100644 --- a/utils/mkhtml.py +++ b/utils/mkhtml.py @@ -7,7 +7,7 @@ # Glynn Clements # Martin Landa <landa.martin gmail.com> # PURPOSE: Create HTML manual page snippets -# COPYRIGHT: (C) 2007-2024 by Glynn Clements +# COPYRIGHT: (C) 2007-2025 by Glynn Clements # and the GRASS Development Team # # This program is free software under the GNU General @@ -16,22 +16,16 @@ # ############################################################################# -import http import sys import os import string import re from datetime import datetime import locale -import json -import pathlib -import subprocess -from pathlib import Path from html.parser import HTMLParser from urllib import request as urlrequest -from urllib.error import HTTPError, URLError import urllib.parse as urlparse try: @@ -40,52 +34,13 @@ # During compilation GRASS GIS gs = None -from generate_last_commit_file import COMMIT_DATE_FORMAT - -HEADERS = { - "User-Agent": "Mozilla/5.0", -} -HTTP_STATUS_CODES = list(http.HTTPStatus) - - -def get_version_branch(major_version, addons_git_repo_url): - """Check if version branch for the current GRASS version exists, - if not, take branch for the previous version - For the official repo we assume that at least one version branch is present - - :param major_version int: GRASS GIS major version - :param addons_git_repo_url str: Addons Git ropository URL - - :return 
version_branch str: version branch - """ - version_branch = f"grass{major_version}" - if gs: - branch = gs.Popen( - [ - "git", - "ls-remote", - "--heads", - addons_git_repo_url, - f"refs/heads/{version_branch}", - ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - branch, stderr = branch.communicate() - if stderr: - gs.fatal( - _( - "Failed to get branch from the Git repository" - " <{repo_path}>.\n{error}" - ).format( - repo_path=addons_git_repo_url, - error=gs.decode(stderr), - ) - ) - if version_branch not in gs.decode(branch): - version_branch = "grass{}".format(int(major_version) - 1) - return version_branch - +from mkdocs import ( + read_file, + get_version_branch, + get_last_git_commit, + top_dir as topdir, + get_addon_path, +) grass_version = os.getenv("VERSION_NUMBER", "unknown") trunk_url = "" @@ -125,14 +80,6 @@ def _get_encoding(): return encoding -def urlopen(url, *args, **kwargs): - """Wrapper around urlopen. Same function as 'urlopen', but with the - ability to define headers. - """ - request = urlrequest.Request(url, headers=HEADERS) - return urlrequest.urlopen(request, *args, **kwargs) - - def set_proxy(): """Set proxy""" proxy = os.getenv("GRASS_PROXY") @@ -148,274 +95,6 @@ def set_proxy(): set_proxy() -def download_git_commit(url, response_format, *args, **kwargs): - """Download module/addon last commit from GitHub API - - :param str url: url address - :param str response_format: content type - - :return urllib.request.urlopen or None response: response object or - None - """ - try: - response = urlopen(url, *args, **kwargs) - if response.code != 200: - index = HTTP_STATUS_CODES.index(response.code) - desc = HTTP_STATUS_CODES[index].description - gs.fatal( - _( - "Download commit from <{url}>, return status code {code}, {desc}" - ).format( - url=url, - code=response.code, - desc=desc, - ), - ) - if response_format not in response.getheader("Content-Type"): - gs.fatal( - _( - "Wrong downloaded commit file format. " - "Check url <{url}>. Allowed file format is " - "{response_format}." - ).format( - url=url, - response_format=response_format, - ), - ) - return response - except HTTPError as err: - gs.warning( - _( - "The download of the commit from the GitHub API " - "server wasn't successful, <{}>. Commit and commit " - "date will not be included in the <{}> addon html manual " - "page." - ).format(err.msg, pgm), - ) - except URLError: - gs.warning( - _( - "Download file from <{url}>, failed. Check internet " - "connection. Commit and commit date will not be included " - "in the <{pgm}> addon manual page." - ).format(url=url, pgm=pgm), - ) - - -def get_default_git_log(src_dir, datetime_format="%A %b %d %H:%M:%S %Y"): - """Get default Git commit and commit date, when getting commit from - local Git, local JSON file and remote GitHub REST API server wasn't - successful. - - :param str src_dir: addon source dir - :param str datetime_format: output commit datetime format - e.g. 
Sunday Jan 16 23:09:35 2022 - - :return dict: dict which store last commit and commnit date - """ - return { - "commit": "unknown", - "date": datetime.fromtimestamp(os.path.getmtime(src_dir)).strftime( - datetime_format - ), - } - - -def parse_git_commit( - commit, - src_dir, - git_log=None, -): - """Parse Git commit - - :param str commit: commit message - :param str src_dir: addon source dir - :param dict git_log: dict which store last commit and commnit - date - - :return dict git_log: dict which store last commit and commnit date - """ - if not git_log: - git_log = get_default_git_log(src_dir=src_dir) - if commit: - git_log["commit"], commit_date = commit.strip().split(",") - git_log["date"] = format_git_commit_date_from_local_git( - commit_datetime=commit_date, - ) - return git_log - - -def get_git_commit_from_file( - src_dir, - git_log=None, -): - """Get Git commit from JSON file - - :param str src_dir: addon source dir - :param dict git_log: dict which store last commit and commnit date - - :return dict git_log: dict which store last commit and commnit date - """ - # Accessed date time if getting commit from JSON file wasn't successful - if not git_log: - git_log = get_default_git_log(src_dir=src_dir) - json_file_path = os.path.join( - topdir, - "core_modules_with_last_commit.json", - ) - if os.path.exists(json_file_path): - with open(json_file_path) as f: - core_modules_with_last_commit = json.load(f) - if pgm in core_modules_with_last_commit: - core_module = core_modules_with_last_commit[pgm] - git_log["commit"] = core_module["commit"] - git_log["date"] = format_git_commit_date_from_local_git( - commit_datetime=core_module["date"], - ) - return git_log - - -def get_git_commit_from_rest_api_for_addon_repo( - addon_path, - src_dir, - git_log=None, -): - """Get Git commit from remote GitHub REST API for addon repository - - :param str addon_path: addon path - :param str src_dir: addon source dir - :param dict git_log: dict which store last commit and commnit date - - :return dict git_log: dict which store last commit and commnit date - """ - # Accessed date time if getting commit from GitHub REST API wasn't successful - if not git_log: - git_log = get_default_git_log(src_dir=src_dir) - if addon_path is not None: - grass_addons_url = ( - "https://api.github.com/repos/osgeo/grass-addons/commits?" - "path={path}&page=1&per_page=1&sha=grass{major}".format( - path=addon_path, - major=major, - ) - ) # sha=git_branch_name - - response = download_git_commit( - url=grass_addons_url, - response_format="application/json", - ) - if response: - commit = json.loads(response.read()) - if commit: - git_log["commit"] = commit[0]["sha"] - git_log["date"] = format_git_commit_date_from_rest_api( - commit_datetime=commit[0]["commit"]["author"]["date"], - ) - return git_log - - -def format_git_commit_date_from_rest_api( - commit_datetime, datetime_format="%A %b %d %H:%M:%S %Y" -): - """Format datetime from remote GitHub REST API - - :param str commit_datetime: commit datetime - :param str datetime_format: output commit datetime format - e.g. 
Sunday Jan 16 23:09:35 2022 - - :return str: output formatted commit datetime - """ - return datetime.strptime( - commit_datetime, - "%Y-%m-%dT%H:%M:%SZ", # ISO 8601 YYYY-MM-DDTHH:MM:SSZ - ).strftime(datetime_format) - - -def format_git_commit_date_from_local_git( - commit_datetime, datetime_format="%A %b %d %H:%M:%S %Y" -): - """Format datetime from local Git or JSON file - - :param str commit_datetime: commit datetime - :param str datetime_format: output commit datetime format - e.g. Sunday Jan 16 23:09:35 2022 - - :return str: output formatted commit datetime - """ - try: - date = datetime.fromisoformat( - commit_datetime, - ) - except ValueError: - if commit_datetime.endswith("Z"): - # Python 3.10 and older does not support Z in time, while recent versions - # of Git (2.45.1) use it. Try to help the parsing if Z is in the string. - date = datetime.fromisoformat(commit_datetime[:-1] + "+00:00") - else: - raise - return date.strftime(datetime_format) - - -def has_src_code_git(src_dir, is_addon): - """Has core module or addon source code Git - - :param str src_dir: core module or addon root directory - :param bool is_addon: True if it is addon - - :return subprocess.CompletedProcess or None: subprocess.CompletedProcess - if core module or addon - source code has Git - """ - actual_dir = Path.cwd() - if is_addon: - os.chdir(src_dir) - else: - os.chdir(topdir) - try: - process_result = subprocess.run( - [ - "git", - "log", - "-1", - f"--format=%H,{COMMIT_DATE_FORMAT}", - src_dir, - ], - capture_output=True, - ) # --format=%H,COMMIT_DATE_FORMAT commit hash,author date - os.chdir(actual_dir) - return process_result if process_result.returncode == 0 else None - except FileNotFoundError: - os.chdir(actual_dir) - return None - - -def get_last_git_commit(src_dir, addon_path, is_addon): - """Get last module/addon git commit - - :param str src_dir: module/addon source dir - :param str addon_path: addon path - :param bool is_addon: True if it is addon - - :return dict git_log: dict with key commit and date, if not - possible download commit from GitHub REST API - server values of keys have "unknown" string - """ - process_result = has_src_code_git(src_dir=src_dir, is_addon=is_addon) - if process_result: - return parse_git_commit( - commit=process_result.stdout.decode(), - src_dir=src_dir, - ) - if gs: - # Addons installation - return get_git_commit_from_rest_api_for_addon_repo( - addon_path=addon_path, - src_dir=src_dir, - ) - # During GRASS GIS compilation from source code without Git - return get_git_commit_from_file(src_dir=src_dir) - - html_page_footer_pages_path = os.getenv("HTML_PAGE_FOOTER_PAGES_PATH") or "" pgm = sys.argv[1] @@ -509,13 +188,6 @@ def get_last_git_commit(src_dir, addon_path, is_addon): ) -def read_file(name): - try: - return Path(name).read_text() - except OSError: - return "" - - def create_toc(src_data): class MyHTMLParser(HTMLParser): def __init__(self): @@ -562,7 +234,7 @@ def handle_data(self, data): def escape_href(label): # remove html tags - label = re.sub("<[^<]+?>", "", label) + label = re.sub(r"<[^<]+?>", "", label) # fix   label = label.replace(" ", "") # fix " @@ -677,95 +349,18 @@ def update_toc(data): return "\n".join(ret_data) -def get_addon_path(): - """Check if pgm is in the addons list and get addon path - - Make or update list of the official addons source - code paths g.extension prefix parameter plus /grass-addons directory - using Git repository - - :return str|None: pgm path if pgm is addon else None - """ - addons_base_dir = 
os.getenv("GRASS_ADDON_BASE") - if addons_base_dir and major: - grass_addons_dir = pathlib.Path(addons_base_dir) / "grass-addons" - if gs: - call = gs.call - popen = gs.Popen - fatal = gs.fatal - else: - call = subprocess.call - popen = subprocess.Popen - fatal = sys.stderr.write - addons_branch = get_version_branch( - major_version=major, - addons_git_repo_url=urlparse.urljoin(base_url, "grass-addons/"), - ) - if not pathlib.Path(addons_base_dir).exists(): - pathlib.Path(addons_base_dir).mkdir(parents=True, exist_ok=True) - if not grass_addons_dir.exists(): - call( - [ - "git", - "clone", - "-q", - "--no-checkout", - f"--branch={addons_branch}", - "--filter=blob:none", - urlparse.urljoin(base_url, "grass-addons/"), - ], - cwd=addons_base_dir, - ) - addons_file_list = popen( - ["git", "ls-tree", "--name-only", "-r", addons_branch], - cwd=grass_addons_dir, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - addons_file_list, stderr = addons_file_list.communicate() - if stderr: - message = ( - "Failed to get addons files list from the" - " Git repository <{repo_path}>.\n{error}" - ) - if gs: - fatal( - _( - message, - ).format( - repo_path=grass_addons_dir, - error=gs.decode(stderr), - ) - ) - else: - message += "\n" - fatal( - message.format( - repo_path=grass_addons_dir, - error=stderr.decode(), - ) - ) - addon_paths = re.findall( - rf".*{pgm}*.", - gs.decode(addons_file_list) if gs else addons_file_list.decode(), - ) - for addon_path in addon_paths: - if pgm == pathlib.Path(addon_path).name: - return addon_path - - # process header src_data = read_file(src_file) -name = re.search("(<!-- meta page name:)(.*)(-->)", src_data, re.IGNORECASE) +name = re.search(r"(<!-- meta page name:)(.*)(-->)", src_data, re.IGNORECASE) pgm_desc = "GRASS GIS Reference Manual" if name: pgm = name.group(2).strip().split("-", 1)[0].strip() name_desc = re.search( - "(<!-- meta page name description:)(.*)(-->)", src_data, re.IGNORECASE + r"(<!-- meta page name description:)(.*)(-->)", src_data, re.IGNORECASE ) if name_desc: pgm_desc = name_desc.group(2).strip() -desc = re.search("(<!-- meta page description:)(.*)(-->)", src_data, re.IGNORECASE) +desc = re.search(r"(<!-- meta page description:)(.*)(-->)", src_data, re.IGNORECASE) if desc: pgm = desc.group(2).strip() header_tmpl = string.Template(header_base + header_nopgm) @@ -774,7 +369,7 @@ def get_addon_path(): else: header_tmpl = string.Template(header_base + header_pgm_desc) -if not re.search("<html>", src_data, re.IGNORECASE): +if not re.search(r"<html>", src_data, re.IGNORECASE): tmp_data = read_file(tmp_file) """ Adjusting keywords html pages paths if add-on html man page @@ -800,7 +395,7 @@ def get_addon_path(): orig_keywords_paths.group(1), ",".join(new_keywords_paths), ) - if not re.search("<html>", tmp_data, re.IGNORECASE): + if not re.search(r"<html>", tmp_data, re.IGNORECASE): sys.stdout.write(header_tmpl.substitute(PGM=pgm, PGM_DESC=pgm_desc)) if tmp_data: @@ -808,7 +403,7 @@ def get_addon_path(): for line in tmp_data.splitlines(True): # The cleanup happens on Makefile level too. if not re.search( - "</body>|</html>|</div> <!-- end container -->", line, re.IGNORECASE + r"</body>|</html>|</div> <!-- end container -->", line, re.IGNORECASE ): if header_logo_img_el in line: sys.stdout.write(line) @@ -825,7 +420,7 @@ def get_addon_path(): # if </html> is found, suppose a complete html is provided. 
# otherwise, generate module class reference: -if re.search("</html>", src_data, re.IGNORECASE): +if re.search(r"</html>", src_data, re.IGNORECASE): sys.exit() index_names = { @@ -858,7 +453,7 @@ def to_title(name): index_titles[key] = to_title(name) # process footer -index = re.search("(<!-- meta page index:)(.*)(-->)", src_data, re.IGNORECASE) +index = re.search(r"(<!-- meta page index:)(.*)(-->)", src_data, re.IGNORECASE) if index: index_name = index.group(2).strip() if "|" in index_name: @@ -875,7 +470,6 @@ def to_title(name): year = str(datetime.now().year) # check the names of scripts to assign the right folder -topdir = os.path.abspath(os.getenv("MODULE_TOPDIR")) curdir = os.path.abspath(os.path.curdir) if curdir.startswith(topdir + os.path.sep): source_url = trunk_url @@ -887,7 +481,7 @@ def to_title(name): url_source = "" addon_path = None if os.getenv("SOURCE_URL", ""): - addon_path = get_addon_path() + addon_path = get_addon_path(base_url=base_url, pgm=pgm, major_version=major) if addon_path: # Addon is installed from the local dir if os.path.exists(os.getenv("SOURCE_URL")): @@ -918,8 +512,10 @@ def to_title(name): git_commit = get_last_git_commit( src_dir=curdir, + top_dir=topdir, + pgm=pgm, addon_path=addon_path or None, - is_addon=bool(addon_path), + major_version=major, ) if git_commit["commit"] == "unknown": date_tag = "Accessed: {date}".format(date=git_commit["date"]) diff --git a/utils/mkmarkdown.py b/utils/mkmarkdown.py new file mode 100644 index 00000000000..b7f595fc3c4 --- /dev/null +++ b/utils/mkmarkdown.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python3 + +############################################################################ +# +# MODULE: Builds manual pages (Markdown) +# AUTHOR(S): Markus Neteler +# Glynn Clements +# Martin Landa <landa.martin gmail.com> +# PURPOSE: Create Markdown manual page snippets +# Inspired by mkhtml.py +# COPYRIGHT: (C) 2024 by the GRASS Development Team +# +# This program is free software under the GNU General +# Public License (>=v2). Read the file COPYING that +# comes with GRASS for details. +# +############################################################################# + +import os +import sys +import string +import urllib.parse as urlparse + +try: + import grass.script as gs +except ImportError: + # During compilation GRASS GIS + gs = None + +from mkdocs import ( + read_file, + get_version_branch, + get_last_git_commit, + top_dir, + get_addon_path, +) + + +def parse_source(pgm): + """Parse source code to get source code and log message URLs, + and date time of the last modification. 
+ + :param str pgm: program name + + :return url_source, url_log, date_time + """ + grass_version = os.getenv("VERSION_NUMBER", "unknown") + main_url = "" + addons_url = "" + grass_git_branch = "main" + major, minor, patch = None, None, None + if grass_version != "unknown": + major, minor, patch = grass_version.split(".") + base_url = "https://github.com/OSGeo/" + main_url = urlparse.urljoin( + base_url, + urlparse.urljoin( + "grass/tree/", + grass_git_branch + "/", + ), + ) + addons_url = urlparse.urljoin( + base_url, + urlparse.urljoin( + "grass-addons/tree/", + get_version_branch( + major, + urlparse.urljoin(base_url, "grass-addons/"), + ), + ), + ) + + cur_dir = os.path.abspath(os.path.curdir) + if cur_dir.startswith(top_dir + os.path.sep): + source_url = main_url + pgmdir = cur_dir.replace(top_dir, "").lstrip(os.path.sep) + else: + # addons + source_url = addons_url + pgmdir = os.path.sep.join(cur_dir.split(os.path.sep)[-3:]) + + url_source = "" + addon_path = None + if os.getenv("SOURCE_URL", ""): + addon_path = get_addon_path(base_url=base_url, pgm=pgm, major_version=major) + if addon_path: + # Addon is installed from the local dir + if os.path.exists(os.getenv("SOURCE_URL")): + url_source = urlparse.urljoin( + addons_url, + addon_path, + ) + else: + url_source = urlparse.urljoin( + os.environ["SOURCE_URL"].split("src")[0], + addon_path, + ) + else: + url_source = urlparse.urljoin(source_url, pgmdir) + if sys.platform == "win32": + url_source = url_source.replace(os.path.sep, "/") + + # Process Source code section + branches = "branches" + tree = "tree" + commits = "commits" + + if branches in url_source: + url_log = url_source.replace(branches, commits) + url_source = url_source.replace(branches, tree) + else: + url_log = url_source.replace(tree, commits) + + git_commit = get_last_git_commit( + src_dir=cur_dir, + top_dir=top_dir, + pgm=pgm, + addon_path=addon_path or None, + major_version=major, + ) + if git_commit["commit"] == "unknown": + date_tag = "Accessed: {date}".format(date=git_commit["date"]) + else: + commit = git_commit["commit"] + date_tag = ( + "Latest change: {date} in commit: " + "[{commit_short}](https://github.com/OSGeo/grass/commit/{commit})".format( + date=git_commit["date"], commit=commit, commit_short=commit[:7] + ) + ) + + return url_source, url_log, date_tag + + +if __name__ == "__main__": + pgm = sys.argv[1] + + src_file = f"{pgm}.md" + tmp_file = f"{pgm}.tmp.md" + + sourcecode = string.Template( + """ +## SOURCE CODE + +Available at: [${PGM} source code](${URL_SOURCE}) +([history](${URL_LOG}))${MD_NEWLINE} +${DATE_TAG} +""" + ) + + # process header/usage generated by --md-description + sys.stdout.write(read_file(tmp_file)) + sys.stdout.write("\n") + # process body + sys.stdout.write(read_file(src_file)) + + # process footer + url_source, url_log, date_tag = parse_source(pgm) + sys.stdout.write( + sourcecode.substitute( + URL_SOURCE=url_source, + PGM=pgm, + URL_LOG=url_log, + DATE_TAG=date_tag, + MD_NEWLINE=" ", + ) + ) diff --git a/utils/mkrest.py b/utils/mkrest.py index 8d156e0bcc7..6270b78c6b2 100755 --- a/utils/mkrest.py +++ b/utils/mkrest.py @@ -66,7 +66,7 @@ def read_file(name): src_data = read_file(src_file) -title = re.search("(<!-- meta page description:)(.*)(-->)", src_data, re.IGNORECASE) +title = re.search(r"(<!-- meta page description:)(.*)(-->)", src_data, re.IGNORECASE) if title: title_name = title.group(2).strip() @@ -108,7 +108,7 @@ def read_file(name): "v": "vector", } -index = re.search("(<!-- meta page index:)(.*)(-->)", src_data, 
re.IGNORECASE) +index = re.search(r"(<!-- meta page index:)(.*)(-->)", src_data, re.IGNORECASE) if index: index_name = index.group(2).strip() diff --git a/utils/ppmrotate.py b/utils/ppmrotate.py index cef32eba0f4..acceba64368 100755 --- a/utils/ppmrotate.py +++ b/utils/ppmrotate.py @@ -53,7 +53,8 @@ def read_ppm(src): j = text.find("\n", i) maxval = text[i:j] if int(maxval) != 255: - raise OSError("Max value in image != 255") + msg = "Max value in image != 255" + raise OSError(msg) i = j + 1 return array.array("B", text[i:]) diff --git a/utils/pylintrc.txt b/utils/pylintrc.txt deleted file mode 100644 index 2b812c4ace2..00000000000 --- a/utils/pylintrc.txt +++ /dev/null @@ -1,287 +0,0 @@ -# This is a configuration file for pylint. - -# (C) 2011-2013 by the GRASS Development Team - -# This program is free software under the GNU General Public License -# (>=v2). Read the file COPYING that comes with GRASS for details. - -# author: Vaclav Petras <wenzeslaus gmail.com> - -# This configuration should work for all Python library, Python scripts and -# wxPython GUI. -# Example for gui/wxpython: -# pylint --rcfile=../../utils/pylintrc.txt -f parseable -r n -i y lmgr/ - -# A lot of errors need to be ignored now to avoid a large number of messages. -# Files need to be fixed one by one (partially without this configuration). - -# Save this file ~/.pylintrc on Linux - - -[MASTER] - -# Specify a configuration file. -#rcfile= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Profiled execution. -profile=no - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Pickle collected data for later comparisons. -persistent=no - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - - -[MESSAGES CONTROL] - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time. -#enable= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). -disable=E1103,W0141,W0201,W0311,W0312,W0612,W0613,W0614,W0622,R0201,R0901,R0904,C0103,C0111,C0322,C0323,C0324,F0401 - -# E1103: caused probably by wxPython -# W0613: unused arg -> solve somewhere here using event -# C0103: names not defined -# W0201: define methods used for init -# W0622: Redefining built-in %r: needs complex solution - -# E1101: %s %r has no %r member: why? -# R0201: can be solved -# F0401: something is possible to import only in grass - - -[REPORTS] - -# Set the output format. Available formats are text, parseable, colorized, msvs -# (visual studio) and html -output-format=text - -# Include message's id in output -include-ids=no - -# Put messages in a separate file for each module / package specified on the -# command line instead of printing them on stdout. Reports (if any) will be -# written in a file name "pylint_global.[txt|html]". -files-output=no - -# Tells whether to display a full report or only the messages -reports=yes - -# Python expression which should return a note less than 10 (10 is the highest -# note). 
You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Add a comment according to your evaluation note. This is used by the global -# evaluation report (RP0004). -comment=no - - -[VARIABLES] - -# Tells whether we should check for unused import in __init__ files. -init-import=yes - -# A regular expression matching the beginning of the name of dummy variables -# (i.e. not used). -dummy-variables-rgx=dummy|unused - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins=_ - - -[BASIC] - -# Required attributes for module, separated by a comma -required-attributes= - -# List of builtins function names that should not be used, separated by a comma -bad-functions=map,filter,apply,input - -# Regular expression which should only match correct module names -module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ - -# Regular expression which should only match correct module level names -const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ - -# Regular expression which should only match correct class names -class-rgx=[A-Z_][a-zA-Z0-9]+$ - -# Regular expression which should only match correct function names -function-rgx=[a-z_][a-zA-Z0-9_]{2,30}$ - -# Regular expression which should only match correct method names -method-rgx=[a-zA-Z_][a-zA-Z0-9_]{2,30}$ - -# Regular expression which should only match correct instance attribute names -attr-rgx=[a-z_][a-zA-Z0-9_]{2,30}$ - -# Regular expression which should only match correct argument names -argument-rgx=[a-z_][a-zA-Z0-9_]{2,30}$ - -# Regular expression which should only match correct variable names -variable-rgx=[a-z_][a-zA-Z0-9_]{2,30}$|[a-z] - -# Regular expression which should only match correct list comprehension / -# generator expression variable names -inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ - -# Good variable names which should always be accepted, separated by a comma -good-names=i,j,_,x,y,z,N,E,S,W,id - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo,bar,baz,toto,tutu,tata - -# Regular expression which should only match functions or classes name which do -# not require a docstring -no-docstring-rgx=__.*__ - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME,TODO,\\todo,@todo - -# general regexp for convention, warning etc. would be great - -[TYPECHECK] - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# List of classes names for which member attributes should not be checked -# (useful for classes with attributes dynamically set). -ignored-classes=SQLObject - -# When zope mode is activated, add a predefined set of Zope acquired attributes -# to generated-members. -zope=no - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E0201 when accessed. Python regular -# expressions are accepted. -generated-members=REQUEST,acl_users,aq_parent - - -[FORMAT] - -# Maximum number of characters on a single line. -# Increased form 80. -max-line-length=150 - -# Maximum number of lines in a module -# Increased from 1000. 
-max-module-lines=2000 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - - -[SIMILARITIES] - -# Minimum lines number of a similarity. -min-similarity-lines=4 - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - - -[DESIGN] - -# Maximum number of arguments for function / method -# Increased from 5. -max-args=10 - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|event - -# Maximum number of locals for function / method body -# Changed from 15. -max-locals=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of branch for function / method body -# Changed from 12. -max-branchs=20 - -# Maximum number of statements in function / method body -max-statements=50 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of attributes for a class (see R0902). -# Changed from 7. -max-attributes=10 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# Maximum number of public methods for a class (see R0904). -# Increased from 20 -max-public-methods=30 - - -[IMPORTS] - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub,string,TERMIOS,Bastion,rexec - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - - -[CLASSES] - -# List of interface methods to ignore, separated by a comma. This is used for -# instance to not check methods defines in Zope's Interface base class. -ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__,__new__,setUp - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. 
Defaults to -# "Exception" -overgeneral-exceptions=Exception diff --git a/utils/release.yml b/utils/release.yml index eb601419274..c0d9cc17400 100644 --- a/utils/release.yml +++ b/utils/release.yml @@ -6,7 +6,7 @@ notes: example: 'r.slope.aspect:' - title: Graphical User Interface - regexp: '(wxGUI.*|gui|GUI): ' + regexp: '(wxGUI.*|gui|GUI)(\(\w[\w.-]*\))?: ' example: 'wxGUI:' - title: Python @@ -14,7 +14,7 @@ notes: example: 'grass.script:' - title: Documentation and Messages - regexp: '(docs?|man|manual|manual pages|[Ss]phinx|mkhtml|messages?): ' + regexp: '(docs?|man|manual|manual pages|[Ss]phinx|mkhtml|MkDocs|mkdocs|messages?): ' example: 'doc:' - title: Libraries and General Functionality diff --git a/vector/v.buffer/v.buffer.html b/vector/v.buffer/v.buffer.html index 06b2828cdfb..b2e11ac47cd 100644 --- a/vector/v.buffer/v.buffer.html +++ b/vector/v.buffer/v.buffer.html @@ -10,12 +10,13 @@ <h2>NOTES</h2> Internal buffers for areas can be generated with negative distance values ("inward buffer" or "negative buffer" or "shrinking"). + <p> <em>v.buffer</em> fusions the geometries of buffers by default. Categories and attribute table will not be transferred (this would not make sense as one buffer geometry can be the result of many different input geometries). To transfer the categories and -attributes the user can set the <b>t</b> flag. This will result in +attributes the user can set the <b>-t</b> flag. This will result in buffers being cut up where buffers of individual input geometries overlap. Each part that is the result of overlapping buffers of multiple geometries will have multiple categories corresponding to @@ -29,11 +30,11 @@ <h2>NOTES</h2> the GEOS library. <p> -<i>For advanced users:</i> built-in buffer algorithm no longer -desired, we use GEOS: If GRASS is not compiled with GEOS support -or <a href="variables.html">environmental -variable</a> <tt>GRASS_VECTOR_BUFFER</tt> is defined, then GRASS -generates buffers using built-in buffering algorithm (which is still +<i>For advanced users:</i> the built-in buffer algorithm is no longer +used, as we use GEOS instead. If GRASS was not compiled with GEOS support +or the <a href="variables.html">environmental +variable</a> <code>GRASS_VECTOR_BUFFER</code> is defined, then GRASS +generates buffers using the built-in buffering algorithm (which is still buggy for some input data). <p> @@ -56,8 +57,8 @@ <h3>Corner settings</h3> <img src="v_buffer_line.png"> </center> -Straight corners with caps are created by <b>-s</b> flag (red color on -the figure below), while <b>-c</b> flag doesn't make caps at the ends of +Straight corners with caps are created using the <b>-s</b> flag (red color on +the figure below), while the <b>-c</b> flag doesn't make caps at the ends of polylines (green color on the figure below): <center> @@ -65,8 +66,8 @@ <h3>Corner settings</h3> <img src="v_buffer_line_c.png"> </center> -Using <b>-s</b> with a point vector map as input data, square buffers are -created instead of round buffers. +With a point vector map as input data, square buffers are created instead +of round buffers by using the <b>-s</b> flag. 
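For example, assuming an existing point vector map named <em>schools</em> (the map name and the distance of 500 map units are only illustrative), square buffers around points can be requested as:

<div class="code"><pre>
# square buffers around points instead of round ones (-s flag)
v.buffer -s input=schools output=schools_square_buf distance=500
</pre></div>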
<center> <img src="v_buffer_point_s.png"> diff --git a/vector/v.build.polylines/walk.c b/vector/v.build.polylines/walk.c index 9507c587319..2d168fb6662 100644 --- a/vector/v.build.polylines/walk.c +++ b/vector/v.build.polylines/walk.c @@ -166,6 +166,8 @@ int walk_forward_and_pick_up_coords(struct Map_info *map, int start_line, next_node = n2; /* continue at end node */ } else { + if (cats_tmp) + Vect_destroy_cats_struct(cats_tmp); return 1; /* no other line */ } } diff --git a/vector/v.build/v.build.html b/vector/v.build/v.build.html index c66da69110a..20fd726d44a 100644 --- a/vector/v.build/v.build.html +++ b/vector/v.build/v.build.html @@ -76,7 +76,8 @@ <h2>SEE ALSO</h2> <a href="v.build.all.html">v.build.all</a>, <a href="v.build.polylines.html">v.build.polylines</a>, <a href="v.edit.html">v.edit</a>, -<a href="v.split.html">v.split</a> +<a href="v.split.html">v.split</a>, +<a href="v.support.html">v.support</a> </em> <p>See also <em><a href="wxGUI.vdigit.html">wxGUI vector digitizer</a></em>. diff --git a/vector/v.class/v.class.html b/vector/v.class/v.class.html index 026f81c7e73..a530ca39e8e 100644 --- a/vector/v.class/v.class.html +++ b/vector/v.class/v.class.html @@ -35,19 +35,20 @@ <h2>NOTES</h2> are those of the number of breaks asked for. <p>The <em>discont</em> algorithm systematically searches discontinuities -in the slope of the cumulated frequencies curve, by approximating this +in the slope of the cumulative frequencies curve, by approximating this curve through straight line segments whose vertices define the class breaks. The first approximation is a straight line which links the two end nodes of the curve. This line is then replaced by a two-segmented polyline whose central node is the point on the curve which is farthest from the preceding straight line. The point on the curve furthest from this new polyline is then chosen as a new node to create break up one of -the two preceding segments, and so forth. The problem of the difference -in terms of units between the two axes is solved by rescaling both -amplitudes to an interval between 0 and 1. In the original algorithm, -the process is stopped when the difference between the slopes of the two -new segments is no longer significant (alpha = 0.05). As the slope is -the ratio between the frequency and the amplitude of the corresponding +the two preceding segments, and so forth. + +<p>The problem of the difference in terms of units between the two axes +is solved by rescaling both amplitudes to an interval between 0 and 1. +In the original algorithm, the process is stopped when the difference between +the slopes of the two new segments is no longer significant (alpha = 0.05). As +the slope is the ratio between the frequency and the amplitude of the corresponding interval, i.e. its density, this effectively tests whether the frequencies of the two newly proposed classes are different from those obtained by simply distributing the sum of their frequencies amongst them in proportion diff --git a/vector/v.clean/v.clean.html b/vector/v.clean/v.clean.html index 6da10283703..fcaa9d212c4 100644 --- a/vector/v.clean/v.clean.html +++ b/vector/v.clean/v.clean.html @@ -184,10 +184,10 @@ <h3>Remove small angles between lines at nodes</h3> run with several tools. 
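For example, assuming an input line map named <em>streets</em> (the map names here are only illustrative), the tool shown in the figure below can be invoked as:

<div class="code"><pre>
# remove small angles between lines at nodes
v.clean input=streets output=streets_rmsa tool=rmsa
</pre></div>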
<p> <table><tr><td> -<img src="v_clean_rmsa.png" border=1> +<img src="v_clean_rmsa.png" border="1"> </td></tr> -<tr><td align=center> -<font size=-1><em>tool=rmsa</em></font> +<tr><td align="center"> +<font size="-1"><em>tool=rmsa</em></font> </td></tr> </table> <p> diff --git a/vector/v.cluster/v.cluster.html b/vector/v.cluster/v.cluster.html index 9447538afab..fd70f1ebaa9 100644 --- a/vector/v.cluster/v.cluster.html +++ b/vector/v.cluster/v.cluster.html @@ -20,7 +20,9 @@ <h2>DESCRIPTION</h2> <b>distance</b> or <b>method=density</b> if clusters should be created separately for each observed density (distance to the farthest neighbor). -<h4>dbscan</h4> +<h3>Clustering methods</h3> + +<h4>dbscan method</h4> The <a href="https://en.wikipedia.org/wiki/DBSCAN">Density-Based Spatial Clustering of Applications with Noise</a> is a commonly used clustering algorithm. A new cluster is started for a point with at least @@ -29,12 +31,12 @@ <h4>dbscan</h4> least <i>min</i> - 1 neighbors are within the maximum distance for each point already in the cluster. -<h4>dbscan2</h4> +<h4>dbscan2 method</h4> Similar to <i>dbscan</i>, but here it is sufficient if the resultant cluster consists of at least <b>min</b> points, even if no point in the cluster has at least <i>min - 1</i> neighbors within <b>distance</b>. -<h4>density</h4> +<h4>density method</h4> This method creates clusters according to their point density. The maximum distance is not used. Instead, the points are sorted ascending by the distance to their farthest neighbor (core distance), inspecting @@ -44,9 +46,9 @@ <h4>density</h4> own maximum distance. This method can identify clusters with different densities and can create nested clusters. -<h4>optics</h4> -This method is <a -href="https://en.wikipedia.org/wiki/OPTICS_algorithm">Ordering Points to +<h4>optics method</h4> +This method is +<a href="https://en.wikipedia.org/wiki/OPTICS_algorithm">Ordering Points to Identify the Clustering Structure</a>. It is controlled by the number of neighbor points (option <i>min</i> - 1). The core distance of a point is the distance to the farthest neighbor. The reachability of a @@ -75,7 +77,7 @@ <h4>optics</h4> cluster. The order of the input points is arbitrary and can thus influence the resultant clusters. -<h4>optics2</h4> +<h4>optics2 method</h4> <b>EXPERIMENTAL</b> This method is similar to OPTICS, minimizing the reachability of each point. Points are reconnected if their reachability can be reduced. Contrary to OPTICS, a cluster's seed is @@ -176,6 +178,7 @@ <h2>EXAMPLE</h2> Generate random points for analysis (100 points per area), use different method for clustering and visualize using color stored the attribute table. + <div class="code"><pre> # pick a subregion of the vector urbanarea g.region -p n=272950 s=188330 w=574720 e=703090 res=10 diff --git a/vector/v.colors/v.colors.html b/vector/v.colors/v.colors.html index 440ed5763ec..654413ba040 100644 --- a/vector/v.colors/v.colors.html +++ b/vector/v.colors/v.colors.html @@ -42,7 +42,7 @@ <h2>EXAMPLES</h2> <h3>Define color table based on categories</h3> -Define color table <tt>wave</tt> based on categories from layer 1 +Define color table <code>wave</code> based on categories from layer 1 <div class="code"><pre> v.colors map=soils_general layer=1 color=wave @@ -50,8 +50,8 @@ <h3>Define color table based on categories</h3> <h3>Define color table based on attribute values</h3> -Define color table <tt>ryg</tt> based on values from attribute -column <tt>AREA</tt>. Attribute table is linked to layer 1. 
+Define color table <code>ryg</code> based on values from attribute +column <code>AREA</code>. Attribute table is linked to layer 1. <div class="code"><pre> v.to.db map=soils_general layer=1 option=area column=AREA @@ -60,7 +60,7 @@ <h3>Define color table based on attribute values</h3> <h3>Define color table stored as RGB values in attribute table</h3> -Write color values to the attribute table (column <tt>GRASSRGB</tt>) +Write color values to the attribute table (column <code>GRASSRGB</code>) instead of creating color table. <div class="code"><pre> @@ -145,7 +145,7 @@ <h2>SEE ALSO</h2> page <a href="https://grasswiki.osgeo.org/wiki/Color_tables">Color tables</a> (from GRASS User Wiki) -<p><a href="http://colorbrewer.org">ColorBrewer</a> is an online tool designed to +<p><a href="https://colorbrewer2.org">ColorBrewer</a> is an online tool designed to help people select good color schemes for maps and other graphics. <h2>AUTHORS</h2> diff --git a/vector/v.db.select/v.db.select.html b/vector/v.db.select/v.db.select.html index e75beb5336b..8447259be3b 100644 --- a/vector/v.db.select/v.db.select.html +++ b/vector/v.db.select/v.db.select.html @@ -16,7 +16,7 @@ <h4>Plain text</h4> table using the <em>v.db.select</em> GUI dialog. <p> -The individual fields (attribute values) are separated by a pipe (<tt>|</tt>) +The individual fields (attribute values) are separated by a pipe (<code>|</code>) which can be customized using the <b>separator</b> option. The records (rows) are separated by newlines. @@ -32,8 +32,8 @@ <h4>Plain text</h4> </pre></div> When escaping is enabled, the following characters in the fields are escaped: -backslash (<tt>\\</tt>), carriage return (<tt>\r</tt>), line feed (<tt>\n</tt>), -tabulator (<tt>\t</tt>), form feed (<tt>\f</tt>), and backslash (<tt>\b</tt>). +backslash (<code>\\</code>), carriage return (<code>\r</code>), line feed (<code>\n</code>), +tabulator (<code>\t</code>), form feed (<code>\f</code>), and backslash (<code>\b</code>). <p> No quoting or escaping is performed by default, so if these characters are in @@ -50,7 +50,7 @@ <h4>Plain text</h4> <h4>CSV</h4> CSV (comma-separated values) has many variations. This module by default produces -CSV with comma (<tt>,</tt>) as the field separator (delimiter). All text fields +CSV with comma (<code>,</code>) as the field separator (delimiter). All text fields (based on the type) are quoted with double quotes. Double quotes in fields are represented as two double quotes. Newline characters in the fields are present as-is in the output. Header is included by default containing column names. @@ -95,11 +95,11 @@ <h4>JSON</h4> tabulator, form feed, backslash, and double quote) for string values. Numbers in the database such as integers and doubles are represented as numbers, while texts (TEXT, VARCHAR, etc.) and dates in the database are represented -as strings in JSON. NULL values in database are represented as JSON <tt>null</tt>. +as strings in JSON. NULL values in database are represented as JSON <code>null</code>. Indentation and newlines in the output are minimal and not guaranteed. <p> -Records which are the result of the query are stored under key <tt>records</tt> +Records which are the result of the query are stored under key <code>records</code> as an array (list) of objects (collections of key-value pairs). The keys for attributes are lowercase or uppercase depending on how the columns were defined in the database. 
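For example, assuming a table with an integer column <code>cat</code> and a text column <code>label</code> (column names and values are made up for illustration), the records part of the JSON output has roughly this shape:

<div class="code"><pre>
v.db.select map=mymap format=json
{
  "records": [
    {"cat": 1, "label": "interstate"},
    {"cat": 2, "label": null}
  ]
}
</pre></div>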
@@ -107,7 +107,7 @@ <h4>JSON</h4> <p> The JSON also contains information about columns stored under key <code>info</code>. Column names and types are under key <code>columns</code>. -Each colum has SQL data type under <code>sql_type</code> in all caps. +Each column has SQL data type under <code>sql_type</code> in all caps. A boolean <code>is_number</code> specifies whether the value is a number, i.e., integer or floating point number. The <code>is_number</code> value is aded for convenience and it is recommended to rely on the types derived @@ -174,7 +174,7 @@ <h4>Vertical plain text</h4> the vertical separator (<b>vertical_separator</b> option). <p> -Example with (horizontal) separator <tt>=</tt> and vertical separator <tt>newline</tt>: +Example with (horizontal) separator <code>=</code> and vertical separator <code>newline</code>: <div class="code"><pre> cat=1 diff --git a/vector/v.decimate/v.decimate.html b/vector/v.decimate/v.decimate.html index ceaae540ff1..9f7b0001307 100644 --- a/vector/v.decimate/v.decimate.html +++ b/vector/v.decimate/v.decimate.html @@ -8,16 +8,16 @@ <h2>DESCRIPTION</h2> Two main decimation techniques are: <ul> <li>count-based decimation (<b>skip</b>, <b>preserve</b>, <b>offset</b> - and <b>limit</b> options) -<li>grid-based decimation (<b>-g</b> flag) + and <b>limit</b> options)</li> +<li>grid-based decimation (<b>-g</b> flag)</li> </ul> <p> The grid-based decimation will remove points based on: <ul> -<li>similar z coordinates (<b>-z</b> flag and <b>zdiff</b> option) -<li>same categories (<b>-c</b> flag) -<li>count of points (<b>-f</b> flag and <b>cell_limit</b> option) +<li>similar z coordinates (<b>-z</b> flag and <b>zdiff</b> option)</li> +<li>same categories (<b>-c</b> flag)</li> +<li>count of points (<b>-f</b> flag and <b>cell_limit</b> option)</li> </ul> <p> @@ -49,8 +49,8 @@ <h2>DESCRIPTION</h2> Besides decimation, point count can be reduced by applying different selections or filters, these are: <ul> -<li>selection by category (<b>cats</b> option) -<li>selection by z values (<b>zrange</b> option) +<li>selection by category (<b>cats</b> option)</li> +<li>selection by z values (<b>zrange</b> option)</li> </ul> <h2>NOTES</h2> @@ -138,4 +138,4 @@ <h2>SEE ALSO</h2> <h2>AUTHOR</h2> -Vaclav Petras, <a href="http://geospatial.ncsu.edu/osgeorel/">NCSU OSGeoREL</a> +Vaclav Petras, <a href="https://geospatial.ncsu.edu/geoforall/">NCSU GeoForAll Lab</a> diff --git a/vector/v.delaunay/v.delaunay.html b/vector/v.delaunay/v.delaunay.html index 2c8b8db4531..de033403816 100644 --- a/vector/v.delaunay/v.delaunay.html +++ b/vector/v.delaunay/v.delaunay.html @@ -30,6 +30,8 @@ <h2>SEE ALSO</h2> <a href="v.voronoi.html">v.voronoi</a>, <a href="v.hull.html">v.hull</a> </em> +<p> +<a href="https://en.wikipedia.org/wiki/Delaunay_triangulation">Delaunay triangulation (Wikipedia)</a> <h2>AUTHORS</h2> diff --git a/vector/v.distance/v.distance.html b/vector/v.distance/v.distance.html index 2be5c1bd537..582887d23f3 100644 --- a/vector/v.distance/v.distance.html +++ b/vector/v.distance/v.distance.html @@ -152,10 +152,10 @@ <h3>Univariate statistics on results</h3> Create a vector map containing connecting lines and investigate mean distance to targets. An alternative solution is to use -the <tt>v.distance upload=dist</tt> option to upload distances into +the <code>v.distance upload=dist</code> option to upload distances into the <i>bugs</i> vector directly, then run v.univar on that. Also note -you can upload two columns at a time, e.g. 
<tt>v.distance -upload=cat,dist column=nearest_id,dist_to_nr</tt>. +you can upload two columns at a time, e.g. <code>v.distance +upload=cat,dist column=nearest_id,dist_to_nr</code>. <div class="code"><pre> # create working copy diff --git a/vector/v.edit/v.edit.html b/vector/v.edit/v.edit.html index 081deadf29c..f29b665841b 100644 --- a/vector/v.edit/v.edit.html +++ b/vector/v.edit/v.edit.html @@ -27,7 +27,7 @@ <h2>DESCRIPTION</h2> selects all features (and prints their id's to standard output) covered by two bounding boxes (center at 599505,4921010 and 603389.0625,4918292.1875, size 2*10000) with attribute -<tt>label='interstate'</tt>. +<code>label='interstate'</code>. <h2>NOTES</h2> @@ -46,8 +46,8 @@ <h3>Feature selection</h3> box, size defined by <b>threshold</b>)</li> <li><b>bbox</b> - using bounding box</li> <li><b>polygon</b> - using polygon (at least 3 coordinate pairs have to be set)</li> - <li><b>where</b> - using where statement (attribute data) - <li><b>query</b> - special query (e.g. minimal vector line length) + <li><b>where</b> - using where statement (attribute data)</li> + <li><b>query</b> - special query (e.g. minimal vector line length)</li> </ul> Additional parameters for vector feature specification are: @@ -108,7 +108,7 @@ <h3>Tool description</h3> <li><b>break</b> - Split given vector line or boundary into two lines on location given by <b>coords</b>. If <b>coords</b> not given, breaks all selected lines at each intersection (based - on <em><a href="v.clean.html">v.clean</a></em>, <tt>tool=break</tt>).</li> + on <em><a href="v.clean.html">v.clean</a></em>, <code>tool=break</code>).</li> <li><b>snap</b> - Snap vector features in given threshold. See also module <em><a href="v.clean.html">v.clean</a></em>. Note that this @@ -139,7 +139,7 @@ <h3>Tool description</h3> <li><b>chtype</b> - Change feature type of selected geometry objects. Points are converted to centroids, centroids to points, - lines to boundaries and boundaries to lines. + lines to boundaries and boundaries to lines.</li> <li><b>vertexadd</b> - Add vertex(ces) to the given vector lines or boundaries. Location of the new vertex is given by <b>coord</b> @@ -165,7 +165,7 @@ <h3>Tool description</h3> and <b>zbulk</b> parameter. Also input vector map must be 3D.</li> <li><b>select</b> - Print comma separated list of selected line - id's. No editing is done. + id's. 
No editing is done.</li> </ul> <h2>EXAMPLES</h2> @@ -181,7 +181,7 @@ <h3>Create new vector map</h3> Create new vector map and read data from file 'roads.txt': <div class="code"><pre> -v.out.ascii in=roads format=standard > roads.txt; +v.out.ascii in=roads format=standard > roads.txt; v.edit tool=create map=vectmap input=roads.txt </pre></div> diff --git a/vector/v.external.out/link.c b/vector/v.external.out/link.c index 58e01b035f5..da5ab52a56b 100644 --- a/vector/v.external.out/link.c +++ b/vector/v.external.out/link.c @@ -100,6 +100,7 @@ void make_link(const char *dsn_opt, const char *format, char *option_str, G_verbose_message(_("Switched to PostGIS format")); G_free_key_value(key_val); + G_free(dsn); } int parse_option_pg(const char *option, char **key, char **value) diff --git a/vector/v.external.out/v.external.out.html b/vector/v.external.out/v.external.out.html index f9c3575aa88..9241a6bda43 100644 --- a/vector/v.external.out/v.external.out.html +++ b/vector/v.external.out/v.external.out.html @@ -9,30 +9,30 @@ <h2>DESCRIPTION</h2> <h2>NOTES</h2> -Number of available output formats (<tt>v.external.out -f</tt>) +Number of available output formats (<code>v.external.out -f</code>) depends on OGR installation. 'PostgreSQL' format is presented also when GRASS comes with PostgreSQL support (check for '--with-postgres' -in <tt>g.version -b</tt> output). +in <code>g.version -b</code> output). <p> To store geometry and attribute data in PostGIS database ('PostgreSQL' format) GRASS uses built-in <em>GRASS-PostGIS data provider</em>. PostGIS data can be written also by OGR library -when <tt>GRASS_VECTOR_OGR</tt> environmental variable is defined or +when <code>GRASS_VECTOR_OGR</code> environmental variable is defined or GRASS is not compiled with PostgreSQL support. <p> Creation <b>options</b> refer to the output format specified by <b>format</b> option. See the list of valid creation options -at <a href="https://gdal.org/drivers/vector/">OGR formats +at <a href="https://gdal.org/en/stable/drivers/vector/">OGR formats specification page</a>, example -for <a href="https://gdal.org/drv_shapefile.html">ESRI +for <a href="https://gdal.org/en/stable/drivers/vector/shapefile.html">ESRI Shapefile</a> -or <a href="https://gdal.org/drv_pg.html">PostgreSQL/PostGIS</a> +or <a href="https://gdal.org/en/stable/drivers/vector/pg.html">PostgreSQL/PostGIS</a> format (section "Layer Creation Options"). Options are -comma-separated pairs (<tt>key=value</tt>), the options are +comma-separated pairs (<code>key=value</code>), the options are case-insensitive, -eg. <tt>options="SCHEMA=myschema,FID=cat"</tt>. +eg. <code>options="SCHEMA=myschema,FID=cat"</code>. <h3>PostgreSQL/PostGIS Creation Options</h3> @@ -40,39 +40,39 @@ <h3>PostgreSQL/PostGIS Creation Options</h3> (<b>format=PostgreSQL</b>) supports different creation <b>options</b> compared to PostgreSQL/PostGIS driver from OGR library: <ul> - <li><tt>SCHEMA=<name></tt> - name of schema where to create + <li><code>SCHEMA=<name></code> - name of schema where to create feature tables. 
If schema doesn't exists, it's automatically created when writing PostGIS data.</li> - <li><tt>FID=<column></tt> - name of column which will be used as - primary key (feature id), default: <tt>fid</tt></li> - <li><tt>GEOMETRY_NAME=<column></tt> name of column which will - be used for storing geometry data in feature table, default: <tt>geom</tt></li> - <li><tt>SPATIAL_INDEX=YES|NO</tt> - enable/disable spatial index on geometry column, default: YES</li> - <li><tt>PRIMARY_KEY=YES|NO</tt> - enable/disable primary key on FID column, default: YES</li> - <li><tt>SRID=<value></tt> - spatial reference identifier, + <li><code>FID=<column></code> - name of column which will be used as + primary key (feature id), default: <code>fid</code></li> + <li><code>GEOMETRY_NAME=<column></code> name of column which will + be used for storing geometry data in feature table, default: <code>geom</code></li> + <li><code>SPATIAL_INDEX=YES|NO</code> - enable/disable spatial index on geometry column, default: YES</li> + <li><code>PRIMARY_KEY=YES|NO</code> - enable/disable primary key on FID column, default: YES</li> + <li><code>SRID=<value></code> - spatial reference identifier, default: not defined</li> - <li><tt>TOPOLOGY=YES|NO</tt> - enable/disable + <li><code>TOPOLOGY=YES|NO</code> - enable/disable native <a href="https://grasswiki.osgeo.org/wiki/PostGIS_Topology">PostGIS topology</a>, default: NO</li> </ul> -Options relevant only to topological output (<tt>TOPOLOGY=YES</tt>): +Options relevant only to topological output (<code>TOPOLOGY=YES</code>): <ul> - <li><tt>TOPOSCHEMA_NAME=<schema name></tt> - name of PostGIS - Topology schema (relevant only for <tt>TOPOLOGY=YES</tt>), - default: <tt>topo_<input></tt></li> - <li><tt>TOPOGEOM_NAME=<column></tt> - name of column which + <li><code>TOPOSCHEMA_NAME=<schema name></code> - name of PostGIS + Topology schema (relevant only for <code>TOPOLOGY=YES</code>), + default: <code>topo_<input></code></li> + <li><code>TOPOGEOM_NAME=<column></code> - name of column which will be used for storing topogeometry data in feature table, - default: <tt>topo</tt></li> - <li><tt>TOPO_TOLERANCE=<value></tt> - tolerance for PostGIS + default: <code>topo</code></li> + <li><code>TOPO_TOLERANCE=<value></code> - tolerance for PostGIS Topology schema, - see <a href="http://www.postgis.net/docs/manual-2.0/CreateTopology.html">CreateTopology</a> - function for defails, default: <tt>0</tt></li> - <li><tt>TOPO_GEO_ONLY=YES|NO</tt> - store in PostGIS Topology schema - only data relevant to Topo-Geo data model, default: <tt>NO</tt></li> - <li><tt>SIMPLE_FEATURE=YES|NO</tt> - build simple features geometry - in <tt>GEOMETRY_NAME</tt> column from topogeometry data, default: + see <a href="https://postgis.net/docs/CreateTopology.html">CreateTopology</a> + function for defails, default: <code>0</code></li> + <li><code>TOPO_GEO_ONLY=YES|NO</code> - store in PostGIS Topology schema + only data relevant to Topo-Geo data model, default: <code>NO</code></li> + <li><code>SIMPLE_FEATURE=YES|NO</code> - build simple features geometry + in <code>GEOMETRY_NAME</code> column from topogeometry data, default: NO</li> </ul> @@ -122,7 +122,7 @@ <h3>PostGIS (simple features)</h3> # do some processing... </pre></div> -<i>Note:</i> If the environment variable <tt>GRASS_VECTOR_OGR</tt> +<i>Note:</i> If the environment variable <code>GRASS_VECTOR_OGR</code> is defined, or GRASS is compiled without PostgreSQL support then GRASS will use PostgreSQL driver from OGR library for reading and writing PostGIS data. 
@@ -181,7 +181,7 @@ <h2>REFERENCES</h2> <ul> <li><a href="https://trac.osgeo.org/grass/wiki/Grass7/VectorLib/OGRInterface">GRASS-OGR data provider</a></li> - <li><a href="https://gdal.org/api/">OGR vector library C API</a> documentation</li> + <li><a href="https://gdal.org/en/stable/api/">OGR vector library C API</a> documentation</li> <li><a href="https://trac.osgeo.org/grass/wiki/Grass7/VectorLib/PostGISInterface">GRASS-PostGIS data provider</a></li> <li><a href="https://www.postgresql.org/docs/9.1/static/libpq.html">libpq - C Library</a></li> </ul> diff --git a/vector/v.external/v.external.html b/vector/v.external/v.external.html index 200d3726363..da39db7dc0f 100644 --- a/vector/v.external/v.external.html +++ b/vector/v.external/v.external.html @@ -47,7 +47,7 @@ <h3>Supported OGR vector formats</h3> </pre></div> For details see -<a href="https://gdal.org/drivers/vector/">GDAL web site</a>. +<a href="https://gdal.org/en/stable/drivers/vector/">GDAL web site</a>. <h2>EXAMPLES</h2> @@ -64,7 +64,7 @@ <h3>PostGIS layers</h3> By default, PostGIS links are created by built-in PostGIS support, ie. using <em>GRASS-PostGIS data driver</em>. If the environment -variable <tt>GRASS_VECTOR_OGR</tt> exists, or GRASS is compiled +variable <code>GRASS_VECTOR_OGR</code> exists, or GRASS is compiled without PostgreSQL support then GRASS will use OGR-PostgreSQL driver for creating a link. @@ -124,7 +124,7 @@ <h3>Linking subset of features</h3> <h2>REFERENCES</h2> -<a href="https://gdal.org/api/">OGR vector library C +<a href="https://gdal.org/en/stable/api/">OGR vector library C API</a> documentation <h2>SEE ALSO</h2> @@ -142,7 +142,7 @@ <h2>SEE ALSO</h2> <p> <a href="https://gdal.org/">GDAL Library</a> <br> -<a href="http://postgis.org/">PostGIS</a> +<a href="https://postgis.net/">PostGIS</a> <p> See diff --git a/vector/v.hull/v.hull.html b/vector/v.hull/v.hull.html index e77a028e905..68a85a8ab15 100644 --- a/vector/v.hull/v.hull.html +++ b/vector/v.hull/v.hull.html @@ -54,9 +54,9 @@ <h2>REFERENCES</h2> <ul> <li>M. de Berg, M. van Kreveld, M. Overmars, O. Schwarzkopf, - (2000). Computational geometry, chapter 1.1, 2-8. + (2000). Computational geometry, chapter 1.1, 2-8.</li> <li>J. O'Rourke, (1998). Computational Geometry in C (Second - Edition), chapter 4. + Edition), chapter 4.</li> </ul> <h2>SEE ALSO</h2> diff --git a/vector/v.in.ascii/points.c b/vector/v.in.ascii/points.c index 0fd1899c2e2..ab1720f78d7 100644 --- a/vector/v.in.ascii/points.c +++ b/vector/v.in.ascii/points.c @@ -494,6 +494,9 @@ int points_to_bin(FILE *ascii, int rowlen, struct Map_info *Map, G_free_tokens(tokens); } G_percent(nrows, nrows, 2); + Vect_destroy_line_struct(Points); + Vect_destroy_cats_struct(Cats); + G_free(buf); return 0; } diff --git a/vector/v.in.ascii/v.in.ascii.html b/vector/v.in.ascii/v.in.ascii.html index cd1ab9c7cf1..b6f0b24aa1f 100644 --- a/vector/v.in.ascii/v.in.ascii.html +++ b/vector/v.in.ascii/v.in.ascii.html @@ -24,9 +24,9 @@ <h2>NOTES</h2> The input is read from the file specified by the <b>input</b> option or from standard input. -<p>The field separator may be a character, the word '<tt>tab</tt>' -(or '<tt>\t</tt>') for tab, '<tt>space</tt>' (or ' ') for a blank, -or '<tt>comma</tt>' (or ',') for a comma. +<p>The field separator may be a character, the word '<code>tab</code>' +(or '<code>\t</code>') for tab, '<code>space</code>' (or ' ') for a blank, +or '<code>comma</code>' (or ',') for a comma. <p>An attribute table is only created if it is needed, i.e. 
when at least one attribute column is present in the input file besides @@ -48,15 +48,15 @@ <h2>NOTES</h2> <p>If old version is requested, the <b>output</b> files from <em><a href="v.out.ascii.html">v.out.ascii</a></em> is placed in -the <tt>$LOCATION/$MAPSET/dig_ascii/</tt> -and <tt>$LOCATION/$MAPSET/dig_att</tt> directory. +the <code>$LOCATION/$MAPSET/dig_ascii/</code> +and <code>$LOCATION/$MAPSET/dig_att</code> directory. <h3>Import of files without category column</h3> If the input file does not contain a category column, there is the possibility to auto-generate these IDs (categories). To automatically add an additional column named 'cat', the <b>cat</b> parameter must be -set to the virtual column number 0 (<tt>cat=0</tt>). This is the +set to the virtual column number 0 (<code>cat=0</code>). This is the default action if the <b>cat</b> parameter is not set. <h3>Importing from a spreadsheet</h3> @@ -68,9 +68,9 @@ <h3>Importing from a spreadsheet</h3> contains any header lines, such as column headings, the <b>skip</b> parameter should be used. These skipped header lines will be written to the map's history file for later reference (read with -<tt>v.info -h</tt>). The skip option only works in <b>points</b> mode. +<code>v.info -h</code>). The skip option only works in <b>points</b> mode. -<p>Any line starting with the hash character ('<tt>#</tt>') will be treated as +<p>Any line starting with the hash character ('<code>#</code>') will be treated as a comment and skipped completely if located in the main data file. If located in the header, as defined by the <b>skip</b> parameter, it will be treated as a header line and written to the history file. @@ -82,7 +82,7 @@ <h3>Import of sexagesimal degree (degree, minutes, seconds, DMS)</h3> the positions are internally translated into decimal degrees during the import, the original DMS values are maintained in the attribute table. This requires both the latitude and the longitude columns to be -defined as <tt>varchar()</tt>, not as numbers. A warning will be +defined as <code>varchar()</code>, not as numbers. A warning will be issued which can be ignored. See <a href="vectorascii.html">GRASS ASCII vector format specification</a> for details. @@ -280,7 +280,7 @@ <h3>Example 8 - point format mode</h3> <h2>REFERENCES</h2> -<a href="sql.html">SQL command notes</a> for creating databases +<a href="sql.html">SQL command notes</a> for creating databases, <br> <a href="vectorascii.html">GRASS ASCII vector format</a> specification diff --git a/vector/v.in.db/v.in.db.html b/vector/v.in.db/v.in.db.html index a51fd936f93..00a81365acc 100644 --- a/vector/v.in.db/v.in.db.html +++ b/vector/v.in.db/v.in.db.html @@ -114,7 +114,7 @@ <h3>Creating a point map from DBF table for selected records only</h3> <div class="code"><pre> v.in.db driver=dbf database=/home/user/tables/ table=pointsfile x=x y=y z=z \ - key=idcol out=dtmpoints where="x NOT NULL and z > 100" + key=idcol out=dtmpoints where="x NOT NULL and z > 100" </pre></div> <h3>Creating a map from SQLite table</h3> diff --git a/vector/v.in.lidar/testsuite/mask_test.py b/vector/v.in.lidar/testsuite/mask_test.py index 407c02224a8..99244e72a23 100644 --- a/vector/v.in.lidar/testsuite/mask_test.py +++ b/vector/v.in.lidar/testsuite/mask_test.py @@ -9,6 +9,11 @@ for details. 
""" +import os + +from grass.gunittest.case import TestCase +from grass.gunittest.main import test + POINTS = """\ 17.46938776,18.67346939,1 20.93877551,17.44897959,2 @@ -77,11 +82,6 @@ """ -import os -from grass.gunittest.case import TestCase -from grass.gunittest.main import test - - class VectorMaskTest(TestCase): """Test case for watershed module diff --git a/vector/v.in.lidar/v.in.lidar.html b/vector/v.in.lidar/v.in.lidar.html index 72177b9f5ed..a71c02ee101 100644 --- a/vector/v.in.lidar/v.in.lidar.html +++ b/vector/v.in.lidar/v.in.lidar.html @@ -1,7 +1,7 @@ <h2>DESCRIPTION</h2> <em>v.in.lidar</em> converts LiDAR point clouds in LAS format to a GRASS -vector, using the <a href="http://www.liblas.org">libLAS</a> library. +vector, using the <a href="https://liblas.org">libLAS</a> library. The created vector is true 3D with x, y, z coordinates. <p> @@ -80,8 +80,8 @@ <h2>Project Creation</h2> information, and if the LAS driver supports it. If the source dataset CRS does not match the CRS of the current project (previously called location) <em>v.in.lidar</em> will -report an error message ("<tt>Coordinate reference system of dataset does not appear to -match current project</tt>") and then report the PROJ_INFO parameters of +report an error message ("<code>Coordinate reference system of dataset does not appear to +match current project</code>") and then report the PROJ_INFO parameters of the source dataset. <p>If the user wishes to ignore the difference between the coordinate @@ -105,30 +105,28 @@ <h2>NOTES</h2> <h2>EXAMPLE</h2> -This example is analogous to the example used in the GRASS wiki page for -<a href="https://grasswiki.osgeo.org/wiki/LIDAR#Import_LAS_as_vector_points">importing LAS as vector points</a>. -<p>The sample LAS data are in the file "Serpent Mound Model LAS Data.las", +The sample LAS data are in the file "Serpent Mound Model LAS Data.laz", available at -<a href="http://www.appliedimagery.com/downloads/sampledata/Serpent%20Mound%20Model%20LAS%20Data.las">appliedimagery.com</a> +<a href="https://github.com/PDAL/data/raw/4ee9ee43b195268a59113555908c1c0cdf955bd4/liblas/Serpent%20Mound%20Model%20LAS%20Data.laz">Serpent Mound Model LAS Data.laz</a> <div class="code"><pre> - # print LAS file info - v.in.lidar -p input="Serpent Mound Model LAS Data.las" +# print LAS file info +v.in.lidar -p input="Serpent Mound Model LAS Data.laz" - # create a project with CRS information of the LAS data - v.in.lidar -i input="Serpent Mound Model LAS Data.las" project=Serpent_Mound +# create a project with CRS information of the LAS data +v.in.lidar -i input="Serpent Mound Model LAS Data.laz" project=Serpent_Mound - # quit and restart GRASS in the newly created project "Serpent_Mound" - # real import of LiDAR LAS data, without topology and without attribute table - v.in.lidar -tb input="Serpent Mound Model LAS Data.las" output=Serpent_Mound_Model_pts +# quit and restart GRASS in the newly created project "Serpent_Mound" +# real import of LiDAR LAS data, without topology and without attribute table +v.in.lidar -tb input="Serpent Mound Model LAS Data.laz" output=Serpent_Mound_Model_pts </pre></div> <h2>REFERENCES</h2> <a href="https://www.asprs.org/committee-general/laser-las-file-format-exchange-activities.html"> ASPRS LAS format</a><br> -<a href="http://www.liblas.org/">LAS library</a> <br> -<a href="http://test.liblas.org/doxygen/liblas_8h.htm">LAS library C API</a> documentation +<a href="https://liblas.org/">LAS library</a> <br> +<a href="https://liblas.org/doxygen/liblas_8h.html">LAS 
library C API</a> documentation <h2>SEE ALSO</h2> diff --git a/vector/v.in.ogr/v.in.ogr.html b/vector/v.in.ogr/v.in.ogr.html index 3a461969663..e899a8a5701 100644 --- a/vector/v.in.ogr/v.in.ogr.html +++ b/vector/v.in.ogr/v.in.ogr.html @@ -23,11 +23,11 @@ <h3>Supported Vector Formats</h3> <em>v.in.ogr</em> uses the OGR library which supports various vector data formats including -<a href="https://gdal.org/drivers/vector/shapefile.html">ESRI Shapefile</a>, -<a href="https://gdal.org/drivers/vector/mitab.html">Mapinfo File</a>, UK .NTF, +<a href="https://gdal.org/en/stable/drivers/vector/shapefile.html">ESRI Shapefile</a>, +<a href="https://gdal.org/en/stable/drivers/vector/mitab.html">Mapinfo File</a>, UK .NTF, SDTS, TIGER, IHO S-57 (ENC), DGN, GML, GPX, AVCBin, REC, Memory, OGDI, and PostgreSQL, depending on the local OGR installation. For details -see the <a href="https://gdal.org/drivers/vector/">OGR format overview</a>. +see the <a href="https://gdal.org/en/stable/drivers/vector/">OGR format overview</a>. The <b>-f</b> prints a list of the vector formats supported by the system's OGR (Simple Features Library). The OGR (Simple Features Library) is part of the <a href="https://gdal.org">GDAL</a> library, @@ -142,7 +142,7 @@ <h3>File encoding</h3> ISO8859_1 which may not be appropriate for many languages. Unfortunately it is not clear what other values may be appropriate (see example below). To change encoding the user can set -up <tt><a href="https://gdal.org/user/configoptions.html">SHAPE_ENCODING</a></tt> +up <code><a href="https://gdal.org/en/stable/user/configoptions.html">SHAPE_ENCODING</a></code> environmental variable or simply to define encoding value using <b>encoding</b> parameter. @@ -257,7 +257,7 @@ <h3>PostGIS tables</h3> </pre></div> Generally, <em>v.in.ogr</em> just follows the -<a href="https://gdal.org/drivers/vector/">format-specific</a> +<a href="https://gdal.org/en/stable/drivers/vector/">format-specific</a> syntax defined by the OGR library. @@ -265,14 +265,14 @@ <h3>PostGIS tables</h3> <h3>Default connection settings as datasource (PostgreSQL only)</h3> If datasource (<b>input</b>) is specified as 'PG:' and the default DB -driver is <a href="grass-pg.html">PostgreSQL</a> (<tt>pg</tt>) than +driver is <a href="grass-pg.html">PostgreSQL</a> (<code>pg</code>) than the connection string is determined from the default DB settings, see examples below. <p> For schema support, first set a default schema with <em><a href="db.connect.html">db.connect</a></em>. If schema support -is used the schema name must be specified whenever a <tt>db.*</tt> +is used the schema name must be specified whenever a <code>db.*</code> module is called. User and password for connection to the database can be specified by <em><a href="db.login.html">db.login</a></em>. @@ -301,27 +301,27 @@ <h3>Default connection settings as datasource (PostgreSQL only)</h3> <h3>OpenStreetMap (OSM)</h3> -<a href="https://gdal.org/drivers/vector/osm.html">OSM data</a> are available in +<a href="https://gdal.org/en/stable/drivers/vector/osm.html">OSM data</a> are available in .osm (XML based) and .pbf (optimized binary) formats. The .pbf format is recommended because file sizes are smaller. The OSM driver will categorize features into 5 layers : <ul> - <li><b>points</b>: "node" features that have significant tags attached. - <li><b>lines</b>: "way" features that are recognized as non-area. 
+ <li><b>points</b>: "node" features that have significant tags attached.</li> + <li><b>lines</b>: "way" features that are recognized as non-area.</li> <li><b>multilinestrings</b>: "relation" features that form a -multilinestring(type = 'multilinestring' or type = 'route'). +multilinestring(type = 'multilinestring' or type = 'route').</li> <li><b>multipolygons</b>: "relation" features that form a multipolygon (type = 'multipolygon' or type = 'boundary'), and "way" features that are -recognized as area. +recognized as area.</li> <li><b>other_relations</b>: "relation" features that do -not belong to any of the above layers. +not belong to any of the above layers.</li> </ul> It is recommended to import one layer at a time, and to select features with the <b>where</b> option, e.g. to import roads, use <div class="code"><pre> -v.in.ogr where="highway <> ''" +v.in.ogr where="highway >< ''" </pre></div> i.e. the OSM tag <em>highway</em> must be set. @@ -357,8 +357,8 @@ <h3>OpenStreetMap (OSM)</h3> <h3>Oracle Spatial</h3> -Note that you have to set the environment-variables <tt>ORACLE_BASE, -ORACLE_SID, ORACLE_HOME</tt> and <tt>TNS_ADMIN</tt> accordingly. +Note that you have to set the environment-variables <code>ORACLE_BASE, +ORACLE_SID, ORACLE_HOME</code> and <code>TNS_ADMIN</code> accordingly. <div class="code"><pre> v.in.ogr input=OCI:username/password@database_instance output=grasslayer layer=roads_oci @@ -461,7 +461,7 @@ <h2>REFERENCES</h2> <ul> <li><a href="https://gdal.org/">OGR vector library</a></li> - <li><a href="https://gdal.org/api/vector_c_api.html">OGR vector library C API</a> documentation</li> + <li><a href="https://gdal.org/en/stable/api/vector_c_api.html">OGR vector library C API</a> documentation</li> </ul> <h2>SEE ALSO</h2> diff --git a/vector/v.in.pdal/v.in.pdal.html b/vector/v.in.pdal/v.in.pdal.html index 51fc849a184..d5832c93cff 100644 --- a/vector/v.in.pdal/v.in.pdal.html +++ b/vector/v.in.pdal/v.in.pdal.html @@ -5,10 +5,10 @@ <h2>DESCRIPTION</h2> <em>v.in.pdal</em> supports the following filters: <ul> -<li>2D region filter -<li>Z coordinates filter -<li>return filter -<li>class filter +<li>2D region filter</li> +<li>Z coordinates filter</li> +<li>return filter</li> +<li>class filter</li> </ul> <h2>EXAMPLES</h2> @@ -27,8 +27,8 @@ <h2>REFERENCES</h2> V. Petras, A. Petrasova, J. Jeziorska, H. Mitasova (2016): <em>Processing UAV and lidar point clouds in GRASS GIS</em>. 
XXIII ISPRS Congress 2016 - [<a href="http://www.int-arch-photogramm-remote-sens-spatial-inf-sci.net/XLI-B7/945/2016/">ISPRS Archives</a>, - <a href="https://www.researchgate.net/publication/304340172_Processing_UAV_and_lidar_point_clouds_in_GRASS_GIS">ResearchGate</a>] + [<a href="https://doi.org/10.5194/isprs-archives-XLI-B7-945-2016">ISPRS Archives</a>, + <a href="https://www.researchgate.net/publication/304340172_Processing_UAV_and_lidar_point_clouds_in_GRASS_GIS">ResearchGate</a>]</li> </ul> <h2>SEE ALSO</h2> @@ -36,11 +36,11 @@ <h2>SEE ALSO</h2> <em> <a href="r.in.pdal.html">r.in.pdal</a>, <a href="g.region.html">g.region</a>, -<a href="v.vect.stats.html">v.vect.stats</a> -<a href="v.in.ogr.html">v.in.ogr</a>, +<a href="v.vect.stats.html">v.vect.stats</a>, +<a href="v.in.ogr.html">v.in.ogr</a> </em> <h2>AUTHOR</h2> Vaclav Petras, -<a href="http://geospatial.ncsu.edu/osgeorel/">NCSU GeoForAll Lab</a> +<a href="https://geospatial.ncsu.edu/geoforall/">NCSU GeoForAll Lab</a> diff --git a/vector/v.info/v.info.html b/vector/v.info/v.info.html index 05e7903ac2d..19549d2744b 100644 --- a/vector/v.info/v.info.html +++ b/vector/v.info/v.info.html @@ -77,7 +77,7 @@ <h3>Attribute columns for given layer</h3> <div class="code"><pre> v.info -c map=geology -Displaying column types/names for database connection of layer <1>: +Displaying column types/names for database connection of layer <1>: INTEGER|cat DOUBLE PRECISION|onemap_pro DOUBLE PRECISION|PERIMETER diff --git a/vector/v.kcv/v.kcv.html b/vector/v.kcv/v.kcv.html index 3e5b1b8a663..b82ae2d5bb7 100644 --- a/vector/v.kcv/v.kcv.html +++ b/vector/v.kcv/v.kcv.html @@ -47,8 +47,8 @@ <h2>EXAMPLES</h2> <h2>SEE ALSO</h2> <em> -<a href=v.random.html>v.random</a>, -<a href=g.region.html>g.region</a> +<a href="v.random.html">v.random</a>, +<a href="g.region.html">g.region</a> </em> <h2>AUTHORS</h2> diff --git a/vector/v.kernel/v.kernel.html b/vector/v.kernel/v.kernel.html index 3a5a0162129..9ef78fb20bd 100644 --- a/vector/v.kernel/v.kernel.html +++ b/vector/v.kernel/v.kernel.html @@ -1,11 +1,10 @@ <h2>DESCRIPTION</h2> <em>v.kernel</em> generates a raster density map from vector points -data using a moving -kernel. Available <a href="https://en.wikipedia.org/wiki/Kernel_(statistics)#Kernel_functions_in_common_use">kernel +data using a moving kernel. Available <a href="https://en.wikipedia.org/wiki/Kernel_(statistics)#Kernel_functions_in_common_use">kernel density functions</a> are <em>uniform, triangular, epanechnikov, -quartic, triweight, gaussian, cosine</em>, default -is <em>gaussian</em>. +quartic, triweight, gaussian, cosine</em>. The default function is <em>gaussian</em>. + <p>The module can also generate a vector density map on a vector network. Conventional kernel functions produce biased estimates by overestimating the densities around network nodes, whereas the equal split method of @@ -38,7 +37,7 @@ <h2>EXAMPLES</h2> </pre></div> <center> -<img src="v_kernel.png" alt="Density of schools" border=0><br> +<img src="v_kernel.png" alt="Density of schools" border="0"><br> School density </center> @@ -54,7 +53,7 @@ <h2>REFERENCES</h2> method for networks, its computational method and a GIS-based tool</i>. <b>International Journal of Geographical Information Science</b>, Vol 23(1), pp. 
7-32.<br> -DOI: <a href="https://doi.org/10.1080/13658810802475491">10.1080/13658810802475491</a> +DOI: <a href="https://doi.org/10.1080/13658810802475491">10.1080/13658810802475491</a></li> </ul> <h2>SEE ALSO</h2> diff --git a/vector/v.label/v.label.html b/vector/v.label/v.label.html index 1f05cf845ff..f697a882a9a 100644 --- a/vector/v.label/v.label.html +++ b/vector/v.label/v.label.html @@ -12,7 +12,7 @@ <h2>NOTES</h2> as the <em>size</em> option. <p> A description of the labels file follows.<p> -The file is located in <tt>$MAPSET/paint/labels/</tt>. +The file is located in <code>$MAPSET/paint/labels/</code>. The file is a plain-text ASCII file containing the following fields: <h4>Caution: The following information may be incomplete, out of date, and wrong!</h4> @@ -24,8 +24,8 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong <dd><!--Up to four lines of text.--> Lines in multiple line labels will appear one above the next. More than one line of text can be specified by notating the end of a -line with a '<b><tt>\n</tt></b>'.<br> -(e.g. <tt>SPEARFISH<b>\n</b>SOUTH DAKOTA)</tt>. +line with a '<b><code>\n</code></b>'.<br> +(e.g. <code>SPEARFISH<b>\n</b>SOUTH DAKOTA)</code>. <!-- I don't think it exists. -HB 6/2005 <dt><b>SKIP</b>: @@ -56,15 +56,15 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong may be specified as: <div class="code"><pre> - lower left (lower left corner of the text) - lower right (lower right corner of the text) - lower center (bottom center of the text) + lower left (lower left corner of the text) + lower right (lower right corner of the text) + lower center (bottom center of the text) - upper left (upper left corner of the text) - upper right (upper right corner of the text) - upper center (top center of the text) + upper left (upper left corner of the text) + upper right (upper right corner of the text) + upper center (top center of the text) - center (center of the text) + center (center of the text) </pre></div> @@ -97,21 +97,19 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong Alternatively <em>fontsize</em> can set the font size in normal font points. -<dt><A NAME="textcolor"><b>TEXT COLOR</b></a>: +<dt><a name="textcolor"><b>TEXT COLOR</b></a>: <dd>This selects the text color. If unspecified, the label's text is drawn in <em>black</em>, by default. The text color can be specified in one of several ways: <ol> -<li>By color name: - -<br> -<tt>aqua black blue brown cyan gray green grey indigo -magenta orange purple red violet white yellow</tt> +<li>By color name:<br> +<code>aqua black blue brown cyan gray green grey indigo +magenta orange purple red violet white yellow</code></li> <li>As red, green, blue component values. (0-255)<br> -for example: <tt>128:100:200</tt> +for example: <code>128:100:200</code></li> <!-- eh? <li>As red, green, blue percentages. @@ -126,7 +124,7 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong <em><a href="d.labels.html">d.labels</a></em>.) --> -<li>Specify "<tt>none</tt>" to suppress the lettering. +<li>Specify "<code>none</code>" to suppress the lettering.</li> </ol> @@ -141,9 +139,9 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong appears to be in two colors. The text is drawn first in this color at a wider line width, and then redrawn in the text color at the regular line width. 
No highlight color -("<tt>none</tt>") is used by default, if unspecified by the +("<code>none</code>") is used by default, if unspecified by the user. To specify use of no highlight color, specify -"<tt>none</tt>". +"<code>none</code>". (See <a href="#textcolor">TEXT COLOR</a> above for a list of permissible color names.) @@ -159,7 +157,7 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong <dt><b>BACKGROUND COLOR</b>: <dd>Text may be boxed in a solid color by specifying a background color. -Specify "<tt>none</tt>" for no background. The default background color +Specify "<code>none</code>" for no background. The default background color setting, if unspecified by the user, is <em>white</em>. (See <a href="#textcolor">TEXT COLOR</a> above for a list of permissible color names.) @@ -168,7 +166,7 @@ <h4>Caution: The following information may be incomplete, out of date, and wrong <dt><b>BORDER COLOR</b>: <dd>Select a color for the border around the background. -Specify "<tt>none</tt>" to suppress the border. +Specify "<code>none</code>" to suppress the border. The default border color used, if unspecified, is <em>black</em>. (See <a href="#textcolor">TEXT COLOR</a> above for a list of permissible color names.) @@ -199,11 +197,11 @@ <h2>EXAMPLE</h2> Since the label files are simple text files, you can merge them together if you like. For example if you set the label colors based on database attributes using multiple runs with the <b>where</b> option. -This example uses the standard UNIX <tt>cat</tt> program. +This example uses the standard UNIX <code>cat</code> program. <div class="code"><pre> cd $MAPSET/paint/labels/ -cat file1 file2 file3 file4 > file_all +cat file1 file2 file3 file4 > file_all </pre></div> <h2>SEE ALSO</h2> diff --git a/vector/v.lidar.correction/v.lidar.correction.html b/vector/v.lidar.correction/v.lidar.correction.html index 5e45edc02f6..29d1bab645d 100644 --- a/vector/v.lidar.correction/v.lidar.correction.html +++ b/vector/v.lidar.correction/v.lidar.correction.html @@ -90,7 +90,7 @@ <h2>REFERENCES</h2> <br> <br> Performances of the filter can be seen in the -<a href="http://www.itc.nl/isprswgIII-3/filtertest/MainDoc.htm">ISPRS WG III/3 Comparison of Filters</a> +<a href="https://www.itc.nl/isprs/wgIII-3/filtertest/">ISPRS WG III/3 Comparison of Filters</a> report by Sithole, G. and Vosselman, G., 2003. <h2>SEE ALSO</h2> diff --git a/vector/v.lidar.edgedetection/v.lidar.edgedetection.html b/vector/v.lidar.edgedetection/v.lidar.edgedetection.html index 08fd672d266..77f0efe6e0b 100644 --- a/vector/v.lidar.edgedetection/v.lidar.edgedetection.html +++ b/vector/v.lidar.edgedetection/v.lidar.edgedetection.html @@ -142,7 +142,7 @@ <h2>REFERENCES</h2> <li>Brovelli M. A., Cannata M. and Longoni U.M., 2002. DTM LIDAR in area urbana, Bollettino SIFET N.2, pp. 7-26.</li> <li>Performances of the filter can be seen in the -<a href="http://www.itc.nl/isprswgIII-3/filtertest/MainDoc.htm">ISPRS WG III/3 Comparison of Filters</a> +<a href="https://www.itc.nl/isprs/wgIII-3/filtertest/">ISPRS WG III/3 Comparison of Filters</a> report by Sithole, G. and Vosselman, G., 2003.</li> </ul> diff --git a/vector/v.lidar.growing/v.lidar.growing.html b/vector/v.lidar.growing/v.lidar.growing.html index d98309eb59c..58c76c59fb2 100644 --- a/vector/v.lidar.growing/v.lidar.growing.html +++ b/vector/v.lidar.growing/v.lidar.growing.html @@ -68,7 +68,7 @@ <h2>REFERENCES</h2> Bollettino SIFET N.2, pp. 7-26. 
<p> Performances of the filter can be seen in the -<a href="http://www.itc.nl/isprswgIII-3/filtertest/MainDoc.htm">ISPRS WG III/3 Comparison of Filters</a> +<a href="https://www.itc.nl/isprs/wgIII-3/filtertest/">ISPRS WG III/3 Comparison of Filters</a> report by Sithole, G. and Vosselman, G., 2003. <h2>SEE ALSO</h2> diff --git a/vector/v.lrs/lrs.html b/vector/v.lrs/lrs.html index 027f72d44ce..63d99b6c1fd 100644 --- a/vector/v.lrs/lrs.html +++ b/vector/v.lrs/lrs.html @@ -70,8 +70,8 @@ <h3>Double referenced system</h3> must be entered to the system and it is done by optional MP attributes: <ul> -<li>end_mp - end MP -<li>end_off - end offset +<li>end_mp - end MP</li> +<li>end_off - end offset</li> </ul> In this case original MP on km 4 will have these attributes: <div class="code"><pre> @@ -100,7 +100,7 @@ <h3>Double referenced system</h3> <h3>LRS table structure</h3> -<table border=1> +<table border="1"> <tr><td><b>Attribute</b></td><td><b>Type</b></td><td><b>Description</b></td></tr> <tr><td>rsid</td><td> integer</td><td> reference segment ID, unique in the table</td></tr> <tr><td>lcat</td><td> integer</td><td> category of the line in the LRS map</td></tr> @@ -116,12 +116,12 @@ <h3>LRS table structure</h3> <h3>Available commands</h3> <ul> -<li><a href="v.lrs.create.html">v.lrs.create</a> to create a linear referencing system, -<li><a href="v.lrs.label.html">v.lrs.label</a> to create stationing on the LRS, +<li><a href="v.lrs.create.html">v.lrs.create</a> to create a linear referencing system,</li> +<li><a href="v.lrs.label.html">v.lrs.label</a> to create stationing on the LRS,</li> <li><a href="v.lrs.segment.html">v.lrs.segment</a> to create points/segments on LRS, - and + and</li> <li><a href="v.lrs.where.html">v.lrs.where</a> to find line id and real km+offset -for given points in vector map using linear referencing system. +for given points in vector map using linear referencing system.</li> </ul> <h3>Input lines for v.lrs.segment and v.lrs.label</h3> @@ -157,24 +157,26 @@ <h2>NOTES</h2> Explanations of selected options: <ul> <li>llayer: vector layer in line map (usually 1; see <a href="vectorintro.html">vectorintro</a> - for "layer" concept) + for "layer" concept)</li> <li>player: vector layer in point map (usually 1; see <a href="vectorintro.html">vectorintro</a> - for "layer" concept) -<li>rsdriver: Driver name for LRS table - DBMI SQL driver (dbf, pg, mysql, sqlite, etc) -<li>rsdatabase: Database name for LRS table - DBMI SQL database name (e.g., "lrsdb") -<li>rstable: Name of the LRS table - DBMI SQL table name (e.g., "streamslrs") + for "layer" concept)</li> +<li>rsdriver: Driver name for LRS table - DBMI SQL driver (dbf, pg, mysql, sqlite, etc)</li> +<li>rsdatabase: Database name for LRS table - DBMI SQL database name (e.g., "lrsdb")</li> +<li>rstable: Name of the LRS table - DBMI SQL table name (e.g., "streamslrs")</li> </ul> <h2>SEE ALSO</h2> -<em>R. Blazek, 2004, <a href="http://gisws.media.osaka-cu.ac.jp/grass04/viewpaper.php?id=50">Introducing the Linear Reference System in GRASS</a>, Bangkok, GRASS User Conf. Proc.</em><br> -<em>R. Blazek, 2005, <a href="http://creativecity.gscc.osaka-cu.ac.jp/IJG/article/download/320/321">Introducing the Linear Reference System in GRASS</a>, International Journal of Geoinformatics, Vol. 1(3), pp. 95-100</em><br> +<em>R. Blazek, 2004, <a href="https://foss4g.asia/2004/Full%20Paper_PDF/Introducing%20the%20Linear%20Reference%20System%20in%20GRASS.pdf">Introducing the Linear Reference System in GRASS</a>, Bangkok, GRASS User Conf. 
Proc.</em><br> +<em>R. Blazek, 2005, <a href="https://web.archive.org/web/20240814152234/http://creativecity.gscc.osaka-cu.ac.jp/IJG/article/download/320/321">Introducing the Linear Reference System in GRASS</a>, International Journal of Geoinformatics, Vol. 1(3), pp. 95-100</em><br> <p> -<em><a href="v.build.polylines.html">v.build.polylines</a></em>, -<em><a href="v.lrs.create.html">v.lrs.create</a></em>, -<em><a href="v.lrs.segment.html">v.lrs.segment</a></em>, -<em><a href="v.lrs.where.html">v.lrs.where</a></em>, -<em><a href="v.lrs.label.html">v.lrs.label</a></em> +<em> +<a href="v.build.polylines.html">v.build.polylines</a>, +<a href="v.lrs.create.html">v.lrs.create</a>, +<a href="v.lrs.segment.html">v.lrs.segment</a>, +<a href="v.lrs.where.html">v.lrs.where</a>, +<a href="v.lrs.label.html">v.lrs.label</a> +</em> <h2>AUTHORS</h2> diff --git a/vector/v.lrs/v.lrs.create/v.lrs.create.html b/vector/v.lrs/v.lrs.create/v.lrs.create.html index 518bb80fedf..eb969065186 100644 --- a/vector/v.lrs/v.lrs.create/v.lrs.create.html +++ b/vector/v.lrs/v.lrs.create/v.lrs.create.html @@ -150,7 +150,7 @@ <h2>SEE ALSO</h2> <p> <em> <a href="lrs.html">LRS tutorial</a>,<br> -<a href="http://gisws.media.osaka-cu.ac.jp/grass04/viewpaper.php?id=50">Introducing the Linear Reference System in GRASS</a> +<a href="https://foss4g.asia/2004/Full%20Paper_PDF/Introducing%20the%20Linear%20Reference%20System%20in%20GRASS.pdf">Introducing the Linear Reference System in GRASS</a> </em> <h2>AUTHORS</h2> diff --git a/vector/v.lrs/v.lrs.label/v.lrs.label.html b/vector/v.lrs/v.lrs.label/v.lrs.label.html index dbd2f64ac67..fb96858b486 100644 --- a/vector/v.lrs/v.lrs.label/v.lrs.label.html +++ b/vector/v.lrs/v.lrs.label/v.lrs.label.html @@ -33,7 +33,7 @@ <h2>SEE ALSO</h2> <p> <em> <a href="lrs.html">LRS tutorial</a>,<br> -<a href="http://gisws.media.osaka-cu.ac.jp/grass04/viewpaper.php?id=50">Introducing the Linear Reference System in GRASS</a> +<a href="https://foss4g.asia/2004/Full%20Paper_PDF/Introducing%20the%20Linear%20Reference%20System%20in%20GRASS.pdf">Introducing the Linear Reference System in GRASS</a> </em> <h2>AUTHORS</h2> diff --git a/vector/v.lrs/v.lrs.segment/v.lrs.segment.html b/vector/v.lrs/v.lrs.segment/v.lrs.segment.html index 0a2b39dfd1f..259b54ae048 100644 --- a/vector/v.lrs/v.lrs.segment/v.lrs.segment.html +++ b/vector/v.lrs/v.lrs.segment/v.lrs.segment.html @@ -1,7 +1,7 @@ <h2>DESCRIPTION</h2> <em>v.lrs.segment</em> creates points/segments from input lines, -linear reference system and positions read from <tt>standard in</tt> +linear reference system and positions read from <code>standard in</code> or a file. 
<p> The format is as follows:<br> @@ -56,7 +56,7 @@ <h2>SEE ALSO</h2> <p> <em> <a href="lrs.html">LRS tutorial</a>,<br> -<a href="http://gisws.media.osaka-cu.ac.jp/grass04/viewpaper.php?id=50">Introducing the Linear Reference System in GRASS</a> +<a href="https://foss4g.asia/2004/Full%20Paper_PDF/Introducing%20the%20Linear%20Reference%20System%20in%20GRASS.pdf">Introducing the Linear Reference System in GRASS</a> </em> <h2>AUTHOR</h2> diff --git a/vector/v.lrs/v.lrs.where/v.lrs.where.html b/vector/v.lrs/v.lrs.where/v.lrs.where.html index cdf9651f389..e8c81834f8e 100644 --- a/vector/v.lrs/v.lrs.where/v.lrs.where.html +++ b/vector/v.lrs/v.lrs.where/v.lrs.where.html @@ -45,7 +45,7 @@ <h2>SEE ALSO</h2> <p> <em> <a href="lrs.html">LRS tutorial</a>,<br> -<a href="http://gisws.media.osaka-cu.ac.jp/grass04/viewpaper.php?id=50">Introducing the Linear Reference System in GRASS</a> +<a href="https://foss4g.asia/2004/Full%20Paper_PDF/Introducing%20the%20Linear%20Reference%20System%20in%20GRASS.pdf">Introducing the Linear Reference System in GRASS</a> </em> <h2>AUTHORS</h2> diff --git a/vector/v.mkgrid/v.mkgrid.html b/vector/v.mkgrid/v.mkgrid.html index 8d5a6900fff..3b9433f761d 100644 --- a/vector/v.mkgrid/v.mkgrid.html +++ b/vector/v.mkgrid/v.mkgrid.html @@ -66,8 +66,8 @@ <h3>Creating a positioned grid in a latitude-longitude</h3> at 167deg 52min east, 47deg 6min south. For use with e.g. QGIS you can then pull this grid into a project with projected coordinate reference system (CRS) using <em>v.proj</em> before -exporting as a Shapefile with <em>v.out.ogr</em> (within GRASS GIS you could -just use <em>d.grid -w</em> from the projec with projected CRS for the same effect): +exporting as a vector file with <em>v.out.ogr</em> (within GRASS GIS you could +just use <em>d.grid -w</em> from the project with projected CRS for the same effect): <div class="code"><pre> v.mkgrid map=p2min_grid grid=10,12 position=coor coordinates=167:52E,47:06S box=0:02,0:02 @@ -153,22 +153,17 @@ <h3>Using hexagons for point density</h3> v.vect.stats points=points_of_interest areas=hexagons count_column=count </pre></div> -User should note that some of the points may be outside the grid +Users should note that some of the points may be outside the grid since the hexagons cannot cover all the area around the edges (the computational region extent needs to be enlarged if all points should be considered). -The last command sets the vector map color table to <tt>viridis</tt> -based on the <tt>count</tt> column. +The last command sets the vector map color table to <code>viridis</code> +based on the <code>count</code> column. 
<div class="code"><pre> v.colors map=hexagons use=attr column=count color=viridis </pre></div> -<!-- -d.erase 77:77:77 -mogrify -trim .png ---> - <center> <img src="v_mkgrid.png"><br> Point density in a hexagonal grid diff --git a/vector/v.net.alloc/v.net.alloc.html b/vector/v.net.alloc/v.net.alloc.html index 2a1f0efd1cb..81dcd45a9d4 100644 --- a/vector/v.net.alloc/v.net.alloc.html +++ b/vector/v.net.alloc/v.net.alloc.html @@ -190,8 +190,8 @@ <h2>EXAMPLES</h2> v.db.update map=streets_hospitals column=FT_COST value=-1 where="ONE_WAY = 'TF'" # add costs to newly created lines -v.db.update map=streets_hospitals column=TF_COST value=0 where="cat > 49746" -v.db.update map=streets_hospitals column=FT_COST value=0 where="cat > 49746" +v.db.update map=streets_hospitals column=TF_COST value=0 where="cat > 49746" +v.db.update map=streets_hospitals column=FT_COST value=0 where="cat > 49746" # from centers v.net.alloc in=streets_hospitals out=streets_hospitals_alloc_from center_cats=1-10000 arc_column=FT_COST arc_backward_column=TF_COST diff --git a/vector/v.net.allpairs/v.net.allpairs.html b/vector/v.net.allpairs/v.net.allpairs.html index 8f65be7a356..5bd5bc45d85 100644 --- a/vector/v.net.allpairs/v.net.allpairs.html +++ b/vector/v.net.allpairs/v.net.allpairs.html @@ -50,8 +50,8 @@ <h2>EXAMPLE</h2> <h2>SEE ALSO</h2> <em> -<a href="v.net.path">v.net.path</a>, -<a href="v.net.distance">v.net.distance</a> +<a href="v.net.path.html">v.net.path</a>, +<a href="v.net.distance.html">v.net.distance</a> </em> <h2>AUTHORS</h2> diff --git a/vector/v.net.bridge/v.net.bridge.html b/vector/v.net.bridge/v.net.bridge.html index accd2484a33..7f871ae8a1a 100644 --- a/vector/v.net.bridge/v.net.bridge.html +++ b/vector/v.net.bridge/v.net.bridge.html @@ -26,12 +26,6 @@ <h2>NOTES</h2> An articulation point in graph theory is an articulation <em>node</em> in GRASS terminology. -<h2>EXAMPLES</h2> - -<div class="code"><pre> - TBD -</pre></div> - <h2>SEE ALSO</h2> <em> diff --git a/vector/v.net.iso/v.net.iso.html b/vector/v.net.iso/v.net.iso.html index 28c92571cbb..edc3450e173 100644 --- a/vector/v.net.iso/v.net.iso.html +++ b/vector/v.net.iso/v.net.iso.html @@ -89,7 +89,7 @@ <h4>Subdivision of a network using distance:</h4> #1 0 - < 1000 #2 1000 - < 2000 #3 2000 - < 5000 -#4 >= 5000 +#4 >= 5000 </pre></div> To display the result, run for example: @@ -162,7 +162,7 @@ <h4>Subdivision of a network using traveling time:</h4> v.db.update map=myroads_net_iso_time layer=1 column=trav_time value="0 - 1" where="cat = 1" v.db.update map=myroads_net_iso_time layer=1 column=trav_time value="1 - 2" where="cat = 2" v.db.update map=myroads_net_iso_time layer=1 column=trav_time value="2 - 5" where="cat = 3" -v.db.update map=myroads_net_iso_time layer=1 column=trav_time value="> 5" where="cat = 4" +v.db.update map=myroads_net_iso_time layer=1 column=trav_time value="> 5" where="cat = 4" # colors # cats=1: blue v.db.update map=myroads_net_iso_time layer=1 column=GRASSRGB value="000:000:255" where="cat = 1" diff --git a/vector/v.net.path/v.net.path.html b/vector/v.net.path/v.net.path.html index 9af5842495a..216f35b103f 100644 --- a/vector/v.net.path/v.net.path.html +++ b/vector/v.net.path/v.net.path.html @@ -16,16 +16,16 @@ <h2>DESCRIPTION</h2> and also different costs for both directions of a vector line. For areas, costs will be calculated along boundary lines. 
<p> -The input vector needs to be prepared with <tt>v.net operation=connect</tt> +The input vector needs to be prepared with <code>v.net operation=connect</code> in order to connect points representing center nodes to the network. -<p>Nodes and arcs can be closed using <tt>cost = -1</tt>. +<p>Nodes and arcs can be closed using <code>cost = -1</code>. <p>Least cost paths are written to the output vector map with an attached attribute table. <p>Nodes can be <ul> -<li> piped into the program from file or from stdin, or -<li> defined in the graphical user interface ("enter values interactively"). +<li> piped into the program from file or from stdin, or</li> +<li> defined in the graphical user interface ("enter values interactively").</li> </ul> The syntax is as follows: @@ -43,7 +43,7 @@ <h2>DESCRIPTION</h2> <p> Points specified by category must be exactly on network nodes, and the -input vector map needs to be prepared with <tt>v.net operation=connect</tt>. +input vector map needs to be prepared with <code>v.net operation=connect</code>. <p> When specifying coordinates, the next network node to a given coordinate pair is used. @@ -52,19 +52,19 @@ <h2>DESCRIPTION</h2> The attribute table will contain the following attributes: <ul> - <li><tt>cat</tt> - path unique category assigned by module</li> - <li><tt>id</tt> - path id (read from input)</li> - <li><tt>fcat</tt> - from point category</li> - <li><tt>tcat</tt> - to point category</li> - <li><tt>sp</tt> - result status: + <li><code>cat</code> - path unique category assigned by module</li> + <li><code>id</code> - path id (read from input)</li> + <li><code>fcat</code> - from point category</li> + <li><code>tcat</code> - to point category</li> + <li><code>sp</code> - result status: <ul> <li> 0 - OK, path found</li> <li> 1 - node is not reachable</li> <li> 2 - point of given category does not exist</li> - </ul> - <li><tt>cost</tt> - travelling costs (on the network, not to/from network)</li> - <li><tt>fdist</tt> - the distance from first point to the network</li> - <li><tt>tdist</tt> - the distance from the network to second point</li> + </ul></li> + <li><code>cost</code> - travelling costs (on the network, not to/from network)</li> + <li><code>fdist</code> - the distance from first point to the network</li> + <li><code>tdist</code> - the distance from the network to second point</li> </ul> <p> @@ -84,7 +84,7 @@ <h2>DESCRIPTION</h2> <h2>NOTES</h2> -Nodes and arcs can be closed using <tt>cost = -1</tt>. +Nodes and arcs can be closed using <code>cost = -1</code>. <p>If the cost columns <b>arc_column</b>, <b>arc_backward_column</b> and <b>node_column</b> are not specified, the length of network segments is measured and zero costs are assumed for nodes. @@ -92,7 +92,7 @@ <h2>NOTES</h2> accurate results, the line length must be taken into account when assigning costs as attributes. For example, to get the <em>fastest path</em>, the columns 'max_speed' and 'length' are required. The correct fastest -path can then be found by specifying <tt>arc_column=length/max_speed</tt>. If not yet +path can then be found by specifying <code>arc_column=length/max_speed</code>. If not yet existing, the column containing the line length ("length") has to added to the attributes table using <em><a href="v.to.db.html">v.to.db</a></em>. 
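<p>
As a minimal sketch of the fastest-path setup described in the NOTES above (assuming a
network map <em>myroads_net</em> already prepared with <em>v.net operation=connect</em>
and carrying a <code>max_speed</code> attribute column; the map name, column names,
category numbers and the <code>id start_cat end_cat</code> input format used here are
only illustrative, not part of the patch):
<div class="code"><pre>
# add and populate a line length column
v.db.addcolumn map=myroads_net columns="length double precision"
v.to.db map=myroads_net option=length columns=length

# fastest path between two node categories (path id, start cat, end cat piped on stdin)
echo "1 1 2" | v.net.path input=myroads_net output=fastest_path arc_column=length/max_speed
</pre></div>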
diff --git a/vector/v.net.salesman/v.net.salesman.html b/vector/v.net.salesman/v.net.salesman.html index 0f29c69e857..00153281daf 100644 --- a/vector/v.net.salesman/v.net.salesman.html +++ b/vector/v.net.salesman/v.net.salesman.html @@ -3,7 +3,7 @@ <h2>DESCRIPTION</h2> <em>v.net.salesman</em> calculates the optimal route to visit nodes on a vector network. -<p>Costs may be either line lengths, or attributes saved in a database +<p>Costs may be either line lengths or attributes saved in a database table. These attribute values are taken as costs of whole segments, not as costs to traverse a length unit (e.g. meter) of the segment. For example, if the speed limit is 100 km / h, the cost to traverse a @@ -14,16 +14,16 @@ <h2>DESCRIPTION</h2> Supported are cost assignments for arcs, and also different costs for both directions of a vector line. For areas, costs will be calculated along boundary lines. + <p>The input vector needs to be prepared with <em>v.net operation=connect</em> in order to connect points representing center nodes to the network. <p>Points specified by category must be exactly on network nodes, and the input vector map needs to be prepared with <em>v.net operation=connect</em>. -<p>Application of flag <b>-t</b> enables a turntable support. -This flag requires additional parameters <b>turn_layer</b> and <b>turn_cat_layer</b> -that are otherwise ignored. - The turntable allows +<p>The flag <b>-t</b> enables turntable support. +This flag requires additional parameters, <b>turn_layer</b> and <b>turn_cat_layer</b>, +that are otherwise ignored. The turntable allows to model e.g. traffic code, where some turns may be prohibited. This means that the input layer is expanded by turntable with costs of every possible turn on any possible node @@ -161,7 +161,7 @@ <h2>AUTHORS</h2> <h3>TURNS SUPPORT</h3> -The turns support was implemnented as part of GRASS GIS turns cost project +The turns support was implemented as part of GRASS GIS turns cost project at Czech Technical University in Prague, Czech Republic.<br> Eliska Kyzlikova, Stepan Turek, Lukas Bocan and Viera Bejdova participated in the project. diff --git a/vector/v.net.timetable/v.net.timetable.html b/vector/v.net.timetable/v.net.timetable.html index e990ca5d8f1..3540f93e08f 100644 --- a/vector/v.net.timetable/v.net.timetable.html +++ b/vector/v.net.timetable/v.net.timetable.html @@ -9,7 +9,7 @@ <h2>DESCRIPTION</h2> PATH_ID FROM_X FROM_Y TO_X TO_Y START_TIME MIN_CHANGE MAX_CHANGES WALK_CHANGE PATH_ID FROM_STOP TO_STOP START_TIME MIN_CHANGE MAX_CHANGES WALK_CHANGE </pre></div> -where PATH_ID is the identificator of a query that is used in the +where PATH_ID is the identifier of a query that is used in the output map to differentiate between queries. Search begins at START_TIME. MIN_CHANGE gives the minimum number of time (inclusively) for a change from one route to another. MAX_CHANGES @@ -42,7 +42,7 @@ <h2>DESCRIPTION</h2> 3|47|300|3|24|24 </pre></div> where CAT is the category of a point in the map, PATH_ID is the path -identificator, STOP_ID is the identificator of the stop as used in +identifier, STOP_ID is the identifier of the stop as used in the input map, INDEX is the index of the stop on the path (i.e, index=1 is the first stop visited, ...) and ARR_TIME and DEP_TIME denote the arrival time and departure time respectively. Arrival @@ -51,14 +51,14 @@ <h2>DESCRIPTION</h2> time. <br> The table linked to the second layer corresponds to subroutes taken -between stops. 
The following table is obtainedd for the above query: +between stops. The following table is obtained for the above query: <div class="code"><pre> cat|path_id|from_id|to_id|route_id|index|from_time|to_time 1|47|130|250|15|1|15|22 2|47|250|300|-1|2|22|24 </pre></div> where CAT is the category of lines of subroute between stops FROM_ID -to TO_ID, ROUTE_ID is the identificator of the route taken or -1 if +to TO_ID, ROUTE_ID is the identifier of the route taken or -1 if walking, INDEX and PATH_ID are as above and FROM_TIME and TO_TIME denote the times between which the route is taken. <br> @@ -67,7 +67,7 @@ <h2>DESCRIPTION</h2> is added between two corresponding points. Finally, instead of straight line segment, the actual paths of routes can be given in <b> paths</b> layer. If this parameter is used then each line in the -input map must contain identificators as category numbers of all +input map must contain identifiers as category numbers of all routes passing through the line. The module then finds the path between two stops and writes this path instead. In case of walking from one stop to another, straight line between the stops is used. @@ -83,7 +83,7 @@ <h2>NOTES</h2> still needs to be a separate route for every time. For each stop (given by the category number of the point) the table storing information about the routes must contain the list of all routes -stopping at the stop(given by route identificators) together with +stopping at the stop(given by route identifiers) together with arrival times. That is, the table must contain three columns: stop - which is the key of the table, <b>route_id</b> and <b>stop_time</b> where each triple corresponds to a route arriving to a stop and a @@ -122,7 +122,7 @@ <h2>NOTES</h2> <h2>EXAMPLES</h2> -To find a path from stop with identificator 130 to stop with +To find a path from stop with identifier 130 to stop with category 300, starting at time 0, with one time unit for change, maximum of 5 changes and with walking not considered a change of route, we use the following command: diff --git a/vector/v.net/arcs.c b/vector/v.net/arcs.c index 6d18f760bfd..20554fc19f9 100644 --- a/vector/v.net/arcs.c +++ b/vector/v.net/arcs.c @@ -68,6 +68,7 @@ int create_arcs(FILE *file, struct Map_info *Pnts, struct Map_info *Out, Vect_destroy_line_struct(points); Vect_destroy_cats_struct(cats); + Vect_destroy_line_struct(points2); return narcs; } diff --git a/vector/v.net/report.c b/vector/v.net/report.c index 952acd5dfff..bce9baa3c5a 100644 --- a/vector/v.net/report.c +++ b/vector/v.net/report.c @@ -128,6 +128,9 @@ int report(struct Map_info *In, int afield, int nfield, int action) } } } + Vect_destroy_cats_struct(Cats); + Vect_destroy_cats_struct(Cats2); + Vect_destroy_line_struct(Points); return 0; } diff --git a/vector/v.net/v.net.html b/vector/v.net/v.net.html index baef25e5484..c9a8cb57eb5 100644 --- a/vector/v.net/v.net.html +++ b/vector/v.net/v.net.html @@ -39,9 +39,9 @@ <h2>DESCRIPTION</h2> category. In order to assign unique costs to each line, a new layer needs to be created with<br> <!-- like this it is rather useless, still to be improved (eg as a full example below!) 
--> -<tt>v.category input=yourmap option=add cat=1 step=1 layer=3 output=newmap</tt><br> +<code>v.category input=yourmap option=add cat=1 step=1 layer=3 output=newmap</code><br> followed by<br> -<tt>v.db.addtable map=newmap layer=3 table=tablename</tt>.</li> +<code>v.db.addtable map=newmap layer=3 table=tablename</code>.</li> <li>Create nodes and arcs from a vector line/boundary file using the <em>node</em> operation. This is useful if you are mostly interested @@ -173,7 +173,7 @@ <h3>Merge in nodes from a separate map within given threshold</h3> v.category streets_net option=report </pre></div> -The nodes are stored in layer 2 unless <tt>node_layer=1</tt> is used. +The nodes are stored in layer 2 unless <code>node_layer=1</code> is used. <h3>Generating network for vector point map</h3> diff --git a/vector/v.normal/v.normal.html b/vector/v.normal/v.normal.html index 327fcdcd5d1..2f40d6171e6 100644 --- a/vector/v.normal/v.normal.html +++ b/vector/v.normal/v.normal.html @@ -9,22 +9,22 @@ <h2>NOTES</h2> giving an index, ranges of indices, or multiple thereof. <ol> -<li> Sample skewness and kurtosis -<li> Geary's a-statistic and an approximate normal transformation -<li> Extreme normal deviates -<li> D'Agostino's D-statistic -<li> Modified Kuiper V-statistic -<li> Modified Watson U^2-statistic -<li> Durbin's Exact Test (modified Kolmogorov) -<li> Modified Anderson-Darling statistic -<li> Modified Cramer-Von Mises W^2-statistic -<li> Kolmogorov-Smirnov D-statistic (modified for normality testing) +<li> Sample skewness and kurtosis</li> +<li> Geary's a-statistic and an approximate normal transformation</li> +<li> Extreme normal deviates</li> +<li> D'Agostino's D-statistic</li> +<li> Modified Kuiper V-statistic</li> +<li> Modified Watson U^2-statistic</li> +<li> Durbin's Exact Test (modified Kolmogorov)</li> +<li> Modified Anderson-Darling statistic</li> +<li> Modified Cramer-Von Mises W^2-statistic</li> +<li> Kolmogorov-Smirnov D-statistic (modified for normality testing)</li> <li> Chi-Square test statistic (equal probability classes) and - the number of degrees of freedom -<li> Shapiro-Wilk W Test -<li> Weisberg-Binghams W'' (similar to Shapiro-Francia's W') -<li> Royston's extension of W for large samples -<li> Kotz Separate-Families Test for Lognormality vs. Normality + the number of degrees of freedom</li> +<li> Shapiro-Wilk W Test</li> +<li> Weisberg-Binghams W'' (similar to Shapiro-Francia's W')</li> +<li> Royston's extension of W for large samples</li> +<li> Kotz Separate-Families Test for Lognormality vs. Normality</li> </ol> <h2>EXAMPLE</h2> diff --git a/vector/v.out.ascii/v.out.ascii.html b/vector/v.out.ascii/v.out.ascii.html index d8b6440b1c7..e87bcba9075 100644 --- a/vector/v.out.ascii/v.out.ascii.html +++ b/vector/v.out.ascii/v.out.ascii.html @@ -19,17 +19,17 @@ <h2>NOTES</h2> <p>If old version is requested, the <b>output</b> files from <em>v.out.ascii</em> is placed in -the <tt>$LOCATION/$MAPSET/dig_ascii/</tt> -and <tt>$LOCATION/$MAPSET/dig_att</tt> directory. +the <code>$LOCATION/$MAPSET/dig_ascii/</code> +and <code>$LOCATION/$MAPSET/dig_att</code> directory. <p>If <b>layer > 0</b> then only features with a category number will be exported. Use <em><a href="v.category.html">v.category</a></em> to add them if needed or define <b>layer=-1</b> to export also features without category. 
<p><em>v.out.ascii</em> in the old version mode (<b>-o</b>) does not -copy the <tt>dig_cats</tt> file associated with the binary +copy the <code>dig_cats</code> file associated with the binary vector <b>input</b> map to the new <b>output</b> file name. The user -must copy the <tt>dig_cats</tt> file to the new <b>output</b> name if +must copy the <code>dig_cats</code> file to the new <b>output</b> name if this is desired (e.g. using the UNIX <em>cp</em> command). <p>It is possible to output the coordinates of vertices in a non-points vector @@ -90,7 +90,7 @@ <h3>Point mode</h3> Print also selected attributes: <div class="code"><pre> -v.out.ascii input=geodetic_pts format=point where="cat > 5 and cat <= 8" columns=GEOD_NAME +v.out.ascii input=geodetic_pts format=point where="cat > 5 and cat <= 8" columns=GEOD_NAME 573638.06289275|271623.25042595|6|27 WC 6 574416.81289275|274116.65542595|7|27 WC 7 @@ -100,7 +100,7 @@ <h3>Point mode</h3> To print all attributes type <b>columns=*</b>: <div class="code"><pre> -v.out.ascii input=geodetic_pts format=point where="cat > 5 and cat <= 8" columns=* +v.out.ascii input=geodetic_pts format=point where="cat > 5 and cat <= 8" columns=* 573638.06289275|271623.25042595|6|6|0.00000000|0.00000000|6|6|27 WC 6|573638.09200000|271623.24100000|0.00|0|1.00000000|1.00000000 574416.81289275|274116.65542595|7|7|0.00000000|0.00000000|7|7|27 WC 7|574416.84100000|274116.64900000|0.00|0|1.00000000|1.00000000 575301.31189275|275303.81342595|8|8|0.00000000|0.00000000|8|8|27 WC 8|575301.30600000|275303.82600000|0.00|0|1.00000000|1.00000000 diff --git a/vector/v.out.dxf/main.c b/vector/v.out.dxf/main.c index d00a15e1378..2e30172dd10 100644 --- a/vector/v.out.dxf/main.c +++ b/vector/v.out.dxf/main.c @@ -195,6 +195,8 @@ int add_plines(struct Map_info *Map, int field, double textsize) } nlines_dxf++; } + Vect_destroy_line_struct(Points); + Vect_destroy_cats_struct(Cats); return nlines_dxf; } diff --git a/vector/v.out.dxf/v.out.dxf.html b/vector/v.out.dxf/v.out.dxf.html index 95057f05f22..1cf45b3d3e0 100644 --- a/vector/v.out.dxf/v.out.dxf.html +++ b/vector/v.out.dxf/v.out.dxf.html @@ -8,7 +8,7 @@ <h2>DESCRIPTION</h2> <h2>NOTES</h2> -DXF files output by AutoCAD have the suffix <tt>.dxf</tt> +DXF files output by AutoCAD have the suffix <code>.dxf</code> <h2>REFERENCES</h2> diff --git a/vector/v.out.lidar/v.out.lidar.html b/vector/v.out.lidar/v.out.lidar.html index 2d0708d98ae..295812480e0 100644 --- a/vector/v.out.lidar/v.out.lidar.html +++ b/vector/v.out.lidar/v.out.lidar.html @@ -1,7 +1,7 @@ <h2>DESCRIPTION</h2> <em>v.out.lidar</em> converts GRASS vector map to a LiDAR point clouds -in LAS format using the <a href="http://www.liblas.org">libLAS</a> library. +in LAS format using the <a href="https://liblas.org">libLAS</a> library. <p> The <b>-r</b> flag limits the export to the current computational region @@ -20,7 +20,7 @@ <h2>NOTES</h2> The typical file extensions for the LAS format are .las and .laz (compressed). The compressed LAS (.laz) format can be exported only if libLAS has been -compiled with <a href="http://www.laszip.org/">LASzip</a> support. +compiled with <a href="https://laszip.org/">LASzip</a> support. It is also good when libLAS was compiled with GDAL. This is needed when working with projections. 
@@ -39,7 +39,7 @@ <h2>REFERENCES</h2> <a href="https://www.asprs.org/committee-general/laser-las-file-format-exchange-activities.html"> ASPRS LAS format</a><br> -<a href="http://www.liblas.org/">LAS library</a> <br> +<a href="https://liblas.org/">LAS library</a> <br> <h2>SEE ALSO</h2> diff --git a/vector/v.out.ogr/args.c b/vector/v.out.ogr/args.c index 18841837e24..8310c302c50 100644 --- a/vector/v.out.ogr/args.c +++ b/vector/v.out.ogr/args.c @@ -77,6 +77,19 @@ void parse_args(int argc, char **argv, struct Options *options, _("OGR layer creation option (format specific, NAME=VALUE)"); options->lco->guisection = _("Creation"); + options->method = G_define_option(); + options->method->key = "method"; + options->method->type = TYPE_STRING; + options->method->required = NO; + options->method->options = "fast,slow"; + options->method->answer = "fast"; + options->method->label = _("Method to use for export, " + "default is fast export, " + "use slow export in case of problems with " + "the fast method"); + G_asprintf((char **)&options->method->descriptions, "fast;%s;slow;%s", + _("new, faster method"), _("old, slower method")); + flags->update = G_define_flag(); flags->update->key = 'u'; flags->update->description = diff --git a/vector/v.out.ogr/attrb_fast.c b/vector/v.out.ogr/attrb_fast.c new file mode 100644 index 00000000000..45ee832f076 --- /dev/null +++ b/vector/v.out.ogr/attrb_fast.c @@ -0,0 +1,155 @@ +#include <grass/glocale.h> + +#include "local_proto.h" + +int mk_att_fast(int cat, struct field_info *Fi, int ncol, int *colctype, + const char **colname, int doatt, int nocat, + OGRFeatureH Ogr_feature, int *noatt, dbCursor *cursor, + int *more, int *db_cat, int key_col_index) +{ + int j, ogrfieldnum; + dbTable *Table; + static int first = 1; + static dbString dbstring; + dbColumn *Column; + dbValue *Value; + + G_debug(2, "mk_att() cat = %d, doatt = %d", cat, doatt); + + /* init constants */ + if (first) { + db_init_string(&dbstring); + first = 0; + } + + /* Attributes */ + /* Reset */ + if (!doatt) { + ogrfieldnum = OGR_F_GetFieldIndex(Ogr_feature, GV_KEY_COLUMN); + if (ogrfieldnum > -1) + OGR_F_UnsetField(Ogr_feature, ogrfieldnum); + /* doatt reset moved into have cat loop as the table needs to be + open to know the OGR field ID. 
Hopefully this has no ill consequences + */ + } + + /* Read & set attributes */ + if (cat >= 0) { /* Line with category */ + if (doatt) { + /* get current entries from cursor, + * check cat value in attributes */ + + Table = db_get_cursor_table(cursor); + while (*more && cat > *db_cat) { + Column = db_get_table_column(Table, key_col_index); + Value = db_get_column_value(Column); + + /* yes, the key column is sometimes of type double */ + switch (colctype[key_col_index]) { + case DB_C_TYPE_INT: + *db_cat = db_get_value_int(Value); + break; + case DB_C_TYPE_DOUBLE: + *db_cat = (int)db_get_value_double(Value); + break; + } + + G_debug(2, "found db_cat %d for cat %d in column %s", *db_cat, + cat, db_get_column_name(Column)); + + if (cat > *db_cat) { + if (db_fetch(cursor, DB_NEXT, more) != DB_OK) { + G_fatal_error(_("Unable to fetch data from table")); + } + } + } + + if (!(*more) || cat != *db_cat) { + G_debug(1, "No database record for cat = %d", cat); + /* Set at least key column to category */ + if (!nocat) { + ogrfieldnum = OGR_F_GetFieldIndex(Ogr_feature, Fi->key); + OGR_F_SetFieldInteger(Ogr_feature, ogrfieldnum, cat); + (*noatt)++; + } + else { + G_fatal_error(_("No database record for cat = %d and " + "export of 'cat' disabled"), + cat); + } + } + else { + for (j = 0; j < ncol; j++) { + Column = db_get_table_column(Table, j); + Value = db_get_column_value(Column); + db_convert_column_value_to_string( + Column, &dbstring); /* for debug only */ + G_debug(2, "col %d : val = %s", j, + db_get_string(&dbstring)); + + G_debug(2, " colctype = %d", colctype[j]); + + if (nocat && strcmp(Fi->key, colname[j]) == 0) + continue; + + ogrfieldnum = OGR_F_GetFieldIndex(Ogr_feature, colname[j]); + G_debug(2, " column = %s -> fieldnum = %d", colname[j], + ogrfieldnum); + + if (ogrfieldnum < 0) { + G_debug(4, + "Could not get OGR field number for column %s", + colname[j]); + continue; + } + + /* Reset */ + if ((nocat && strcmp(Fi->key, colname[j]) == 0) == 0) { + /* if this is 'cat', then execute the following only if + * the '-s' flag was NOT given */ + OGR_F_SetFieldNull(Ogr_feature, ogrfieldnum); + } + + /* prevent writing NULL values */ + if (!db_test_value_isnull(Value)) { + if ((nocat && strcmp(Fi->key, colname[j]) == 0) == 0) { + /* if this is 'cat', then execute the following only + * if the '-s' flag was NOT given */ + + switch (colctype[j]) { + case DB_C_TYPE_INT: + OGR_F_SetFieldInteger(Ogr_feature, ogrfieldnum, + db_get_value_int(Value)); + break; + case DB_C_TYPE_DOUBLE: + OGR_F_SetFieldDouble( + Ogr_feature, ogrfieldnum, + db_get_value_double(Value)); + break; + case DB_C_TYPE_STRING: + OGR_F_SetFieldString( + Ogr_feature, ogrfieldnum, + db_get_value_string(Value)); + break; + case DB_C_TYPE_DATETIME: + db_convert_column_value_to_string(Column, + &dbstring); + OGR_F_SetFieldString(Ogr_feature, ogrfieldnum, + db_get_string(&dbstring)); + break; + } + } + } + else + OGR_F_SetFieldNull(Ogr_feature, ogrfieldnum); + } + } + } + else { /* Use cat only */ + ogrfieldnum = OGR_F_GetFieldIndex(Ogr_feature, GV_KEY_COLUMN); + OGR_F_SetFieldInteger(Ogr_feature, ogrfieldnum, cat); + } + } + + return 1; +} diff --git a/vector/v.out.ogr/export_areas.c b/vector/v.out.ogr/export_areas.c index acab7164a8d..5cbc78c408d 100644 --- a/vector/v.out.ogr/export_areas.c +++ b/vector/v.out.ogr/export_areas.c @@ -13,8 +13,9 @@ static int export_areas_multi(struct Map_info *, int, int, OGRFeatureDefnH, static OGRGeometryH create_polygon(struct Map_info *, int, struct line_pnts *, int); +#if 0 /* maybe useful */ -void 
reverse_points(struct line_pnts *Points) +static void reverse_points(struct line_pnts *Points) { int i, j, nhalf; double tmp; @@ -35,6 +36,7 @@ void reverse_points(struct line_pnts *Points) Points->z[j] = tmp; } } +#endif /* export areas as single/multi-polygons */ int export_areas(struct Map_info *In, int field, int multi, int donocat, diff --git a/vector/v.out.ogr/export_areas_fast.c b/vector/v.out.ogr/export_areas_fast.c new file mode 100644 index 00000000000..cad77b2279a --- /dev/null +++ b/vector/v.out.ogr/export_areas_fast.c @@ -0,0 +1,539 @@ +#include <grass/glocale.h> + +#include "local_proto.h" + +static int export_areas_single(struct Map_info *, int, int, OGRFeatureDefnH, + OGRLayerH, struct field_info *, dbDriver *, int, + int *, const char **, int, int, int *, int *, + int); +static int export_areas_multi(struct Map_info *, int, int, OGRFeatureDefnH, + OGRLayerH, struct field_info *, dbDriver *, int, + int *, const char **, int, int, int *, int *, + int); +static OGRGeometryH create_polygon(struct Map_info *, int, struct line_pnts *, + int); + +#if 0 +/* maybe useful */ +static void reverse_points(struct line_pnts *Points) +{ + int i, j, nhalf; + double tmp; + + nhalf = Points->n_points / 2; + + for (i = 0, j = Points->n_points - 1; i < nhalf; i++, j--) { + tmp = Points->x[i]; + Points->x[i] = Points->x[j]; + Points->x[j] = tmp; + + tmp = Points->y[i]; + Points->y[i] = Points->y[j]; + Points->y[j] = tmp; + + tmp = Points->z[i]; + Points->z[i] = Points->z[j]; + Points->z[j] = tmp; + } +} +#endif + +/* export areas as single/multi-polygons */ +int export_areas_fast(struct Map_info *In, int field, int multi, int donocat, + OGRFeatureDefnH Ogr_featuredefn, OGRLayerH Ogr_layer, + struct field_info *Fi, dbDriver *driver, int ncol, + int *colctype, const char **colname, int doatt, int nocat, + int *noatt, int *fout, int outer_ring_ccw) +{ + if (multi) + /* export as multi-polygons */ + return export_areas_multi( + In, field, donocat, Ogr_featuredefn, Ogr_layer, Fi, driver, ncol, + colctype, colname, doatt, nocat, noatt, fout, outer_ring_ccw); + + /* export as polygons */ + return export_areas_single(In, field, donocat, Ogr_featuredefn, Ogr_layer, + Fi, driver, ncol, colctype, colname, doatt, + nocat, noatt, fout, outer_ring_ccw); +} + +int export_areas_single(struct Map_info *In, int field, int donocat, + OGRFeatureDefnH Ogr_featuredefn, OGRLayerH Ogr_layer, + struct field_info *Fi, dbDriver *driver, int ncol, + int *colctype, const char **colname, int doatt, + int nocat, int *n_noatt, int *n_nocat, + int outer_ring_ccw) +{ + int i; + int cat, last_cat, db_cat, centroid, area; + int n_exported; + + struct line_pnts *Points; + struct line_cats *Cats; + + int findex; + struct Cat_index *ci; + int cat_index, n_cats; + + dbString dbstring; + char buf[SQL_BUFFER_SIZE]; + dbCursor cursor; + int more; + int key_col_index; + + OGRGeometryH Ogr_geometry; + OGRFeatureH Ogr_feature; + + Points = Vect_new_line_struct(); + Cats = Vect_new_cats_struct(); + + n_exported = 0; + + /* get category index for given field */ + findex = Vect_cidx_get_field_index(In, field); + if (findex == -1) { + G_fatal_error(_("Unable to export multi-features. 
No category index " + "for layer %d."), + field); + } + + ci = &(In->plus.cidx[findex]); + n_cats = ci->n_cats; + + if (donocat) + G_message(_("Exporting features with category...")); + + /* select attributes ordered by category value */ + db_init_string(&dbstring); + sprintf(buf, "SELECT * FROM %s ORDER BY %s ASC", Fi->table, Fi->key); + G_debug(2, "SQL: %s", buf); + db_set_string(&dbstring, buf); + if (db_open_select_cursor(driver, &dbstring, &cursor, DB_SEQUENTIAL) != + DB_OK) { + G_fatal_error(_("Cannot select attributes sorted by %s"), Fi->key); + } + + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + + /* get index of key column */ + key_col_index = -1; + for (i = 0; i < ncol; i++) { + if (strcmp(Fi->key, colname[i]) == 0) { + key_col_index = i; + break; + } + } + + last_cat = -1; + db_cat = -1; + for (cat_index = 0; cat_index < n_cats; cat_index++) { + + G_percent(cat_index, n_cats, 5); + + /* get area's category */ + if (!(ci->cat[cat_index][1] & GV_CENTROID)) + continue; + + cat = ci->cat[cat_index][0]; + /* make sure the cidx is ordered by cat */ + if (cat < last_cat) + G_fatal_error(_("Category index is not sorted ascending by cat!")); + last_cat = cat; + + centroid = ci->cat[cat_index][2]; + + area = Vect_get_centroid_area(In, centroid); + + if (area < 1) { + /* centroid not in area or duplicate centroid */ + continue; + } + + /* create polygon from area */ + Ogr_geometry = create_polygon(In, area, Points, outer_ring_ccw); + + /* add feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* get attributes */ + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, Ogr_feature, + n_noatt, &cursor, &more, &db_cat, key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + OGR_G_DestroyGeometry(Ogr_geometry); + } + + if (donocat) + G_message(_("Exporting features without category...")); + + if (doatt) { + db_close_cursor(&cursor); + if (donocat) { + cat = -1; + if (db_open_select_cursor(driver, &dbstring, &cursor, + DB_SEQUENTIAL) != DB_OK) { + G_fatal_error(_("Cannot select attributes for cat = %d"), cat); + } + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + } + } + + for (area = 1; area <= Vect_get_num_areas(In); area++) { + centroid = Vect_get_area_centroid(In, area); + /* skip areas without centroid */ + if (centroid == 0) + continue; + + /* get areas's category */ + Vect_get_area_cats(In, area, Cats); + Vect_cat_get(Cats, field, &cat); + /* skip areas with category */ + if (cat >= 0) + continue; + /* skip areas without category, do not export not labeled */ + if (cat < 0 && !donocat) { + (*n_nocat)++; + continue; + } + + (*n_nocat)++; + + /* create polygon from area */ + Ogr_geometry = create_polygon(In, area, Points, outer_ring_ccw); + + /* add feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* no attributes for features without category */ + cat = -1; + db_cat = -2; + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, Ogr_feature, + n_noatt, &cursor, &more, &db_cat, key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + OGR_G_DestroyGeometry(Ogr_geometry); + } + 
+ if (donocat && doatt) + db_close_cursor(&cursor); + + Vect_destroy_line_struct(Points); + + return n_exported; +} + +int export_areas_multi(struct Map_info *In, int field, int donocat, + OGRFeatureDefnH Ogr_featuredefn, OGRLayerH Ogr_layer, + struct field_info *Fi, dbDriver *driver, int ncol, + int *colctype, const char **colname, int doatt, + int nocat, int *n_noatt, int *n_nocat, + int outer_ring_ccw) +{ + int i, n_exported, area, centroid; + int cat, last_cat, db_cat, line, findex, ipart; + + struct line_pnts *Points; + struct line_cats *Cats; + struct ilist *line_list, *lcats; + + struct Cat_index *ci; + int cat_index, n_cats; + + dbString dbstring; + char buf[SQL_BUFFER_SIZE]; + dbCursor cursor; + int more; + int key_col_index; + + OGRGeometryH Ogr_geometry, Ogr_geometry_part; + OGRFeatureH Ogr_feature; + OGRwkbGeometryType wkbtype, wkbtype_part; + + Points = Vect_new_line_struct(); + Cats = Vect_new_cats_struct(); + line_list = Vect_new_list(); + lcats = Vect_new_list(); + + n_exported = 0; + + /* check if category index is available for given field */ + findex = Vect_cidx_get_field_index(In, field); + if (findex == -1) { + G_fatal_error(_("Unable to export multi-features. No category index " + "for layer %d."), + field); + } + + ci = &(In->plus.cidx[findex]); + n_cats = ci->n_cats; + + /* determine type */ + wkbtype_part = wkbPolygon; + wkbtype = get_multi_wkbtype(wkbtype_part); + + if (donocat) + G_message(_("Exporting features with category...")); + + key_col_index = -1; + more = 1; + if (doatt) { + /* select attributes ordered by category value */ + db_init_string(&dbstring); + sprintf(buf, "SELECT * FROM %s ORDER BY %s ASC", Fi->table, Fi->key); + G_debug(2, "SQL: %s", buf); + db_set_string(&dbstring, buf); + if (db_open_select_cursor(driver, &dbstring, &cursor, DB_SEQUENTIAL) != + DB_OK) { + G_fatal_error(_("Cannot select attributes sorted by %s"), Fi->key); + } + + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + + /* get index of key column */ + key_col_index = -1; + for (i = 0; i < ncol; i++) { + if (strcmp(Fi->key, colname[i]) == 0) { + key_col_index = i; + break; + } + } + } + + last_cat = -1; + db_cat = -1; + cat_index = 0; + while (cat_index < n_cats) { + + G_percent(cat_index, n_cats, 5); + + cat = ci->cat[cat_index][0]; + + /* make sure the cidx is ordered by cat */ + if (cat < last_cat) + G_fatal_error(_("Category index is not sorted ascending by cat!")); + last_cat = cat; + + /* collect all features with current cat */ + Vect_reset_list(line_list); + while (cat_index < n_cats && ci->cat[cat_index][0] == cat) { + if (ci->cat[cat_index][1] & GV_CENTROID) { + Vect_list_append(line_list, ci->cat[cat_index][2]); + } + cat_index++; + } + + /* create multi-feature */ + Ogr_geometry = OGR_G_CreateGeometry(wkbtype); + + /* build simple features geometry, go through all parts */ + for (ipart = 0; ipart < line_list->n_values; ipart++) { + line = line_list->value[ipart]; + G_debug(3, "cat=%d, line=%d -> part=%d", cat, line, ipart); + + /* get centroid's category */ + Vect_read_line(In, NULL, Cats, line); + /* check for category consistency */ + Vect_field_cat_get(Cats, field, lcats); + if (!Vect_val_in_list(lcats, cat)) + G_fatal_error(_("Unable to create multi-feature. 
" + "Category %d not found in line %d, field %d"), + cat, line, field); + + /* find corresponding area */ + area = Vect_get_centroid_area(In, line); + if (area <= 0) + continue; + + /* create polygon from area */ + Ogr_geometry_part = + create_polygon(In, area, Points, outer_ring_ccw); + + /* add part */ + OGR_G_AddGeometryDirectly(Ogr_geometry, Ogr_geometry_part); + } + + if (!OGR_G_IsEmpty(Ogr_geometry)) { + /* write multi-feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* get attributes */ + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, + Ogr_feature, n_noatt, &cursor, &more, &db_cat, + key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + } + else { + /* skip empty features */ + G_debug(3, "multi-feature is empty -> skipped"); + } + + OGR_G_DestroyGeometry(Ogr_geometry); + } + + if (donocat) + G_message(_("Exporting features without category...")); + + /* check areas without category, if -c flag is given write them as + * one multi-feature */ + Ogr_geometry = OGR_G_CreateGeometry(wkbtype); + + if (doatt) { + db_close_cursor(&cursor); + if (donocat) { + cat = -1; + if (db_open_select_cursor(driver, &dbstring, &cursor, + DB_SEQUENTIAL) != DB_OK) { + G_fatal_error(_("Cannot select attributes for cat = %d"), cat); + } + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + } + } + + for (area = 1; area <= Vect_get_num_areas(In); area++) { + centroid = Vect_get_area_centroid(In, area); + /* skip areas without centroid */ + if (centroid == 0) + continue; + + /* get areas's category */ + Vect_get_area_cats(In, area, Cats); + Vect_cat_get(Cats, field, &cat); + /* skip areas with category */ + if (cat >= 0) + continue; + /* skip areas without category, do not export not labeled */ + if (cat < 0 && !donocat) { + (*n_nocat)++; + continue; + } + + /* create polygon from area */ + Ogr_geometry_part = create_polygon(In, area, Points, outer_ring_ccw); + + /* add part */ + OGR_G_AddGeometryDirectly(Ogr_geometry, Ogr_geometry_part); + + (*n_nocat)++; + } + + if (!OGR_G_IsEmpty(Ogr_geometry)) { + /* write multi-feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* no attributes for features without category */ + cat = -1; + db_cat = -2; + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, Ogr_feature, + n_noatt, &cursor, &more, &db_cat, key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + } + else { + /* skip empty features */ + G_debug(3, "multi-feature is empty -> skipped"); + } + + OGR_G_DestroyGeometry(Ogr_geometry); + + if (donocat && doatt) + db_close_cursor(&cursor); + + Vect_destroy_line_struct(Points); + Vect_destroy_cats_struct(Cats); + Vect_destroy_list(line_list); + Vect_destroy_list(lcats); + + return n_exported; +} + +OGRGeometryH create_polygon(struct Map_info *In, int area, + struct line_pnts *Points, int outer_ring_ccw) +{ + int j, k; + OGRGeometryH Ogr_geometry, ring; + + Vect_get_area_points(In, area, Points); + + Ogr_geometry = OGR_G_CreateGeometry(wkbPolygon); + ring = OGR_G_CreateGeometry(wkbLinearRing); + + /* Area */ + if (Vect_is_3d(In)) { + if (outer_ring_ccw) { + for (j = Points->n_points - 1; j >= 0; j--) + 
OGR_G_AddPoint(ring, Points->x[j], Points->y[j], Points->z[j]); + } + else { + for (j = 0; j < Points->n_points; j++) + OGR_G_AddPoint(ring, Points->x[j], Points->y[j], Points->z[j]); + } + } + else { + if (outer_ring_ccw) { + for (j = Points->n_points - 1; j >= 0; j--) + OGR_G_AddPoint_2D(ring, Points->x[j], Points->y[j]); + } + else { + for (j = 0; j < Points->n_points; j++) + OGR_G_AddPoint_2D(ring, Points->x[j], Points->y[j]); + } + } + + OGR_G_AddGeometryDirectly(Ogr_geometry, ring); + + /* Isles */ + for (k = 0; k < Vect_get_area_num_isles(In, area); k++) { + Vect_get_isle_points(In, Vect_get_area_isle(In, area, k), Points); + ring = OGR_G_CreateGeometry(wkbLinearRing); + if (Vect_is_3d(In)) { + if (outer_ring_ccw) { + for (j = Points->n_points - 1; j >= 0; j--) + OGR_G_AddPoint(ring, Points->x[j], Points->y[j], + Points->z[j]); + } + else { + for (j = 0; j < Points->n_points; j++) + OGR_G_AddPoint(ring, Points->x[j], Points->y[j], + Points->z[j]); + } + } + else { + if (outer_ring_ccw) { + for (j = Points->n_points - 1; j >= 0; j--) + OGR_G_AddPoint_2D(ring, Points->x[j], Points->y[j]); + } + else { + for (j = 0; j < Points->n_points; j++) + OGR_G_AddPoint_2D(ring, Points->x[j], Points->y[j]); + } + } + OGR_G_AddGeometryDirectly(Ogr_geometry, ring); + } + + return Ogr_geometry; +} diff --git a/vector/v.out.ogr/export_lines_fast.c b/vector/v.out.ogr/export_lines_fast.c new file mode 100644 index 00000000000..e9509c8d972 --- /dev/null +++ b/vector/v.out.ogr/export_lines_fast.c @@ -0,0 +1,546 @@ +#include <grass/glocale.h> + +#include "local_proto.h" + +static int export_lines_single(struct Map_info *, int, int, int, int, + OGRFeatureDefnH, OGRLayerH, struct field_info *, + dbDriver *, int, int *, const char **, int, int, + int *, int *); +static int export_lines_multi(struct Map_info *, int, int, int, int, + OGRFeatureDefnH, OGRLayerH, struct field_info *, + dbDriver *, int, int *, const char **, int, int, + int *, int *); + +static void line_to_polygon(OGRGeometryH, const struct line_pnts *); + +static void add_part(OGRGeometryH, OGRwkbGeometryType, int, struct line_pnts *); + +static OGRGeometryH build_geometry(struct Map_info *, struct line_pnts *, int, + int, int); + +/* export primitives as single/multi-features */ +int export_lines_fast(struct Map_info *In, int field, int otype, int multi, + int donocat, int force_poly, + OGRFeatureDefnH Ogr_featuredefn, OGRLayerH Ogr_layer, + struct field_info *Fi, dbDriver *driver, int ncol, + int *colctype, const char **colname, int doatt, int nocat, + int *n_noatt, int *n_nocat) +{ + if (multi) + /* export as multi-features */ + return export_lines_multi(In, field, otype, donocat, force_poly, + Ogr_featuredefn, Ogr_layer, Fi, driver, ncol, + colctype, colname, doatt, nocat, n_noatt, + n_nocat); + + /* export as single features */ + return export_lines_single( + In, field, otype, donocat, force_poly, Ogr_featuredefn, Ogr_layer, Fi, + driver, ncol, colctype, colname, doatt, nocat, n_noatt, n_nocat); +} + +/* export line as single feature */ +int export_lines_single(struct Map_info *In, int field, int otype, int donocat, + int force_poly, OGRFeatureDefnH Ogr_featuredefn, + OGRLayerH Ogr_layer, struct field_info *Fi, + dbDriver *driver, int ncol, int *colctype, + const char **colname, int doatt, int nocat, + int *n_noatt, int *n_nocat) +{ + int i, n_exported; + int cat, last_cat, db_cat, type; + + struct line_pnts *Points; + struct line_cats *Cats; + + int findex; + struct Cat_index *ci; + int cat_index, n_cats; + + dbString dbstring; + char 
buf[SQL_BUFFER_SIZE]; + dbCursor cursor; + int more; + int key_col_index; + + OGRGeometryH Ogr_geometry; + OGRFeatureH Ogr_feature; + + Points = Vect_new_line_struct(); + Cats = Vect_new_cats_struct(); + + n_exported = 0; + + /* get category index for given field */ + findex = Vect_cidx_get_field_index(In, field); + if (findex == -1) { + G_fatal_error(_("Unable to export multi-features. No category index " + "for layer %d."), + field); + } + + ci = &(In->plus.cidx[findex]); + n_cats = ci->n_cats; + + if (donocat) + G_message(_("Exporting features with category...")); + + key_col_index = -1; + more = 1; + if (doatt) { + /* select attributes ordered by category value */ + db_init_string(&dbstring); + sprintf(buf, "SELECT * FROM %s ORDER BY %s ASC", Fi->table, Fi->key); + G_debug(2, "SQL: %s", buf); + db_set_string(&dbstring, buf); + if (db_open_select_cursor(driver, &dbstring, &cursor, DB_SEQUENTIAL) != + DB_OK) { + G_fatal_error(_("Cannot select attributes sorted by %s"), Fi->key); + } + + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + + /* get index of key column */ + for (i = 0; i < ncol; i++) { + if (strcmp(Fi->key, colname[i]) == 0) { + key_col_index = i; + break; + } + } + } + + last_cat = -1; + db_cat = -1; + for (cat_index = 0; cat_index < n_cats; cat_index++) { + + G_percent(cat_index, n_cats, 5); + + if (!(ci->cat[cat_index][1] & otype)) + continue; + + cat = ci->cat[cat_index][0]; + /* make sure the cidx is ordered by cat */ + if (cat < last_cat) + G_fatal_error(_("Category index is not sorted ascending by cat!")); + last_cat = cat; + + i = ci->cat[cat_index][2]; + + /* read line */ + type = Vect_read_line(In, Points, Cats, i); + G_debug(2, "line = %d type = %d", i, type); + if (!(otype & type)) { + /* skip lines with different type */ + G_debug(2, "type %d not specified -> skipping", type); + continue; + } + + Ogr_geometry = build_geometry(In, Points, type, otype, force_poly); + + /* add feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* get attributes */ + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, Ogr_feature, + n_noatt, &cursor, &more, &db_cat, key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + OGR_G_DestroyGeometry(Ogr_geometry); + } + + if (donocat) + G_message(_("Exporting features without category...")); + + if (doatt) { + db_close_cursor(&cursor); + if (donocat) { + cat = -1; + if (db_open_select_cursor(driver, &dbstring, &cursor, + DB_SEQUENTIAL) != DB_OK) { + G_fatal_error(_("Cannot select attributes for cat = %d"), cat); + } + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + } + } + + /* this loop is needed to count features without category in the layer + * to be exported */ + Vect_rewind(In); + while (TRUE) { + type = Vect_read_next_line(In, Points, Cats); + if (type < 0) + break; + + Vect_cat_get(Cats, field, &cat); + if (cat >= 0) + continue; /* skip features with category */ + if (cat < 0 && !donocat) { + (*n_nocat)++; + continue; /* skip lines without category, do not export + * not labeled */ + } + + (*n_nocat)++; + + db_cat = -2; + cat = -1; + + /* code duplicated from above --> */ + Ogr_geometry = build_geometry(In, Points, type, otype, force_poly); + + /* add feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + 
OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* no attributes for features without category */ + cat = -1; + db_cat = -2; + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, Ogr_feature, + n_noatt, &cursor, &more, &db_cat, key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + OGR_G_DestroyGeometry(Ogr_geometry); + /* <-- code duplicated from above */ + } + if (doatt && donocat) + db_close_cursor(&cursor); + + Vect_destroy_line_struct(Points); + Vect_destroy_cats_struct(Cats); + + return n_exported; +} + +/* export line as multi-feature */ +int export_lines_multi(struct Map_info *In, int field, int otype, int donocat, + int force_poly, OGRFeatureDefnH Ogr_featuredefn, + OGRLayerH Ogr_layer, struct field_info *Fi, + dbDriver *driver, int ncol, int *colctype, + const char **colname, int doatt, int nocat, int *n_noatt, + int *n_nocat) +{ + int i, n_exported; + int cat, last_cat, db_cat, type; + int line, findex, ipart; + + struct line_pnts *Points; + struct line_cats *Cats; + struct ilist *line_list, *lcats; + + struct Cat_index *ci; + int cat_index, n_cats; + + dbString dbstring; + char buf[SQL_BUFFER_SIZE]; + dbCursor cursor; + int more; + int key_col_index; + + OGRGeometryH Ogr_geometry; + OGRFeatureH Ogr_feature; + OGRwkbGeometryType wkbtype, wkbtype_part; + + Points = Vect_new_line_struct(); + Cats = Vect_new_cats_struct(); + line_list = Vect_new_list(); + lcats = Vect_new_list(); + + n_exported = 0; + + /* check if category index is available for given field */ + findex = Vect_cidx_get_field_index(In, field); + if (findex == -1) { + G_fatal_error(_("Unable to export multi-features. No category index " + "for layer %d."), + field); + } + + ci = &(In->plus.cidx[findex]); + n_cats = ci->n_cats; + + if (donocat) + G_message(_("Exporting features with category...")); + + /* determine type */ + type = -1; /* unknown -> GeometryCollection */ + if (Vect_cidx_get_num_types_by_index(In, findex) == 1) + Vect_cidx_get_type_count_by_index(In, findex, 0, &type, NULL); + if (force_poly) + wkbtype_part = wkbPolygon; + else + wkbtype_part = get_wkbtype(type, otype); + wkbtype = get_multi_wkbtype(wkbtype_part); + + key_col_index = -1; + more = 1; + if (doatt) { + /* select attributes ordered by category value */ + db_init_string(&dbstring); + sprintf(buf, "SELECT * FROM %s ORDER BY %s ASC", Fi->table, Fi->key); + G_debug(2, "SQL: %s", buf); + db_set_string(&dbstring, buf); + if (db_open_select_cursor(driver, &dbstring, &cursor, DB_SEQUENTIAL) != + DB_OK) { + G_fatal_error(_("Cannot select attributes sorted by %s"), Fi->key); + } + + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + + /* get index of key column */ + key_col_index = -1; + for (i = 0; i < ncol; i++) { + if (strcmp(Fi->key, colname[i]) == 0) { + key_col_index = i; + break; + } + } + } + + last_cat = -1; + db_cat = -1; + cat_index = 0; + while (cat_index < n_cats) { + + G_percent(cat_index, n_cats, 5); + + cat = ci->cat[cat_index][0]; + /* make sure the cidx is ordered by cat */ + if (cat < last_cat) + G_fatal_error(_("Category index is not sorted ascending by cat!")); + last_cat = cat; + + /* collect all features with current cat */ + Vect_reset_list(line_list); + while (cat_index < n_cats && ci->cat[cat_index][0] == cat) { + if (ci->cat[cat_index][1] & otype) { + Vect_list_append(line_list, ci->cat[cat_index][2]); + } + cat_index++; + 
} + + /* create multi-feature */ + Ogr_geometry = OGR_G_CreateGeometry(wkbtype); + + /* build simple features geometry, go through all parts */ + for (ipart = 0; ipart < line_list->n_values; ipart++) { + line = line_list->value[ipart]; + G_debug(3, "cat=%d, line=%d -> part=%d", cat, line, ipart); + + /* read line */ + type = Vect_read_line(In, Points, Cats, line); + + /* check for category consistency */ + Vect_field_cat_get(Cats, field, lcats); + if (!Vect_val_in_list(lcats, cat)) + G_fatal_error(_("Unable to create multi-feature. " + "Category %d not found in line %d, field %d"), + cat, line, field); + + /* add part */ + add_part(Ogr_geometry, wkbtype_part, type == GV_LINE && force_poly, + Points); + } + + if (!OGR_G_IsEmpty(Ogr_geometry)) { + /* write multi-feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + /* get attributes */ + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, + Ogr_feature, n_noatt, &cursor, &more, &db_cat, + key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + } + else { + /* skip empty features */ + G_debug(3, "multi-feature is empty -> skipped"); + } + + OGR_G_DestroyGeometry(Ogr_geometry); + } + + if (donocat) + G_message(_("Exporting features without category...")); + + /* check lines without category, if -c flag is given write them as + * one multi-feature */ + Ogr_geometry = OGR_G_CreateGeometry(wkbtype); + + if (doatt) { + db_close_cursor(&cursor); + if (donocat) { + cat = -1; + if (db_open_select_cursor(driver, &dbstring, &cursor, + DB_SEQUENTIAL) != DB_OK) { + G_fatal_error(_("Cannot select attributes for cat = %d"), cat); + } + if (db_fetch(&cursor, DB_NEXT, &more) != DB_OK) + G_fatal_error(_("Unable to fetch data from table")); + } + } + + /* this loop is needed to count features without category in the layer + * to be exported */ + Vect_rewind(In); + while (TRUE) { + type = Vect_read_next_line(In, Points, Cats); + if (type < 0) + break; + + Vect_cat_get(Cats, field, &cat); + if (cat >= 0) + continue; /* skip features with category */ + if (cat < 0 && !donocat) { + (*n_nocat)++; + continue; /* skip lines without category, do not export + * not labeled */ + } + + (*n_nocat)++; + + /* add part */ + add_part(Ogr_geometry, wkbtype_part, type == GV_LINE && force_poly, + Points); + } + + if (!OGR_G_IsEmpty(Ogr_geometry)) { + /* write multi-feature */ + Ogr_feature = OGR_F_Create(Ogr_featuredefn); + OGR_F_SetGeometry(Ogr_feature, Ogr_geometry); + + /* no attributes for features without category */ + cat = -1; + db_cat = -2; + mk_att_fast(cat, Fi, ncol, colctype, colname, doatt, nocat, Ogr_feature, + n_noatt, &cursor, &more, &db_cat, key_col_index); + if (OGR_L_CreateFeature(Ogr_layer, Ogr_feature) != OGRERR_NONE) { + G_fatal_error(_("Failed to create OGR feature")); + } + else + n_exported++; + + OGR_F_Destroy(Ogr_feature); + } + else { + /* skip empty features */ + G_debug(3, "multi-feature is empty -> skipped"); + } + + OGR_G_DestroyGeometry(Ogr_geometry); + + if (donocat && doatt) + db_close_cursor(&cursor); + + Vect_destroy_line_struct(Points); + Vect_destroy_cats_struct(Cats); + Vect_destroy_list(line_list); + Vect_destroy_list(lcats); + + return n_exported; +} + +/* build polygon for closed line */ +void line_to_polygon(OGRGeometryH Ogr_geometry, const struct line_pnts *Points) +{ + int j; + OGRGeometryH ring; + + ring = 
OGR_G_CreateGeometry(wkbLinearRing); + + /* create a ring */ + for (j = 0; j < Points->n_points; j++) { + OGR_G_AddPoint(ring, Points->x[j], Points->y[j], Points->z[j]); + } + + /* close ring */ + if (Points->x[Points->n_points - 1] != Points->x[0] || + Points->y[Points->n_points - 1] != Points->y[0] || + Points->z[Points->n_points - 1] != Points->z[0]) { + OGR_G_AddPoint(ring, Points->x[0], Points->y[0], Points->z[0]); + } + + OGR_G_AddGeometryDirectly(Ogr_geometry, ring); +} + +void add_part(OGRGeometryH Ogr_geometry, OGRwkbGeometryType wkbtype_part, + int force_poly, struct line_pnts *Points) +{ + int j; + OGRGeometryH Ogr_geometry_part; + + Ogr_geometry_part = OGR_G_CreateGeometry(wkbtype_part); + if (force_poly) { + line_to_polygon(Ogr_geometry_part, Points); + } + else { + if (OGR_G_GetGeometryType(Ogr_geometry_part) == wkbPoint) { + /* GV_POINTS -> wkbPoint */ + OGR_G_AddPoint(Ogr_geometry_part, Points->x[0], Points->y[0], + Points->z[0]); + } + else { /* GV_LINES -> wkbLinestring */ + for (j = 0; j < Points->n_points; j++) { + OGR_G_AddPoint(Ogr_geometry_part, Points->x[j], Points->y[j], + Points->z[j]); + } + } + } + OGR_G_AddGeometryDirectly(Ogr_geometry, Ogr_geometry_part); +} + +static OGRGeometryH build_geometry(struct Map_info *In, + struct line_pnts *Points, int type, + int otype, int force_poly) +{ + OGRGeometryH Ogr_geometry; + + /* build simple features geometry */ + if ((type == GV_LINE && force_poly) || type == GV_FACE) { + /* lines to polygons + faces to 2.5D polygons */ + Ogr_geometry = OGR_G_CreateGeometry(wkbPolygon); + line_to_polygon(Ogr_geometry, Points); + } + else { + Ogr_geometry = OGR_G_CreateGeometry(get_wkbtype(type, otype)); + if (OGR_G_GetGeometryType(Ogr_geometry) == wkbPoint) { + /* GV_POINTS -> wkbPoint */ + if (Vect_is_3d(In)) + OGR_G_AddPoint(Ogr_geometry, Points->x[0], Points->y[0], + Points->z[0]); + else + OGR_G_AddPoint_2D(Ogr_geometry, Points->x[0], Points->y[0]); + } + else { + /* GV_LINES -> wkbLinestring */ + int j; + for (j = 0; j < Points->n_points; j++) { + if (Vect_is_3d(In)) + OGR_G_AddPoint(Ogr_geometry, Points->x[j], Points->y[j], + Points->z[j]); + else + OGR_G_AddPoint_2D(Ogr_geometry, Points->x[j], Points->y[j]); + } + } + } + + return Ogr_geometry; +} diff --git a/vector/v.out.ogr/local_proto.h b/vector/v.out.ogr/local_proto.h index a0406a350a1..07921ece8f3 100644 --- a/vector/v.out.ogr/local_proto.h +++ b/vector/v.out.ogr/local_proto.h @@ -12,7 +12,7 @@ struct Options { struct Option *input, *dsn, *layer, *type, *format, *field, *dsco, *lco, - *otype; + *otype, *method; }; struct Flags { @@ -23,10 +23,14 @@ struct Flags { /* args.c */ void parse_args(int, char **, struct Options *, struct Flags *); -/* attributes.c */ +/* attrb.c */ int mk_att(int, struct field_info *, dbDriver *, int, int *, const char **, int, int, OGRFeatureH, int *); +/* attrb_fast.c */ +int mk_att_fast(int, struct field_info *, int, int *, const char **, int, int, + OGRFeatureH, int *, dbCursor *, int *, int *, int); + /* dsn.c */ char *get_datasource_name(const char *, int); @@ -46,7 +50,18 @@ int export_lines(struct Map_info *, int, int, int, int, int, OGRFeatureDefnH, OGRLayerH, struct field_info *, dbDriver *, int, int *, const char **, int, int, int *, int *); +/* export_lines_fast.c */ +int export_lines_fast(struct Map_info *, int, int, int, int, int, + OGRFeatureDefnH, OGRLayerH, struct field_info *, + dbDriver *, int, int *, const char **, int, int, int *, + int *); + /* export_areas.c */ int export_areas(struct Map_info *, int, int, int, 
OGRFeatureDefnH, OGRLayerH, struct field_info *, dbDriver *, int, int *, const char **, int, int, int *, int *, int); + +/* export_areas_fast.c */ +int export_areas_fast(struct Map_info *, int, int, int, OGRFeatureDefnH, + OGRLayerH, struct field_info *, dbDriver *, int, int *, + const char **, int, int, int *, int *, int); diff --git a/vector/v.out.ogr/main.c b/vector/v.out.ogr/main.c index 788d27a3665..b0c2b34419d 100644 --- a/vector/v.out.ogr/main.c +++ b/vector/v.out.ogr/main.c @@ -820,11 +820,20 @@ int main(int argc, char *argv[]) Vect_get_num_primitives(&In, otype)), Vect_get_num_primitives(&In, otype)); - n_feat += export_lines( - &In, field, otype, flags.multi->answer ? TRUE : FALSE, donocat, - ftype == GV_BOUNDARY ? TRUE : FALSE, Ogr_featuredefn, Ogr_layer, Fi, - Driver, ncol, colctype, colname, doatt, - flags.nocat->answer ? TRUE : FALSE, &n_noatt, &n_nocat); + if (strcmp(options.method->answer, "slow") == 0) { + n_feat += export_lines( + &In, field, otype, flags.multi->answer ? TRUE : FALSE, donocat, + ftype == GV_BOUNDARY ? TRUE : FALSE, Ogr_featuredefn, Ogr_layer, + Fi, Driver, ncol, colctype, colname, doatt, + flags.nocat->answer ? TRUE : FALSE, &n_noatt, &n_nocat); + } + else { + n_feat += export_lines_fast( + &In, field, otype, flags.multi->answer ? TRUE : FALSE, donocat, + ftype == GV_BOUNDARY ? TRUE : FALSE, Ogr_featuredefn, Ogr_layer, + Fi, Driver, ncol, colctype, colname, doatt, + flags.nocat->answer ? TRUE : FALSE, &n_noatt, &n_nocat); + } } /* Areas (run always to count features of different type) */ @@ -834,11 +843,20 @@ int main(int argc, char *argv[]) Vect_get_num_areas(&In)), Vect_get_num_areas(&In)); - n_feat += export_areas(&In, field, flags.multi->answer ? TRUE : FALSE, - donocat, Ogr_featuredefn, Ogr_layer, Fi, Driver, - ncol, colctype, colname, doatt, - flags.nocat->answer ? TRUE : FALSE, &n_noatt, - &n_nocat, outer_ring_ccw); + if (strcmp(options.method->answer, "slow") == 0) { + n_feat += export_areas( + &In, field, flags.multi->answer ? TRUE : FALSE, donocat, + Ogr_featuredefn, Ogr_layer, Fi, Driver, ncol, colctype, colname, + doatt, flags.nocat->answer ? TRUE : FALSE, &n_noatt, &n_nocat, + outer_ring_ccw); + } + else { + n_feat += export_areas_fast( + &In, field, flags.multi->answer ? TRUE : FALSE, donocat, + Ogr_featuredefn, Ogr_layer, Fi, Driver, ncol, colctype, colname, + doatt, flags.nocat->answer ? 
TRUE : FALSE, &n_noatt, &n_nocat, + outer_ring_ccw); + } } /* diff --git a/vector/v.out.ogr/v.out.ogr.html b/vector/v.out.ogr/v.out.ogr.html index 664a5c35a1a..e97d194281d 100644 --- a/vector/v.out.ogr/v.out.ogr.html +++ b/vector/v.out.ogr/v.out.ogr.html @@ -13,15 +13,15 @@ <h2>DESCRIPTION</h2> The OGR library supports many various formats including: <ul> - <li><a href="https://gdal.org/drv_geopackage.html">OGC GeoPackage</a></li> - <li><a href="https://gdal.org/drv_shapefile.html">ESRI Shapefile</a></li> - <li><a href="https://gdal.org/drv_pg.html">PostGIS</a></li> - <li><a href="https://gdal.org/drv_sqlite.html">SpatiaLite</a></li> - <li><a href="https://gdal.org/drv_csv.html">CSV</a></li> - <li><a href="https://gdal.org/drv_gml.html">GML</a></li> - <li><a href="https://gdal.org/drv_kml.html">KML</a></li> - <li><a href="https://gdal.org/drv_mitab.html">Mapinfo File</a></li> - <li><a href="https://gdal.org/drv_tiger.html">TIGER</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/gpkg.html">OGC GeoPackage</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/shapefile.html">ESRI Shapefile</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/pg.html">PostGIS</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/sqlite.html">SpatiaLite</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/csv.html">CSV</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/gml.html">GML</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/kml.html">KML</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/mitab.html">Mapinfo File</a></li> + <li><a href="https://gdal.org/en/stable/drivers/vector/tiger.html">TIGER</a></li> <li>... and many others</li> </ul> @@ -29,7 +29,7 @@ <h2>DESCRIPTION</h2> <p> For further available other supported formats go -<a href="https://gdal.org/drivers/vector/">here</a>. +<a href="https://gdal.org/en/stable/drivers/vector/">here</a>. <h2>NOTES</h2> @@ -63,7 +63,7 @@ <h2>NOTES</h2> <p> Performance: export to SQLite based formats including OGC GeoPackage may -become faster with the environmental variable <tt>OGR_SQLITE_CACHE=1024</tt> +become faster with the environmental variable <code>OGR_SQLITE_CACHE=1024</code> being set (value depends on available RAM, see <a href="https://trac.osgeo.org/gdal/wiki/ConfigOptions#OGR_SQLITE_CACHE">OGR ConfigOptions</a>). diff --git a/vector/v.out.postgis/v.out.postgis.html b/vector/v.out.postgis/v.out.postgis.html index aa9bd459778..0c0906097db 100644 --- a/vector/v.out.postgis/v.out.postgis.html +++ b/vector/v.out.postgis/v.out.postgis.html @@ -6,48 +6,48 @@ <h2>DESCRIPTION</h2> <p> By default GRASS GIS topological features are converted into simple features -(see <a href="http://www.opengeospatial.org/standards/sfa">OGC Simple +(see <a href="https://www.ogc.org/publications/standard/sfa/">OGC Simple Feature Access</a> specification for details). Flag <b>-l</b> allows to export vector features as topological elements stored -in <a href="http://postgis.refractions.net/docs/Topology.html">PostGIS +in <a href="https://postgis.net/docs/Topology.html">PostGIS Topology</a> schema. Note that topological export requires PostGIS version 2 or later. 
<p> Additional creation options can be defined by <b>options</b> parameter: <ul> - <li><tt>FID=<column></tt> - name of column which will be used as - primary key (feature id), default: <tt>fid</tt></li> - <li><tt>GEOMETRY_NAME=<column></tt> name of column which will - be used for storing geometry data in feature table, default: <tt>geom</tt></li> - <li><tt>SPATIAL_INDEX=YES|NO</tt> - enable/disable creating spatial - index on geometry column, default: <tt>YES</tt></li> - <li><tt>PRIMARY_KEY=YES|NO</tt> - enable/disable adding primary key - on FID column, default: <tt>YES</tt></li> - <li><tt>SRID=<value></tt> - spatial reference identifier, + <li><code>FID=<column></code> - name of column which will be used as + primary key (feature id), default: <code>fid</code></li> + <li><code>GEOMETRY_NAME=<column></code> name of column which will + be used for storing geometry data in feature table, default: <code>geom</code></li> + <li><code>SPATIAL_INDEX=YES|NO</code> - enable/disable creating spatial + index on geometry column, default: <code>YES</code></li> + <li><code>PRIMARY_KEY=YES|NO</code> - enable/disable adding primary key + on FID column, default: <code>YES</code></li> + <li><code>SRID=<value></code> - spatial reference identifier, default: not defined</li> </ul> PostGIS Topology related options (relevant only for <b>-l</b> flag): <ul> - <li><tt>TOPOSCHEMA_NAME=<schema name></tt> - name of PostGIS - Topology schema, default: <tt>topo_<input></tt></li> - <li><tt>TOPOGEOM_NAME=<column></tt> - name of column which + <li><code>TOPOSCHEMA_NAME=<schema name></code> - name of PostGIS + Topology schema, default: <code>topo_<input></code></li> + <li><code>TOPOGEOM_NAME=<column></code> - name of column which will be used for storing topogeometry data in feature table, - default: <tt>topo</tt></li> - <li><tt>TOPO_TOLERANCE=<value></tt> - tolerance for PostGIS + default: <code>topo</code></li> + <li><code>TOPO_TOLERANCE=<value></code> - tolerance for PostGIS Topology schema, - see <a href="http://www.postgis.net/docs/manual-2.0/CreateTopology.html">CreateTopology</a> - function for defails, default: <tt>0</tt></li> - <li><tt>TOPO_GEO_ONLY=YES|NO</tt> - store in PostGIS Topology schema + see <a href="https://postgis.net/docs/CreateTopology.html">CreateTopology</a> + function for details, default: <code>0</code></li> + <li><code>TOPO_GEO_ONLY=YES|NO</code> - store in PostGIS Topology schema only data relevant to Topo-Geo data model, - default: <tt>NO</tt></li> + default: <code>NO</code></li> </ul> <p> Creation <b>options</b> are comma-separated pairs -(<tt>key=value</tt>), the options are case-insensitive. Note +(<code>key=value</code>), the options are case-insensitive. Note that <b>options</b> defined by <em><a href="v.external.out.html">v.external.out</a></em> are ignored by <em>v.out.postgis</em>. @@ -63,9 +63,9 @@ <h2>NOTES</h2> areas) become polygons, isles become holes. Geometry of simple feature elements is stored in PostGIS feature table in the column named "geom". Name of the geometry column can be changed -by <b>options=</b><tt>GEOMETRY_NAME=<column></tt>. Note that for +by <b>options=</b><code>GEOMETRY_NAME=<column></code>. Note that for exporting vector features as simple features can be alternatively -used <a href="https://gdal.org/drv_pg.html">PostgreSQL driver</a> +used <a href="https://gdal.org/en/stable/drivers/vector/pg.html">PostgreSQL driver</a> from <a href="https://gdal.org/">OGR</a> library through <em><a href="v.out.ogr.html">v.out.ogr</a></em> module. 
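<p>
A minimal sketch contrasting the two export paths just mentioned (the database name <code>grassdb</code> and the North Carolina sample map <code>urbanarea</code> are only illustrative; an existing PostGIS-enabled database is assumed):
<div class="code"><pre>
# native GRASS-PostGIS data provider
v.out.postgis input=urbanarea output="PG:dbname=grassdb"

# alternative: OGR library with its PostgreSQL driver
v.out.ogr input=urbanarea output="PG:dbname=grassdb" format=PostgreSQL
</pre></div>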
@@ -83,19 +83,19 @@ <h2>NOTES</h2> <em>v.out.postgis</em> currently supports only three basic output simple feature types: Points, Linestrings and Polygons. Also 3D features of the same type are supported, eg. 3D points are exported -as <tt>PointZ</tt> simple feature. Faces are exported as 3D +as <code>PointZ</code> simple feature. Faces are exported as 3D polygons. 3D features are written to the output automatically if input vector map is 3D. If <b>-2</b> flag is given then the output is always 2D (z-coordinate is silently ignored for 3D input vector maps). <p> Multigeometries are not currently supported. Features with the same -category are exported as multiple singe features. +category are exported as multiple single features. <p> <em>v.out.postgis</em> also allows exporting vector features as <em>topological elements</em> -in <a href="http://postgis.refractions.net/docs/Topology.html">PostGIS +in <a href="https://postgis.net/docs/Topology.html">PostGIS Topology</a> schema. PostGIS Topology extension uses three tables to store basic topological elements which forms topological objects like areas or isles in GRASS terminology. <em>Nodes</em> (0-dimensional @@ -116,9 +116,9 @@ <h2>NOTES</h2> Tables <i>node</i>, <i>edge</i> and <i>face</i> are stored in given topological schema. By default <em>v.out.postgis</em> defines its name -as <tt>topo_<input></tt>. Alternatively, the name for topology +as <code>topo_<input></code>. Alternatively, the name for topology schema can be defined -by <b>options=</b><tt>TOPOSCHEMA_NAME=<name></tt>. +by <b>options=</b><code>TOPOSCHEMA_NAME=<name></code>. <h2>EXAMPLES</h2> @@ -154,7 +154,7 @@ <h3>Export Simple Features</h3> OGR library, namely using PostgreSQL driver. Contrary to the <em><a href="v.out.ogr.html">v.out.ogr</a></em> module, <em>v.out.postgis</em> is using directly PostGIS data provider -which is part of GRASS vector engine. Beside +which is part of GRASS vector engine. Besides that, <em>v.out.postgis</em> is optimized for PostGIS export including topological access to the data. @@ -162,7 +162,7 @@ <h3>Export data into specific database schema</h3> Database schema for storing exported data can be defined by <b>output_layer</b> as -<tt><schema_name>.<table_name></tt>. If the specified +<code><schema_name>.<table_name></code>. If the specified schema doesn't exist in the database, then it's automatically created. <p> @@ -177,7 +177,7 @@ <h3>Export data with creation options</h3> Example below demonstrates how to define name for geometry column and disable building spatial index. Spatial reference system is defined -by <tt>srid</tt> identifier which corresponds in this case with EPSG +by <code>srid</code> identifier which corresponds in this case with EPSG 3358 (North Carolina dataset). <div class="code"><pre> @@ -233,7 +233,7 @@ <h3>Export topological data</h3> By default <em>v.out.postgis</em> exports data as simple features. Flag <b>-l</b> allows exporting data as topological elements instead of simple features. Export topological elements is stored in -<a href="http://postgis.refractions.net/docs/Topology.html">PostGIS +<a href="https://postgis.net/docs/Topology.html">PostGIS Topology</a> schema. 
<div class="code"><pre> @@ -258,14 +258,14 @@ <h2>TODO</h2> <h2>REQUIREMENTS</h2> <ul> - <li>PostGIS 2.x or later for topological export (flag <b>-l</b>) + <li>PostGIS 2.x or later for topological export (flag <b>-l</b>)</li> </ul> <h2>REFERENCES</h2> <ul> - <li><a href="http://www.opengeospatial.org/standards/sfa">OGC Simple Feature Access</a> specification</li> - <li><a href="http://postgis.net/docs/Topology.html">PostGIS Topology</a> documentation</li> + <li><a href="https://www.ogc.org/publications/standard/sfa/">OGC Simple Feature Access</a> specification</li> + <li><a href="https://postgis.net/docs/Topology.html">PostGIS Topology</a> documentation</li> <li><a href="https://grass.osgeo.org/programming8/vlibPg.html">GRASS-PostGIS data provider</a></li> </ul> diff --git a/vector/v.out.vtk/v.out.vtk.html b/vector/v.out.vtk/v.out.vtk.html index 47bae357546..ff4f2945e5a 100644 --- a/vector/v.out.vtk/v.out.vtk.html +++ b/vector/v.out.vtk/v.out.vtk.html @@ -9,12 +9,12 @@ <h2>NOTES</h2> The following vector types can be exported together in one VTK ascii file: <ul> - <li>point</li> - <li>line</li> - <li>centroid</li> - <li>boundary</li> - <li>area</li> - <li>face</li> +<li>point</li> +<li>line</li> +<li>centroid</li> +<li>boundary</li> +<li>area</li> +<li>face</li> </ul> Category data (cat) for the selected vector type and layer will be written as scalar @@ -49,9 +49,9 @@ <h2>NOTES</h2> <li><i>vtk polygons</i> -- representing areas and faces </li> </ul> <p>The VTK file can be visualized with -<em><a href="http://www.vtk.org">VTK Toolkit</a></em>, -<em><a href="http://www.paraview.org">Paraview</a></em> and -<em><a href="http://mayavi.sourceforge.net">MayaVi</a></em>. +<em><a href="https://vtk.org/">VTK Toolkit</a></em>, +<em><a href="https://www.paraview.org/">Paraview</a></em> and +<em><a href="https://github.com/enthought/mayavi">MayaVi</a></em>. 
<h3>Attention</h3> <p>If areas or faces are exported, the data have to be triangulated within Paraview or diff --git a/vector/v.overlay/v.overlay.html b/vector/v.overlay/v.overlay.html index 0077f2915f5..7069ad58b23 100644 --- a/vector/v.overlay/v.overlay.html +++ b/vector/v.overlay/v.overlay.html @@ -164,13 +164,13 @@ <h3>Overlay operations: AND, OR, NOT, XOR</h3> </pre></div> <center> -<img src="v_overlay_poly_1_2.png" alt="GRASS v.overlay: input polygons (1 and 2)" border=0> +<img src="v_overlay_poly_1_2.png" alt="GRASS v.overlay: input polygons (1 and 2)" border="0"> <br> <i>Figure: v.overlay operations: original input polygons</i> </center> <p> <center> -<img src="v_overlay_poly_1_2_a_o_n_x.png" alt="GRASS v.overlay results: AND, OR, NOT, XOR operations" border=0> +<img src="v_overlay_poly_1_2_a_o_n_x.png" alt="GRASS v.overlay results: AND, OR, NOT, XOR operations" border="0"> <br> <i>Figure: v.overlay results of AND, OR, NOT, XOR operations</i> </center> @@ -219,9 +219,9 @@ <h3>Polygons overlaid with polygons</h3> </pre></div> <center> -<img src="v_overlay_urbanarea.png" alt="GRASS v.overlay: polygon to polygon union (input 1)" border=1> -<img src="v_overlay_census_wake2000.png" alt="GRASS v.overlay: polygon to polygon union (input 2)" border=1> -<img src="v_overlay_urban_census2000.png" alt="GRASS v.overlay: polygon to polygon union (result)" border=1> +<img src="v_overlay_urbanarea.png" alt="GRASS v.overlay: polygon to polygon union (input 1)" border="1"> +<img src="v_overlay_census_wake2000.png" alt="GRASS v.overlay: polygon to polygon union (input 2)" border="1"> +<img src="v_overlay_urban_census2000.png" alt="GRASS v.overlay: polygon to polygon union (result)" border="1"> <br> <i>Figure: v.overlay: Polygon union (right) of urban area (left) and Census 2000 (middle) areas (North Carolina dataset)</i> </center> @@ -247,7 +247,7 @@ <h3>Lines overlaid with polygons</h3> <center> <img src="v_overlay_area_lines.png" alt="GRASS v.overlay: Line to polygon clipping"><br> -<table border=0 width=590> +<table border="0" width="590"> <tr><td><center> <i>Figure: v.overlay: Line to polygon clipping</i> </center></td></tr> diff --git a/vector/v.patch/v.patch.html b/vector/v.patch/v.patch.html index 6e49b5509f4..7d648ecc473 100644 --- a/vector/v.patch/v.patch.html +++ b/vector/v.patch/v.patch.html @@ -13,8 +13,8 @@ <h2>NOTES</h2> editing can be done automatically using <em><a href="v.clean.html">v.clean</a></em>. <p> -Lines may need to be snapped with <em><a -href="v.clean.html">v.clean</a> tool=snap,break,rmdupl</em>. +Lines may need to be snapped with +<em><a href="v.clean.html">v.clean</a> tool=snap,break,rmdupl</em>. <p> Boundaries may need to be cleaned with <em><a href="v.clean.html">v.clean</a> tool=break,rmdupl,rmsa</em> diff --git a/vector/v.proj/v.proj.html b/vector/v.proj/v.proj.html index 62d32bbd958..8552b0327f6 100644 --- a/vector/v.proj/v.proj.html +++ b/vector/v.proj/v.proj.html @@ -66,9 +66,9 @@ <h2>REFERENCES</h2> <li> Evenden, G.I. (1990) <a href="https://proj.org">Cartographic projection procedures for the UNIX environment - a user's manual.</a> USGS Open-File Report 90-284 (OF90-284.pdf) - See also there: Interim Report and 2nd Interim Report on Release 4, Evenden 1994). + See also there: Interim Report and 2nd Interim Report on Release 4, Evenden 1994).</li> <li> Richards, John A. (1993), Remote Sensing Digital Image Analysis, - Springer-Verlag, Berlin, 2nd edition. 
+ Springer-Verlag, Berlin, 2nd edition.</li> </ol> <a href="https://proj.org">PROJ</a>: Projection/datum support library. @@ -76,12 +76,12 @@ <h2>REFERENCES</h2> <p> <b>Further reading</b> <ul> - <li> <a href="https://www.asprs.org/asprs-publications/grids-and-datums">ASPRS Grids and Datum</a> - <li> <a href="http://geotiff.maptools.org/proj_list/">Projections Transform List</a> (PROJ) - <li> <a href="https://proj.org/operations/index.html">Coordinate operations</a> by PROJ (projections, conversions, transformations, pipeline operator) - <li> <a href="http://www.mapref.org">MapRef - - The Collection of Map Projections and Reference Systems for Europe</a> - <li> <a href="http://www.crs-geo.eu">Information and Service System for European Coordinate Reference Systems - CRS</a> + <li> <a href="https://www.asprs.org/asprs-publications/grids-and-datums">ASPRS Grids and Datum</a></li> + <li> <a href="http://geotiff.maptools.org/proj_list/">Projections Transform List</a> (PROJ)</li> + <li> <a href="https://proj.org/operations/index.html">Coordinate operations</a> by PROJ (projections, conversions, transformations, pipeline operator)</li> + <li> <a href="https://mapref.org">MapRef - + The Collection of Map Projections and Reference Systems for Europe</a></li> + <li> <a href="https://www.crs-geo.eu">Information and Service System for European Coordinate Reference Systems - CRS</a></li> </ul> <h2>SEE ALSO</h2> diff --git a/vector/v.random/v.random.html b/vector/v.random/v.random.html index f663a924cfe..abfc25d8c67 100644 --- a/vector/v.random/v.random.html +++ b/vector/v.random/v.random.html @@ -29,14 +29,14 @@ <h3>Restriction to vector areas</h3> <p> Attributes attached to <b>restrict</b> vector map are also transferred -if the <b>layer</b> parameter is defined > 0, +if the <b>layer</b> parameter is defined > 0, see <em><a href="#stratified-random-sampling:-random-sampling-from-vector-map-by-attribute">example</a></em> below. <h2>NOTES</h2> -Importantly, attributes will only be transferred if <b>layer</b> > 0 -(e.g., <tt>layer=1</tt>). +Importantly, attributes will only be transferred if <b>layer</b> > 0 +(e.g., <code>layer=1</code>). 
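<p>
A minimal sketch of such an attribute-transferring run (the restricting map <code>zipcodes_wake</code> and the point count are only illustrative):
<div class="code"><pre>
# sample 100 random points inside the restricting areas;
# layer=1 transfers the area attributes to the generated points
v.random output=random_samples npoints=100 restrict=zipcodes_wake layer=1
</pre></div>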
<h2>EXAMPLES</h2> @@ -110,7 +110,7 @@ <h3>Generating random points in 3D</h3> <p> <center> -<img src="vrandom_z.png" border=1><br> +<img src="vrandom_z.png" border="1"><br> Random points with different X, Y, and Z coordinates </center> @@ -153,7 +153,7 @@ <h3>Generating random adjacent areas</h3> </pre></div> <p> <center> -<img src="vrandom_polygons.png" border=1><br> +<img src="vrandom_polygons.png" border="1"><br> Random adjacent areas from random points (here: used as centroids) </center> @@ -227,7 +227,7 @@ <h3>Stratified random sampling: Random sampling from vector map by attribute</h3 --> <p> <center> -<img src="vrandom_restricted_attr.png" border=1><br> +<img src="vrandom_restricted_attr.png" border="1"><br> Random points only sampled in forested areas (stratified random sampling) </center> @@ -250,7 +250,7 @@ <h3>Stratified random sampling: Random sampling from vector map with spatial con --> <p> <center> -<img src="vrandom_restricted_area.png" border=1><br> +<img src="vrandom_restricted_area.png" border="1"><br> Two random points sampled in each individual water body (stratified random sampling) </center> diff --git a/vector/v.reclass/v.reclass.html b/vector/v.reclass/v.reclass.html index 4b206db4065..6a75033376c 100644 --- a/vector/v.reclass/v.reclass.html +++ b/vector/v.reclass/v.reclass.html @@ -10,9 +10,9 @@ <h2>DESCRIPTION</h2> keyword value </pre></div> (separated by space) or comment beginning with '#' (hash). -Definition of new category begins with keyword <I>cat</I> followed +Definition of new category begins with keyword <em>cat</em> followed by the new category value. -Keyword <I>where</I> specifies SQL where condition. +Keyword <em>where</em> specifies SQL where condition. <h2>NOTES</h2> @@ -51,12 +51,12 @@ <h3>Example 1: Reclass by rules</h3> <em>land</em> with area category values selected from database by SQL select statement: <br> -<tt>select id from tland where use = 'E13' and owner = 'Jara Cimrman'</tt> +<code>select id from tland where use = 'E13' and owner = 'Jara Cimrman'</code> changed to category 1; <br> values selected from database by SQL select statement: <br> -<tt>select id from tland where use = 'E14'</tt> changed to category 2. +<code>select id from tland where use = 'E14'</code> changed to category 2. <h3>Example 2: Reclass by attribute column</h3> diff --git a/vector/v.rectify/v.rectify.html b/vector/v.rectify/v.rectify.html index a90eefef229..e2cd9d936bf 100644 --- a/vector/v.rectify/v.rectify.html +++ b/vector/v.rectify/v.rectify.html @@ -46,26 +46,26 @@ <h3>Coordinate transformation and RMSE</h3> <p>The desired order of transformation (1, 2, or 3) is selected with the <b>order</b> option. -If the <b>-r</b> flag is given, <em>v.rectify</em> will calculate the +If the <b>-r</b> flag is given, <em>v.rectify</em> will calculate the Root Mean Square Error (RMSE) and print out statistics in tabular format. -The last row gives a summary with the first column holding the number of -active points, followed by average deviations for each dimension and both -forward and backward transformation and finally forward and backward +The last row gives a summary with the first column holding the number of +active points, followed by average deviations for each dimension and both +forward and backward transformation and finally forward and backward overall RMSE. 
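<p>
For reference, a sketch of the usual definition behind the reported forward and backward RMSE values, where d_i is the residual distance of active control point i after the transformation and n is the number of active points:

\[ \mathrm{RMSE} = \sqrt{\frac{1}{n} \sum_{i=1}^{n} d_i^{2}} \]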
<h4>2D linear affine transformation (1st order transformation)</h4> <dl> - <dd> x' = a1 + b1 * x + c1 * y - <dd> y' = a2 + b2 * x + c2 * y +<dd> x' = a1 + b1 * x + c1 * y +<dd> y' = a2 + b2 * x + c2 * y </dl> <h4>3D linear affine transformation (1st order transformation)</h4> <dl> - <dd> x' = a1 + b1 * x + c1 * y + d1 * z - <dd> y' = a2 + b2 * x + c2 * y + d2 * z - <dd> z' = a3 + b3 * x + c3 * y + d3 * z +<dd> x' = a1 + b1 * x + c1 * y + d1 * z +<dd> y' = a2 + b2 * x + c2 * y + d2 * z +<dd> z' = a3 + b3 * x + c3 * y + d3 * z </dl> The a,b,c,d coefficients are determined by least squares regression @@ -111,7 +111,7 @@ <h2>SEE ALSO</h2> <a href="m.transform.html">m.transform</a>, <a href="r.proj.html">r.proj</a>, <a href="v.proj.html">v.proj</a>, -<a href="v.transform.html">v.transform</a>, +<a href="v.transform.html">v.transform</a> </em> <br> diff --git a/vector/v.segment/v.segment.html b/vector/v.segment/v.segment.html index 43f83abdd44..eda0b89c916 100644 --- a/vector/v.segment/v.segment.html +++ b/vector/v.segment/v.segment.html @@ -1,7 +1,7 @@ <h2>DESCRIPTION</h2> <em>v.segment</em> generates segments or points from input lines and from -positions read from a text file or '<tt>stdin</tt>'. It includes the creation +positions read from a text file or '<code>stdin</code>'. It includes the creation of parallel lines or points in given destination from the line. <p>The format is: @@ -15,7 +15,7 @@ <h2>DESCRIPTION</h2> line. <p> -The user could send to <tt>stdin</tt> something like: +The user could send to <code>stdin</code> something like: <div class="code"><pre> P 1 356 24.56 P 2 495 12.31 diff --git a/vector/v.select/v.select.html b/vector/v.select/v.select.html index 5d32e2fe2e4..f4004a897b0 100644 --- a/vector/v.select/v.select.html +++ b/vector/v.select/v.select.html @@ -5,7 +5,7 @@ <h2>DESCRIPTION</h2> <p>Supported operators (without GEOS; using GRASS' own algorithm): <ul> - <li><b>overlap</b> - features partially or completely overlap (GEOS equivalent: intersects) + <li><b>overlap</b> - features partially or completely overlap (GEOS equivalent: intersects)</li> </ul> Supported operators (internally using @@ -240,7 +240,7 @@ <h3>RELATE feature A is spatially related to feature B (using GEOS)</h3> d.vect map=v_select_TOUCHES </pre></div> -The result of <tt>relate='T********'</tt> is the same as seen +The result of <code>relate='T********'</code> is the same as seen above in the example 'TOUCHES'. See the <a href="https://en.wikipedia.org/wiki/DE-9IM">DE-9IM</a> page diff --git a/vector/v.support/v.support.html b/vector/v.support/v.support.html index 706a04a12a7..565c4fbfe63 100644 --- a/vector/v.support/v.support.html +++ b/vector/v.support/v.support.html @@ -1,6 +1,8 @@ <h2>DESCRIPTION</h2> -<em>v.support</em> is used to set/update vector map metadata. +<em>v.support</em> is used to set/update vector map metadata. While GRASS +GIS typically generates these metadata entries automatically, <em>v.support</em> +allows users to manually edit them when necessary. 
<h2>EXAMPLE</h2> @@ -16,8 +18,8 @@ <h2>EXAMPLE</h2> <h2>SEE ALSO</h2> <em> - <a href="v.build.html">v.build</a>, - <a href="v.info.html">v.info</a> +<a href="v.build.html">v.build</a>, +<a href="v.info.html">v.info</a> </em> <h2>AUTHOR</h2> diff --git a/vector/v.surf.bspline/v.surf.bspline.html b/vector/v.surf.bspline/v.surf.bspline.html index 1d6199d189e..ea101c9beb7 100644 --- a/vector/v.surf.bspline/v.surf.bspline.html +++ b/vector/v.surf.bspline/v.surf.bspline.html @@ -2,79 +2,78 @@ <h2>DESCRIPTION</h2> <em>v.surf.bspline</em> performs a bilinear/bicubic spline interpolation with Tykhonov regularization. The <b>input</b> is a 2D -or 3D vector <em>points</em> map. Values to interpolate can be the z +or 3D vector <em>point</em> layer. Values to interpolate can be the z values of 3D points or the values in a user-specified attribute column -in a 2D or 3D vector map. Output can be a raster -(<b>raster_output</b>) or vector (<b>output</b>) map. Optionally, a -"sparse point" vector map can be input which indicates the location +in a 2D or 3D vector layer. Output can be a raster +(<b>raster_output</b>) or vector (<b>output</b>) layer. Optionally, a +"sparse point" vector layer can be input which indicates the location of <b>output</b> vector points. <h2>NOTES</h2> <p>From a theoretical perspective, the interpolating procedure takes place in two parts: the first is an estimate of the linear coefficients -of a spline function is derived from the observation points using a -least squares regression; the second is the computation of the -interpolated surface (or interpolated vector points). As used here, the +of a spline function, which is derived from the observation points using a +least squares regression. The second is the computation of the +interpolated surface or interpolated vector points. As used here, the splines are 2D piece-wise non-zero polynomial functions calculated -within a limited, 2D area. The length (in mapping units) of each spline +within a limited, 2D area. The length, in mapping units, of each spline step is defined by <b>ew_step</b> for the east-west direction and <b>ns_step</b> for the north-south direction. For optimal performance, the length of spline step should be no less than the distance between observation points. Each vector point observation is modeled as a linear function of the non-zero splines in the area around the observation. The least squares regression predicts the the coefficients -of these linear functions. Regularization, avoids the need to have one +of these linear functions. Regularization avoids the need to have one observation and one coefficient for each spline (in order to avoid instability). <p>With regularly distributed data points, a spline step corresponding to the maximum distance between two points in both the east and north -directions is sufficient. But often data points are not regularly -distributed and require statistial regularization or estimation. In +directions is sufficient. However, data points are often not regularly +distributed and require statistical regularization or estimation. In such cases, v.surf.bspline will attempt to minimize the gradient of bilinear splines or the curvature of bicubic splines in areas lacking -point observations. As a general rule, spline step length should be +point observations. As a general rule, the spline step length should be greater than the mean distance between observation points (twice the distance between points is a good starting point). 
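<p>
A short sketch of applying that rule of thumb (the <b>-e</b> density/distance estimation flag and the concrete step values are assumptions here, shown only for illustration):
<div class="code"><pre>
# report estimated point density and mean point distance, then quit
v.surf.bspline -e input=point_vector

# set both spline steps to roughly twice the reported mean distance
v.surf.bspline input=point_vector raster_output=interp_raster method=bilinear ew_step=50 ns_step=50
</pre></div>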
Separate east-west and north-south spline step length arguments allows the user to account for some degree of anisotropy in the distribution of -observation points. Short spline step lengths - especially spline step -lengths that are less than the distance between observation points - +observation points. Short spline step lengths, especially spline step +lengths that are less than the distance between observation points, can greatly increase the processing time. -<p>Moreover, the maximum number of splines for each direction at each +<p>The maximum number of splines for each direction at each time is fixed, regardless of the spline step length. As the total -number of splines used increases (i.e., with small spline step +number of splines increases (i.e., with small spline step lengths), the region is automatically split into subregions for interpolation. Each subregion can contain no more than 150x150 splines. To avoid subregion boundary problems, subregions are created to partially overlap each other. A weighted mean of observations, based on point locations, is calculated within each subregion. -<p>The Tykhonov regularization parameter (<b>lambda_i</b>) acts to +<p>The Tykhonov regularization parameter, <b>lambda_i</b>, acts to smooth the interpolation. With a small <b>lambda_i</b>, the interpolated surface closely follows observation points; a larger value will produce a smoother interpolation. -<p>The input can be a 2D or 3D vector points map. If input is 3D +<p>The input can be a 2D or 3D point vector layer. If input is 3D and <b>column</b> is not given than z-coordinates are used for interpolation. Parameter <b>column</b> is required when input is 2D -vector map. - -<p><em>v.surf.bspline</em> can produce a <b>raster_output</b> OR -a <b>output</b> (but NOT simultaneously). Note that topology is not -build for output vector point map. The topology can be built if -required by <em><a href="v.build.html">v.build</a></em>. - -<p>If output is a vector points map and a <b>sparse</b> vector points -map is not specified, the output vector map will contain points at the -same locations as observation points in the input map, but the values -of the output points are interpolated values. If instead -a <b>sparse</b> vector points map is specified, the output vector map -will contain points at the same locations as the sparse vector map -points, and values will be those of the interpolated raster surface at -those points. +vector layer. + +<p><em>v.surf.bspline</em> can produce raster (<b>raster_output</b>) OR +vector <b>output</b> but NOT simultaneously. Note that topology is not +built for output point vector layer. If required, the topology can be built +using <em><a href="v.build.html">v.build</a></em>. + +<p>If output is a point vector layer and <b>sparse</b> is not specified, +the output vector layer will contain points at the +same locations as observation points in the input layer but the values +of the output points will be interpolated values. If a <b>sparse</b> +point vector layer is specified, the output vector layer will contain points +at the same locations as the sparse vector layer points. The values will be +those of the interpolated raster surface at those points. 
<p>A cross validation "leave-one-out" analysis is available to help to determine the optimal <b>lambda_i</b> value that produces an @@ -96,7 +95,7 @@ <h3>Basic interpolation</h3> v.surf.bspline input=point_vector output=interpolate_surface method=bicubic </pre></div> -A bicubic spline interpolation will be done and a vector points map +A bicubic spline interpolation will be done and a point vector layer with estimated (i.e., interpolated) values will be created. <h3>Basic interpolation and raster output with a longer spline step</h3> @@ -106,7 +105,7 @@ <h3>Basic interpolation and raster output with a longer spline step</h3> </pre></div> A bilinear spline interpolation will be done with a spline step length -of 25 map units. An interpolated raster map will be created at the +of 25 map units. An interpolated raster layer will be created at the current region resolution. <h3>Estimation of lambda_i parameter with a cross validation process</h3> @@ -121,8 +120,8 @@ <h3>Estimation on sparse points</h3> v.surf.bspline input=point_vector sparse=sparse_points output=interpolate_surface </pre></div> -An output map of vector points will be created, corresponding to the -sparse vector map, with interpolated values. +An output layer of vector points will be created, corresponding to the +sparse vector layer, with interpolated values. <h3>Using attribute values instead z-coordinates</h3> @@ -144,13 +143,11 @@ <h3>North Carolina dataset example using z-coordinates for interpolation</h3> <h2>KNOWN ISSUES</h2> -Known issues: - <p> In order to avoid RAM memory problems, an auxiliary table is needed for recording some intermediate calculations. This requires -the <i>GROUP BY</i> SQL function is used, which is not supported by -the DBF driver. For this reason, vector map output +the <i>GROUP BY</i> SQL function is used. This function is not +supported by the DBF driver. For this reason, vector output (<b>output</b>) is not permitted with the DBF driver. There are no problems with the raster map output from the DBF driver. diff --git a/vector/v.surf.rst/v.surf.rst.html b/vector/v.surf.rst/v.surf.rst.html index 199ca4fc3ca..8194aef36d9 100644 --- a/vector/v.surf.rst/v.surf.rst.html +++ b/vector/v.surf.rst/v.surf.rst.html @@ -305,14 +305,14 @@ <h3>Performance</h3> <img src="vsurfrst_benchmark.png" alt="benchmark for v.surf.rst" border="0"> <br> <i>Figure 1: Benchmark shows execution time for different - number of cells (1M, 2M, 4M, and 8M). + number of cells (1M, 2M, 4M, and 8M).</i> </div> <div align="center" style="margin: 10px"> <img src="vsurfrst_cv_benchmark.png" alt="benchmark for cross-validation of v.surf.rst" border="0"> <br> <i>Figure 2: Benchmark shows execution time for running cross-validation on - different number of cells (100k, 200k, 400k, and 800k). 
+ different number of cells (100k, 200k, 400k, and 800k).</i> </div> <h2>EXAMPLE</h2> @@ -355,10 +355,10 @@ <h3>Usage of the where parameter</h3> v.db.univar -e elevrand column=value # interpolation based on subset of points (only those over 1st quartile) -v.surf.rst input=elevrand zcolumn=value elevation=elev_partial npmin=100 where="value > 94.9" +v.surf.rst input=elevrand zcolumn=value elevation=elev_partial npmin=100 where="value &gt; 94.9" r.colors map=elev_partial raster=elevation d.rast elev_partial -d.vect elevrand where="value > 94.9" +d.vect elevrand where="value &gt; 94.9" </pre></div> <h2>REFERENCES</h2> diff --git a/vector/v.to.3d/v.to.3d.html b/vector/v.to.3d/v.to.3d.html index 6053b746a88..1c02859e7b6 100644 --- a/vector/v.to.3d/v.to.3d.html +++ b/vector/v.to.3d/v.to.3d.html @@ -6,17 +6,19 @@ <h2>DESCRIPTION</h2> parameter. <p> -Flag <b>-r</b> enables to perform reverse transformation, i.e., -transform 3D vector to 2D by omitting z-coordinate. Height of input 3D +The <b>-r</b> flag enables performing the reverse transformation, i.e., +transforming a 3D vector to 2D by omitting the z-coordinate. The height of input 3D features can be optionally stored in <b>column</b>. <h2>NOTES</h2> +<p> When transforming 2D vector features to 3D based on attribute, all NULL values are silently converted to height 0.0. + <p> -Reverse transformation is possible for points and lines. -In case of lines, the reverse transformation should be used +The reverse transformation, from 3D to 2D, is possible for points and lines. +In the case of lines, the reverse transformation should be used only when all vertices of a line have the same z-coordinate (for example contours). diff --git a/vector/v.to.db/query.c b/vector/v.to.db/query.c index 51856321d31..aecd934aded 100644 --- a/vector/v.to.db/query.c +++ b/vector/v.to.db/query.c @@ -191,6 +191,9 @@ int query(struct Map_info *Map) } db_close_database_shutdown_driver(driver); + Vect_destroy_line_struct(Points); + Vect_destroy_cats_struct(Cats); + Vect_destroy_field_info(Fi); return 0; } diff --git a/vector/v.to.db/v.to.db.html b/vector/v.to.db/v.to.db.html index f0ba460d450..6b32084261d 100644 --- a/vector/v.to.db/v.to.db.html +++ b/vector/v.to.db/v.to.db.html @@ -95,9 +95,9 @@ <h3>Updating attribute tables</h3> <p>Compute D<sub>L</sub>, the Fractal Dimension (Mandelbrot, 1982), of the boundary defining a polygon based on the formula: -<br><tt> +<br><code> D = 2 * (log perimeter) / (log area):<br> -</tt> +</code> <div class="code"><pre> g.copy vect=soils,mysoils v.db.addcolumn mysoils col="d double precision" diff --git a/vector/v.to.rast/v.to.rast.html b/vector/v.to.rast/v.to.rast.html index c5c4e7a54d2..7cbfc68f48a 100644 --- a/vector/v.to.rast/v.to.rast.html +++ b/vector/v.to.rast/v.to.rast.html @@ -27,15 +27,15 @@ <h2>NOTES</h2> <p><em><b>use</b></em> options are: <ul> <li> -<em>attr</em> - read values from attribute table (default) +<em>attr</em> - read values from attribute table (default)</li> <li> -<em>cat</em> - read values from category +<em>cat</em> - read values from category</li> <li> -<em>value</em> - use value specified by <em><b>value</b></em> option +<em>value</em> - use value specified by <em><b>value</b></em> option</li> <li> -<em>z</em> - use z coordinate (points or contours only) +<em>z</em> - use z coordinate (points or contours only)</li> <li> -<em>dir</em> - line direction in degrees counterclockwise from east (lines only) +<em>dir</em> - line direction in degrees counterclockwise from east (lines only)</li> </ul> <p>The <em><b>column</b></em> parameter
uses an existing column from the vector map database table as the category value in the output raster map. Existing table @@ -49,13 +49,13 @@ <h2>NOTES</h2> Labeled areas and/or centroids will produce filled raster coverages with edges that straddle the original area boundary <b>as long as the boundary is NOT labeled</b>. -<br>(Use <tt>v.category option=del type=boundary</tt> to remove.) +<br>(Use <code>v.category option=del type=boundary</code> to remove.)</li> <li> Labeled lines and boundaries will produce lines of raster cells which touch the -original vector line. This tends to be more aggressive than area-only conversions. +original vector line. This tends to be more aggressive than area-only conversions.</li> <li> Points and orphaned centroids will be converted into single cells on the -resultant raster map. +resultant raster map.</li> </ul> <p><p><b>Line directions</b> are given in degrees counterclockwise from east. <p><p>Raster category labels are supported for all of <em>use=</em> except <em>use=z</em>. @@ -106,7 +106,7 @@ <h3>Calculate slope along path</h3> </pre></div> <center> -<img src="v_to_rast_direction.png" alt="Slope along path" border=1><br> +<img src="v_to_rast_direction.png" alt="Slope along path" border="1"><br> Slope in degrees along bus route </center> @@ -153,7 +153,7 @@ <h3>Convert vector points to raster with raster cell binning</h3> </pre></div> <center> -<img src="v_to_rast_binning.png" alt="Number of schools per raster cell" border=1><br> +<img src="v_to_rast_binning.png" alt="Number of schools per raster cell" border="1"><br> Number of schools per raster cell </center> diff --git a/vector/v.to.rast3/v.to.rast3.html b/vector/v.to.rast3/v.to.rast3.html index 14bfb0eada0..42a50851491 100644 --- a/vector/v.to.rast3/v.to.rast3.html +++ b/vector/v.to.rast3/v.to.rast3.html @@ -14,7 +14,7 @@ <h2>NOTES</h2> <img src="v_to_rast3_test.png" border="0"><br> <table border="0" width="600"> <tr><td><center> -<i>This screenshot shows the result of the v.to.rast3 test. Visualized +<i>Fig: This screenshot shows the result of the v.to.rast3 test. Visualized are the cube of the GRASS region, the vector points as black dots and the voxel cells as wireframe model. 
Only cells with non-null values are shown.</i> diff --git a/vector/v.univar/main.c b/vector/v.univar/main.c index 85217498035..5bfa2b672f8 100644 --- a/vector/v.univar/main.c +++ b/vector/v.univar/main.c @@ -358,6 +358,8 @@ void select_from_geometry(void) G_debug(3, "i=%d j=%d sum = %f val=%f", i, j, sum, val); } } + Vect_destroy_line_struct(jPoints); + Vect_destroy_line_struct(iPoints); } void select_from_database(void) diff --git a/vector/v.univar/v.univar.html b/vector/v.univar/v.univar.html index ee9c6f77582..3d886864421 100644 --- a/vector/v.univar/v.univar.html +++ b/vector/v.univar/v.univar.html @@ -27,8 +27,8 @@ <h2>NOTES</h2> <li> <b>type=point</b>: point distances are considered;</li> <li> <b>type=line</b>: line to line distances are considered;</li> <li> <b>type=area</b>: not supported, use <b>type=centroid</b> instead (and see - <a href="v.distance.html">v.distance</a> for calculating distances - between areas)</li> + <a href="v.distance.html">v.distance</a> for calculating distances + between areas)</li> </ul> <h2>EXAMPLES</h2> diff --git a/vector/v.vect.stats/v.vect.stats.html b/vector/v.vect.stats/v.vect.stats.html index adc8ee54f77..109519154c5 100644 --- a/vector/v.vect.stats/v.vect.stats.html +++ b/vector/v.vect.stats/v.vect.stats.html @@ -213,17 +213,17 @@ <h3>Point statistics in a hexagonal grid</h3> d.frame frame=f2 at=50,100,50,100 -c v.colors map=hexagons use=attr column=count color=viridis -d.vect map=hexagons where="count > 0" +d.vect map=hexagons where="count > 0" d.text text="count" at=60,5 size=10 color=black d.frame frame=f3 at=0,50,0,50 -c v.colors map=hexagons use=attr column=average color=viridis -d.vect map=hexagons where="count > 0" +d.vect map=hexagons where="count > 0" d.text text="average" at=60,5 size=10 color=black d.frame frame=f4 at=0,50,50,100 -c v.colors map=hexagons use=attr column=stddev color=viridis -d.vect map=hexagons where="count > 0" +d.vect map=hexagons where="count > 0" d.text text="stddev" at=60,5 size=10 color=black d.mon stop=cairo diff --git a/vector/v.vol.rst/v.vol.rst.html b/vector/v.vol.rst/v.vol.rst.html index 571a162162c..34699390602 100644 --- a/vector/v.vol.rst/v.vol.rst.html +++ b/vector/v.vol.rst/v.vol.rst.html @@ -68,7 +68,7 @@ <h3>SQL support</h3> <div class="code"><pre> # preparation as in above example -v.vol.rst elevrand_3d wcol=soilrange elevation=soilrange zscale=100 where="soilrange > 3" +v.vol.rst elevrand_3d wcol=soilrange elevation=soilrange zscale=100 where="soilrange > 3" </pre></div> <h3>Cross validation procedure</h3> diff --git a/vector/v.voronoi/v.voronoi.html b/vector/v.voronoi/v.voronoi.html index e719c2c0038..fa79d1507a8 100644 --- a/vector/v.voronoi/v.voronoi.html +++ b/vector/v.voronoi/v.voronoi.html @@ -10,7 +10,7 @@ <h2>DESCRIPTION</h2> <p> The <em>-s</em> flag can be used to extract the center line of areas or -skeletons of areas with <em>thin</em> >= 0. Smaller values for the +skeletons of areas with <em>thin</em> >= 0. Smaller values for the <em>thin</em> option will preserve more detail, while negative values will extract only the center line. 
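<p>
A minimal sketch of skeleton extraction (the North Carolina sample map <code>urbanarea</code> is only illustrative):
<div class="code"><pre>
# a negative thin value keeps only the center line of each area
v.voronoi -s input=urbanarea output=urban_centerline thin=-1
</pre></div>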
@@ -91,6 +91,8 @@ <h2>SEE ALSO</h2> <a href="v.delaunay.html">v.delaunay</a>, <a href="v.hull.html">v.hull</a> </em> +<p> +<a href="https://en.wikipedia.org/wiki/Voronoi_diagram">Voronoi diagram (Wikipedia)</a> <h2>AUTHORS</h2> diff --git a/vector/v.what.rast/v.what.rast.html b/vector/v.what.rast/v.what.rast.html index fe067a18138..05c5eddbd89 100644 --- a/vector/v.what.rast/v.what.rast.html +++ b/vector/v.what.rast/v.what.rast.html @@ -34,10 +34,10 @@ <h2>NOTES</h2> geometry instead, use <em><a href="v.drape.html">v.drape</a></em>. <p> Categories and values are output unsorted with the print flag. To sort them -pipe the output of this module into the UNIX <tt>sort</tt> tool -(<tt>sort -n</tt>). If you need coordinates, after sorting use -<em><a href="v.out.ascii.html">v.out.ascii</a></em> and the UNIX <tt>paste</tt> tool -(<tt>paste -d'|'</tt>). In the case of a NULL result, a "<tt>*</tt>" +pipe the output of this module into the UNIX <code>sort</code> tool +(<code>sort -n</code>). If you need coordinates, after sorting use +<em><a href="v.out.ascii.html">v.out.ascii</a></em> and the UNIX <code>paste</code> tool +(<code>paste -d'|'</code>). In the case of a NULL result, a "<code>*</code>" will be printed in lieu of the value. <p> The interpolation flag is only useful for continuous value raster maps, diff --git a/vector/vectorintro.html b/vector/vectorintro.html index db85a0afd37..814526ec8b1 100644 --- a/vector/vectorintro.html +++ b/vector/vectorintro.html @@ -268,8 +268,8 @@ <h3>Geometry operations</h3> Based on the control points, <a href="v.rectify.html">v.rectify</a> rectifies a vector map by computing a coordinate transformation for each vector object. -Triangulation and point-to-polygon conversions can be done with <a -href="v.delaunay.html">v.delaunay</a>, <a href="v.hull.html">v.hull</a>, +Triangulation and point-to-polygon conversions can be done with +<a href="v.delaunay.html">v.delaunay</a>, <a href="v.hull.html">v.hull</a>, and <a href="v.voronoi.html">v.voronoi</a>. The <a href="v.random.html">v.random</a> command generated random points. @@ -310,8 +310,8 @@ <h3>Vector queries</h3> <h3>Vector-Raster queries</h3> Raster values can be transferred to vector maps with - <a href="v.what.rast.html">v.what.rast</a> and -<a href="v.rast.stats">v.rast.stats</a>. +<a href="v.what.rast.html">v.what.rast</a> and +<a href="v.rast.stats.html">v.rast.stats</a>. <h3>Vector network analysis</h3> @@ -322,7 +322,7 @@ <h3>Vector network analysis</h3> <li> Network preparation and maintenance: <a href="v.net.html">v.net</a></li> <li> Shortest path: <a href="d.path.html">d.path</a> and <a href="v.net.path.html">v.net.path</a></li> -<li> Shortest path between all pairs of nodes <a href="v.net.allpairs.html">v.net.allpairs</a> +<li> Shortest path between all pairs of nodes <a href="v.net.allpairs.html">v.net.allpairs</a></li> <li> Allocation of sources (create subnetworks, e.g. 
police station zones): <a href="v.net.alloc.html">v.net.alloc</a></li> <li> Iso-distances (from centers): <a href="v.net.iso.html">v.net.iso</a></li> @@ -394,4 +394,5 @@ <h3>See also</h3> <li><a href="temporalintro.html">Introduction into temporal data processing</a></li> <li><a href="databaseintro.html">Introduction to database management</a></li> <li><a href="projectionintro.html">Projections and spatial transformations</a></li> + <li><a href="wxguiintro.html">Graphical User Interface</a></li> </ul> diff --git a/visualization/ximgview/ximgview.html b/visualization/ximgview/ximgview.html index c5012ca7d41..1e07dd7eb38 100644 --- a/visualization/ximgview/ximgview.html +++ b/visualization/ximgview/ximgview.html @@ -23,7 +23,7 @@ <h2>EXAMPLE</h2> export GRASS_RENDER_FILE_READ=TRUE d.erase -ximgview $GRASS_RENDER_FILE percent=50 & +ximgview $GRASS_RENDER_FILE percent=50 & d.rast elevation.dem d.vect roads </pre></div> @@ -31,10 +31,14 @@ <h2>EXAMPLE</h2> <h2>SEE ALSO</h2> <em> -<a href="pngdriver.html">PNG driver</a><br> -<a href="cairodriver.html">cairo driver</a><br> +<a href="pngdriver.html">PNG driver</a>, +<a href="cairodriver.html">cairo driver</a>, <a href="wximgview.html">wximgview</a> </em> +<br> +<em> +<a href="variables.html">variables</a> +</em> <h2>AUTHOR</h2>