diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 0000000000..2381e02fdf --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,8 @@ +[test-groups] +integration-tests = { max-threads = 1 } + + +[[profile.default.overrides]] +filter = 'package(geoengine-services)' +test-group = 'integration-tests' +slow-timeout = { period = "360s" } diff --git a/.github/actions/test/action.yml b/.github/actions/test/action.yml new file mode 100644 index 0000000000..a31b60e8eb --- /dev/null +++ b/.github/actions/test/action.yml @@ -0,0 +1,6 @@ +name: "Run Tests & Generate Coverage" +description: "Run Geo Engine tests with coverage" +runs: + using: "docker" + image: "docker://quay.io/geoengine/devcontainer:latest" + entrypoint: "./.github/actions/test/test.sh" diff --git a/.github/actions/test/test.sh b/.github/actions/test/test.sh new file mode 100755 index 0000000000..2e251fa0fb --- /dev/null +++ b/.github/actions/test/test.sh @@ -0,0 +1,30 @@ +#!/bin/bash + +function print_headline() { + local BOLD_WHITE_ON_CYAN="\e[1;46;37m" + local BOLD_CYAN="\e[1;49;36m" + local RESET_COLOR="\e[0m" + printf "${BOLD_WHITE_ON_CYAN} ▶ ${BOLD_CYAN} $1 ${RESET_COLOR}\n" >&2 +} + +print_headline "Install cargo-llvm-cov and nextest" +cargo install --locked cargo-llvm-cov +cargo install --locked cargo-nextest + +print_headline "Run Tests & Generate Code Coverage" +service postgresql start +cargo llvm-cov nextest-archive \ + --archive-file tests.tar.zst \ + --cargo-profile ci \ + --locked \ + --all-features +cargo clean +cargo llvm-cov nextest \ + --archive-file tests.tar.zst \ + --all-features \ + --lcov \ + --output-path lcov.info + +print_headline "Run Doctests" +# cf. 
https://github.com/taiki-e/cargo-llvm-cov/issues/2 +cargo test --doc --all-features --locked diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 66e339d334..b9d81dd7a6 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,29 +11,11 @@ env: jobs: test: - runs-on: ${{matrix.os}} - - services: - postgres: - image: postgis/postgis - env: - POSTGRES_USER: geoengine - POSTGRES_PASSWORD: geoengine - POSTGRES_DB: geoengine - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - strategy: - matrix: - os: - - ubuntu-22.04 - features: [""] - build: [""] + runs-on: ubuntu-24.04 steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Free Disk Space (Ubuntu) uses: jlumbroso/free-disk-space@main with: @@ -44,22 +26,41 @@ jobs: large-packages: true docker-images: true swap-storage: true - - name: Install lld & GDAL & Protobuf - run: | - sudo apt-get update - sudo apt-get install lld libgdal-dev gdal-bin build-essential clang curl protobuf-compiler - sudo apt-get clean - export C_INCLUDE_PATH=/usr/include/gdal:$C_INCLUDE_PATH - export CPLUS_INCLUDE_PATH=/usr/include/gdal:$CPLUS_INCLUDE_PATH - sudo ldconfig - - name: Install Rustup + # TODO: Get Cache back + # - name: setup rust build cache + # uses: Swatinem/rust-cache@v2 + # with: + # # An explicit cache key that is used instead of the automatic `job`-based + # # cache key and is thus stable across jobs. + # # Default: empty + # shared-key: "" + + # # An additional cache key that is added alongside the automatic `job`-based + # # cache key and can be used to further differentiate jobs. + # # Default: empty + # key: ci_test_ + - name: Testsuite + uses: ./.github/actions/test + - name: Adjust lcov.info + # The lcov file is generated in the container and contains absolute paths, so we need to adjust them. 
run: | - curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y - echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH - - name: Install SQLFluff - run: pip install sqlfluff==3.3.0 + sed -i 's|/github/workspace/||g' lcov.info + - name: Upload coverage to Coveralls + uses: coverallsapp/github-action@v2 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + file: lcov.info + + lint: + runs-on: ubuntu-24.04 + container: quay.io/geoengine/devcontainer:latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 - name: Init rustup toolchain - run: rustup show # enough to initialize the toolchain + # somehow rustup show will take care to initialize the version based on the toolchain file + run: rustup show - name: setup rust build cache uses: Swatinem/rust-cache@v2 with: @@ -71,64 +72,30 @@ jobs: # An additional cache key that is added alongside the automatic `job`-based # cache key and can be used to further differentiate jobs. # Default: empty - key: ci_test_${{ matrix.os }}_${{ matrix.features }}_${{ matrix.build }} - - # A whitespace separated list of env-var *prefixes* who's value contributes - # to the environment cache key. - # The env-vars are matched by *prefix*, so the default `RUST` var will - # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc. - # Default: "CARGO CC CFLAGS CXX CMAKE RUST" - #env-vars: "" - - # The cargo workspaces and target directory configuration. - # These entries are separated by newlines and have the form - # `$workspace -> $target`. The `$target` part is treated as a directory - # relative to the `$workspace` and defaults to "target" if not explicitly given. - # Default: ". -> target" - #workspaces: "" - - # Determines if the cache should be saved even when the workflow has failed. 
- # Default: "false" - #cache-on-failure: "" - - name: Check with Rustfmt + key: ci_test_ + - name: Rustfmt run: cargo fmt --all -- --check - - name: Check with Clippy - run: cargo clippy --all-targets --locked ${{ matrix.features }} ${{ matrix.build }} -- -D warnings - - name: Check with SQLFluff - run: sqlfluff lint - - name: Verify for expression dependencies workspace + - name: Clippy + run: cargo clippy --all-targets --locked -- -D warnings + - name: SQLFluff + run: pipx run sqlfluff==3.3.0 lint + - name: Verifying expression dependencies workspace run: | rustup toolchain install nightly chmod +x ./.scripts/check-expression-deps.rs ./.scripts/check-expression-deps.rs - - name: Run tests - run: cargo test --locked ${{ matrix.features }} ${{ matrix.build }} --verbose build: - runs-on: ${{matrix.os}} + runs-on: ubuntu-24.04 + container: quay.io/geoengine/devcontainer:latest strategy: matrix: - os: - - ubuntu-22.04 - features: [""] build: ["", "--release"] steps: - name: Checkout code - uses: actions/checkout@v3 - - name: APT update - run: sudo apt-get update - - name: Install lld & GDAL & Protobuf - run: | - sudo apt-get install lld libgdal-dev gdal-bin build-essential curl protobuf-compiler - export C_INCLUDE_PATH=/usr/include/gdal:$C_INCLUDE_PATH - export CPLUS_INCLUDE_PATH=/usr/include/gdal:$CPLUS_INCLUDE_PATH - sudo ldconfig - - name: Install Rustup - run: | - curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y - echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH + uses: actions/checkout@v4 - name: Init rustup toolchain # somehow rustup show will take care to initialize the version based on the toolchain file run: rustup show @@ -143,24 +110,6 @@ jobs: # An additional cache key that is added alongside the automatic `job`-based # cache key and can be used to further differentiate jobs. 
# Default: empty - key: ci_test_${{ matrix.os }}_${{ matrix.features }}_${{ matrix.build }} - - # A whitespace separated list of env-var *prefixes* who's value contributes - # to the environment cache key. - # The env-vars are matched by *prefix*, so the default `RUST` var will - # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc. - # Default: "CARGO CC CFLAGS CXX CMAKE RUST" - #env-vars: "" - - # The cargo workspaces and target directory configuration. - # These entries are separated by newlines and have the form - # `$workspace -> $target`. The `$target` part is treated as a directory - # relative to the `$workspace` and defaults to "target" if not explicitly given. - # Default: ". -> target" - #workspaces: "" - - # Determines if the cache should be saved even when the workflow has failed. - # Default: "false" - #cache-on-failure: "" + key: ci_test_${{ matrix.build }} - name: Build - run: cargo build --locked ${{ matrix.features }} ${{ matrix.build }} --verbose + run: cargo build --locked ${{ matrix.build }} --verbose diff --git a/.github/workflows/clear-cache.yml b/.github/workflows/clear-cache.yml index 79212cb41d..e3adb4f20e 100644 --- a/.github/workflows/clear-cache.yml +++ b/.github/workflows/clear-cache.yml @@ -9,7 +9,7 @@ env: jobs: clear-cache: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Clear all caches @@ -21,7 +21,7 @@ jobs: const [owner, repo] = GITHUB_REPOSITORY.split('/') console.log(`Clearing all caches for ${owner}/${repo}`) - + let caches = []; do { diff --git a/.github/workflows/containers.yml b/.github/workflows/containers.yml index 24278d2806..4be771ca41 100644 --- a/.github/workflows/containers.yml +++ b/.github/workflows/containers.yml @@ -25,7 +25,7 @@ jobs: create-container: if: github.event_name != 'schedule' || github.repository_owner == 'geo-engine' - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 env: TAG_NAME: nightly @@ -54,13 +54,13 @@ jobs: echo "CONTAINER_REPOSITORY_BRANCH=${{ 
github.event.inputs.container_repository_branch }}" >> $GITHUB_ENV - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.ref_name }} path: geoengine - name: Checkout container files - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: geo-engine/geoengine-container ref: ${{ env.CONTAINER_REPOSITORY_BRANCH }} @@ -98,7 +98,7 @@ jobs: name: Post to a Slack channel in case of failure needs: create-container if: always() - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - name: Post to a Slack channel if: ${{ needs.create-container.result == 'failure' }} diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml deleted file mode 100644 index a43ac1c8cf..0000000000 --- a/.github/workflows/coverage.yml +++ /dev/null @@ -1,115 +0,0 @@ -name: coverage - -on: - pull_request: - # Creates a coverage of the main branch - push: - branches: - - main - merge_group: - # Allows you to run this workflow manually from the Actions tab - workflow_dispatch: - -env: - CARGO_TERM_COLOR: always - -jobs: - coveralls: - runs-on: ${{matrix.os}} - - strategy: - matrix: - os: - - ubuntu-22.04 - features: ["--all-features"] - build: [""] - - services: - postgres: - image: postgis/postgis - env: - POSTGRES_USER: geoengine - POSTGRES_PASSWORD: geoengine - POSTGRES_DB: geoengine - ports: - - 5432:5432 - options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5 - - steps: - - name: Free Disk Space (Ubuntu) - uses: jlumbroso/free-disk-space@main - with: - # this might remove tools that are actually needed, - # if set to "true" but frees about 6 GB - tool-cache: true - - # all of these default to true, but feel free to set to - # "false" if necessary for your workflow - android: true - dotnet: true - haskell: true - # we need clang + this takes some minutes - large-packages: false - # maybe we need swap storage - swap-storage: false - - name: Checkout code - uses: 
actions/checkout@v3 - - name: APT update - run: sudo apt-get update - - name: Install lld & GDAL & Protobuf - run: | - sudo apt-get install lld libgdal-dev gdal-bin build-essential curl protobuf-compiler - export C_INCLUDE_PATH=/usr/include/gdal:$C_INCLUDE_PATH - export CPLUS_INCLUDE_PATH=/usr/include/gdal:$CPLUS_INCLUDE_PATH - sudo ldconfig - - name: Install Rustup - run: | - curl --proto '=https' --tlsv1.2 --retry 10 --retry-connrefused -fsSL "https://sh.rustup.rs" | sh -s -- --profile minimal --default-toolchain none -y - echo "${CARGO_HOME:-$HOME/.cargo}/bin" >> $GITHUB_PATH - - name: Init rustup toolchain - # somehow rustup show will take care to initialize the version based on the toolchain file - run: rustup show - - name: Install `cargo-llvm-cov` - uses: taiki-e/install-action@cargo-llvm-cov - - name: setup rust build cache - uses: Swatinem/rust-cache@v2 - with: - # An explicit cache key that is used instead of the automatic `job`-based - # cache key and is thus stable across jobs. - # Default: empty - shared-key: "" - - # An additional cache key that is added alongside the automatic `job`-based - # cache key and can be used to further differentiate jobs. - # Default: empty - key: "ci_coverage_${{ matrix.os }}_${{ matrix.features }}_${{ matrix.build }}" - - # A whitespace separated list of env-var *prefixes* who's value contributes - # to the environment cache key. - # The env-vars are matched by *prefix*, so the default `RUST` var will - # match all of `RUSTC`, `RUSTUP_*`, `RUSTFLAGS`, `RUSTDOC_*`, etc. - # Default: "CARGO CC CFLAGS CXX CMAKE RUST" - #env-vars: "" - - # The cargo workspaces and target directory configuration. - # These entries are separated by newlines and have the form - # `$workspace -> $target`. The `$target` part is treated as a directory - # relative to the `$workspace` and defaults to "target" if not explicitly given. - # Default: ". 
-> target" - #workspaces: "" - - # Determines if the cache should be saved even when the workflow has failed. - # Default: "false" - #cache-on-failure: "" - - name: Generate code coverage - run: | - cargo llvm-cov \ - --locked \ - ${{ matrix.features }} \ - --lcov \ - --output-path lcov.info - - name: Upload coverage to Coveralls - uses: coverallsapp/github-action@v2 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - path-to-lcov: lcov.info diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml new file mode 100644 index 0000000000..6579ae00c3 --- /dev/null +++ b/.github/workflows/devcontainer.yml @@ -0,0 +1,84 @@ +name: Create dev container + +on: + # run every Monday at 4:00 AM + schedule: + - cron: "0 4 * * 1" + + # schedule manually + workflow_dispatch: + inputs: + # On workflow dispatch, `branch` is selected by default + # You can access it in `github.ref_name` + + tag_name: + description: "Tag name for the container" + required: true + default: "latest" + + container_repository_branch: + description: "Branch of the container repository" + required: true + default: "main" + +jobs: + create-container: + if: github.event_name != 'schedule' || github.repository_owner == 'geo-engine' + + runs-on: ubuntu-24.04 + + env: + TAG_NAME: latest + CONTAINER_REPOSITORY_BRANCH: main + + steps: + - name: Modify TAG_NAME if on `tag_name` is set on `workflow_dispatch` + if: github.event.inputs.tag_name != '' + run: | + echo "TAG_NAME=${{ github.event.inputs.tag_name }}" >> $GITHUB_ENV + + - name: Modify CONTAINER_REPOSITORY_BRANCH if on `container_repository_branch` is set on `workflow_dispatch` + if: github.event.inputs.container_repository_branch != '' + run: | + echo "CONTAINER_REPOSITORY_BRANCH=${{ github.event.inputs.container_repository_branch }}" >> $GITHUB_ENV + + - name: Checkout container files + uses: actions/checkout@v4 + with: + repository: geo-engine/geoengine-container + ref: ${{ env.CONTAINER_REPOSITORY_BRANCH }} + ssh-key: ${{ 
secrets.CONTAINER_GITHUB_TOKEN }} + path: "container" + + - name: Login to quay.io + run: podman login -u="geoengine+bot" -p="${{secrets.QUAY_IO_TOKEN}}" quay.io + + - name: Build with podman + run: | + podman build \ + --tag geoengine:${{env.TAG_NAME}} \ + -f container/devcontainer/Dockerfile \ + . + + - name: Push image to quay.io + run: podman push geoengine:${{env.TAG_NAME}} quay.io/geoengine/devcontainer:${{env.TAG_NAME}} + + - name: Push latest with date + if: env.TAG_NAME == 'latest' + run: podman push geoengine:${{env.TAG_NAME}} quay.io/geoengine/devcontainer:${{env.TAG_NAME}}-$(date +'%Y-%m-%d') + + notify-slack-on-failure: + name: Post to a Slack channel in case of failure + needs: create-container + if: always() + runs-on: ubuntu-24.04 + steps: + - name: Post to a Slack channel + if: ${{ needs.create-container.result == 'failure' }} + id: slack + uses: slackapi/slack-github-action@v1.23.0 + with: + channel-id: "geoengine-dev-core" + slack-message: "⚠️ The workflow ${{ github.workflow }} in the repository ${{ github.event.repository.name }} FAILED!" 
+ env: + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} diff --git a/Cargo.lock b/Cargo.lock index 04293a0f76..72e82ba451 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -441,15 +441,6 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" -[[package]] -name = "ansi_term" -version = "0.12.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" -dependencies = [ - "winapi", -] - [[package]] name = "anstream" version = "0.6.18" @@ -865,17 +856,6 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.4.0" @@ -891,7 +871,7 @@ dependencies = [ "anyhow", "arrayvec", "log", - "nom 7.1.3", + "nom", "num-rational", "v_frame", ] @@ -1058,29 +1038,6 @@ dependencies = [ "tokio-postgres", ] -[[package]] -name = "bindgen" -version = "0.58.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f8523b410d7187a43085e7e064416ea32ded16bd0a4e6fc025e21616d01258f" -dependencies = [ - "bitflags 1.3.2", - "cexpr", - "clang-sys", - "clap 2.34.0", - "env_logger", - "lazy_static", - "lazycell", - "log", - "peeking_take_while", - "proc-macro2", - "quote", - "regex", - "rustc-hash 1.1.0", - "shlex", - "which", -] - [[package]] name = "bit_field" version = "0.10.2" @@ -1284,15 +1241,6 @@ dependencies = [ "shlex", ] -[[package]] -name = "cexpr" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" -dependencies = [ - "nom 5.1.3", -] - [[package]] name = "cfg-expr" version = "0.15.8" @@ -1376,32 +1324,6 @@ dependencies = [ "inout", ] -[[package]] -name = "clang-sys" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" -dependencies = [ - "glob", - "libc", - "libloading", -] - -[[package]] -name = "clap" -version = "2.34.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" -dependencies = [ - "ansi_term", - "atty", - "bitflags 1.3.2", - "strsim 0.8.0", - "textwrap", - "unicode-width", - "vec_map", -] - [[package]] name = "clap" version = "4.5.27" @@ -1421,7 +1343,7 @@ dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.1", + "strsim", ] [[package]] @@ -1607,7 +1529,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.27", + "clap", "criterion-plot", "is-terminal", "itertools 0.10.5", @@ -1793,7 +1715,7 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim 0.11.1", + "strsim", "syn 2.0.96", ] @@ -2039,19 +1961,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "env_logger" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", -] - [[package]] name = "equivalent" version = "1.0.1" @@ -2569,7 +2478,7 @@ dependencies = [ "postgres-types", "rand", "rayon", - "rustc-hash 2.1.0", + "rustc-hash", "serde", "serde_json", "snafu", @@ -2606,7 +2515,7 @@ dependencies = [ "bb8-postgres", "bytes", "chrono", - "clap 4.5.27", + "clap", "clap_derive", "config", "convert_case 0.7.1", @@ -2890,15 +2799,6 @@ version = "0.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.4.0" @@ -3037,12 +2937,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - [[package]] name = "hyper" version = "0.14.32" @@ -3521,7 +3415,7 @@ version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e19b23d53f35ce9f56aebc7d1bb4e6ac1e9c0db7ac85c8d1760c04379edced37" dependencies = [ - "hermit-abi 0.4.0", + "hermit-abi", "libc", "windows-sys 0.59.0", ] @@ -3654,12 +3548,6 @@ dependencies = [ "spin", ] -[[package]] -name = "lazycell" -version = "1.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" - [[package]] name = "lebe" version = "0.5.2" @@ -3773,6 +3661,25 @@ dependencies = [ "redox_syscall", ] +[[package]] +name = "libsqlite3-sys" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" +dependencies = [ + "pkg-config", + "vcpkg", +] + +[[package]] +name = "link-cplusplus" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d240c6f7e1ba3a28b0249f774e6a9dd0175054b52dfbb61b16eb8505c3785c9" +dependencies = [ + "cc", +] + [[package]] name = "linux-raw-sys" version = "0.4.15" @@ -4040,16 +3947,6 @@ dependencies = [ "memoffset", ] -[[package]] -name = "nom" -version = "5.1.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "08959a387a676302eebf4ddbcbc611da04285579f76f88ee0506c63b1a61dd4b" -dependencies = [ - "memchr", - "version_check", -] - [[package]] name = "nom" version = "7.1.3" @@ -4617,12 +4514,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ec91767ecc0a0bbe558ce8c9da33c068066c57ecc8bb8477ef8c1ad3ef77c27" -[[package]] -name = "peeking_take_while" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" - [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -5054,9 +4945,9 @@ dependencies = [ [[package]] name = "proj" -version = "0.22.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "083a6c17c8af7f7be8b6fab4fe9301d98c0b89d52bf2a1124fb0568001e18169" +checksum = "4fee58a47991424a46bd2219d3caeda1804e64a85094a52958ee7810fe9754f7" dependencies = [ "geo-types", "libc", @@ -5067,13 +4958,14 @@ dependencies = [ [[package]] name = "proj-sys" -version = "0.19.1" +version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88e31669da0651e2f0264a161adb78f58de671c26c7297d7c436c6e506b159b1" +checksum = "533a4ed2ab59f7605ecea26db7ed76572d30aed9d2a6a90738bc7f7e7b5a11d8" dependencies = [ - "bindgen", "cmake", "flate2", + "libsqlite3-sys", + "link-cplusplus", "pkg-config", "tar", ] @@ -5240,7 +5132,7 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash 2.1.0", + "rustc-hash", "rustls 0.23.21", "socket2", "thiserror 2.0.11", @@ -5258,7 +5150,7 @@ dependencies = [ "getrandom", "rand", "ring", - "rustc-hash 2.1.0", + "rustc-hash", "rustls 0.23.21", "rustls-pki-types", "slab", @@ -5660,12 +5552,6 @@ version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" 
-[[package]] -name = "rustc-hash" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" - [[package]] name = "rustc-hash" version = "2.1.0" @@ -6257,12 +6143,6 @@ dependencies = [ "unicode-properties", ] -[[package]] -name = "strsim" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" - [[package]] name = "strsim" version = "0.11.1" @@ -6409,30 +6289,12 @@ dependencies = [ "windows-sys 0.59.0", ] -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - [[package]] name = "termtree" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f50febec83f5ee1df3015341d8bd429f2d1cc62bcba7ea2076759d315084683" -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - [[package]] name = "thiserror" version = "1.0.69" @@ -7056,12 +6918,6 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" -[[package]] -name = "unicode-width" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - [[package]] name = "universal-hash" version = "0.5.1" @@ -7239,12 +7095,6 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" -[[package]] -name = "vec_map" 
-version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" - [[package]] name = "vergen" version = "9.0.4" @@ -7456,15 +7306,6 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082" -[[package]] -name = "which" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724" -dependencies = [ - "libc", -] - [[package]] name = "whoami" version = "1.5.2" diff --git a/Cargo.toml b/Cargo.toml index 5394d9eab3..b6d8a82103 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,9 @@ [workspace] members = ["datatypes", "expression", "macros", "operators", "services"] -exclude = ["expression/deps-workspace"] +exclude = [ + "expression/deps-workspace", # Buggy, cf. https://github.com/rust-lang/cargo/issues/6745 + ".scripts", +] # Edition 2021 defaults to resolver 2, but virtual workspaces default to resolver 1, so we need to set it explicitly. 
resolver = "2" @@ -11,7 +14,7 @@ authors = [ "Johannes Drönner ", "Michael Mattig ", ] -edition = "2021" +edition = "2024" publish = false license-file = "LICENSE" documentation = "https://docs.geoengine.io" @@ -22,6 +25,10 @@ repository = "https://github.com/geo-engine/geoengine" [profile.release] lto = true +[profile.ci] +inherits = "test" +strip = "debuginfo" + [workspace.lints.clippy] # configure default clippy lints complexity = { level = "warn", priority = 0 } @@ -139,8 +146,8 @@ postgres-types = { version = "0.2", features = [ pretty_assertions = "1.4" prettyplease = "0.2" proc-macro2 = "1.0" -proj = "0.22" # needs to stay fixed to use fixed proj version -proj-sys = "0.19" # needs to stay fixed to use fixed proj version +proj = "0.28" # needs to stay fixed to use fixed proj version +proj-sys = "0.25" # needs to stay fixed to use fixed proj version prost = "0.12" # must be compatible with `aruna-rust-api` pwhash = "1.0" quote = "1.0" diff --git a/datatypes/benches/grid_mapping.rs b/datatypes/benches/grid_mapping.rs index e3eccec612..6ed23a6a65 100644 --- a/datatypes/benches/grid_mapping.rs +++ b/datatypes/benches/grid_mapping.rs @@ -1,6 +1,6 @@ #![allow(clippy::unwrap_used)] // okay in benchmarks -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{Criterion, black_box, criterion_group, criterion_main}; use geoengine_datatypes::raster::{ Grid, GridIdx, GridIdx1D, GridIdx2D, GridShape, MapIndexedElements, MapIndexedElementsParallel, }; diff --git a/datatypes/benches/grid_updates.rs b/datatypes/benches/grid_updates.rs index afa6447747..59c602ffd1 100644 --- a/datatypes/benches/grid_updates.rs +++ b/datatypes/benches/grid_updates.rs @@ -1,6 +1,6 @@ #![allow(clippy::unwrap_used)] // okay in benchmarks -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{Criterion, black_box, criterion_group, criterion_main}; use geoengine_datatypes::raster::{ Grid, GridIdx, GridIdx1D, GridIdx2D, GridShape, 
UpdateIndexedElements, UpdateIndexedElementsParallel, diff --git a/datatypes/benches/masked_grid_mapping.rs b/datatypes/benches/masked_grid_mapping.rs index f004d2e98c..baec6e5021 100644 --- a/datatypes/benches/masked_grid_mapping.rs +++ b/datatypes/benches/masked_grid_mapping.rs @@ -1,6 +1,6 @@ #![allow(clippy::unwrap_used)] // okay in benchmarks -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{Criterion, black_box, criterion_group, criterion_main}; use geoengine_datatypes::raster::{ GridIdx, GridIdx1D, GridIdx2D, GridShape, MapIndexedElements, MapIndexedElementsParallel, MaskedGrid, diff --git a/datatypes/benches/multi_point_collection.rs b/datatypes/benches/multi_point_collection.rs index c6bb56da5c..c3c2b96636 100755 --- a/datatypes/benches/multi_point_collection.rs +++ b/datatypes/benches/multi_point_collection.rs @@ -1,6 +1,6 @@ #![allow(clippy::unwrap_used)] // okay in benchmarks -use criterion::{black_box, criterion_group, criterion_main, Criterion}; +use criterion::{Criterion, black_box, criterion_group, criterion_main}; use geoengine_datatypes::collections::{ BuilderProvider, FeatureCollectionInfos, FeatureCollectionModifications, GeoFeatureCollectionRowBuilder, MultiPointCollection, diff --git a/datatypes/src/collections/batch_builder.rs b/datatypes/src/collections/batch_builder.rs index b9e7cc4857..7a8e85ddc8 100644 --- a/datatypes/src/collections/batch_builder.rs +++ b/datatypes/src/collections/batch_builder.rs @@ -1,12 +1,12 @@ -use crate::collections::{error, FeatureCollectionError, TypedFeatureCollection}; use crate::collections::{FeatureCollection, VectorDataType}; +use crate::collections::{FeatureCollectionError, TypedFeatureCollection, error}; use crate::primitives::CacheHint; use crate::primitives::{ Coordinate2D, FeatureDataType, Geometry, MultiLineString, MultiPoint, MultiPolygon, NoGeometry, TimeInterval, }; -use crate::util::arrow::ArrowTyped; use crate::util::Result; +use 
crate::util::arrow::ArrowTyped; use arrow::array::{ ArrayData, ArrayRef, FixedSizeListArray, ListArray, PrimitiveArray, StructArray, }; diff --git a/datatypes/src/collections/data_collection.rs b/datatypes/src/collections/data_collection.rs index 47292dee8b..206c2cd80f 100644 --- a/datatypes/src/collections/data_collection.rs +++ b/datatypes/src/collections/data_collection.rs @@ -39,7 +39,7 @@ impl ExactSizeIterator for NoGeometryIterator {} mod par_iter { use super::*; use rayon::iter::{ - plumbing::Producer, IndexedParallelIterator, IntoParallelIterator, ParallelIterator, + IndexedParallelIterator, IntoParallelIterator, ParallelIterator, plumbing::Producer, }; impl IntoParallelIterator for NoGeometryIterator { diff --git a/datatypes/src/collections/data_types.rs b/datatypes/src/collections/data_types.rs index 86b9d45019..5b8d91a77b 100644 --- a/datatypes/src/collections/data_types.rs +++ b/datatypes/src/collections/data_types.rs @@ -12,8 +12,8 @@ use crate::util::Result; use gdal::vector::OGRwkbGeometryType; use postgres_types::{FromSql, ToSql}; use serde::{Deserialize, Serialize}; -use std::collections::hash_map::Keys; use std::collections::HashMap; +use std::collections::hash_map::Keys; use std::convert::{TryFrom, TryInto}; use std::ops::RangeBounds; diff --git a/datatypes/src/collections/feature_collection.rs b/datatypes/src/collections/feature_collection.rs index bfa08fb749..35b9d69bf1 100644 --- a/datatypes/src/collections/feature_collection.rs +++ b/datatypes/src/collections/feature_collection.rs @@ -2,8 +2,8 @@ use arrow::datatypes::{DataType, Date64Type, Field, Float64Type, Int64Type}; use arrow::error::ArrowError; use arrow::{ array::{ - as_boolean_array, as_primitive_array, as_string_array, Array, ArrayRef, BooleanArray, - ListArray, StructArray, + Array, ArrayRef, BooleanArray, ListArray, StructArray, as_boolean_array, + as_primitive_array, as_string_array, }, buffer::Buffer, }; @@ -28,20 +28,20 @@ use crate::primitives::{ CategoryDataRef, FeatureData, 
FeatureDataRef, FeatureDataType, FeatureDataValue, FloatDataRef, Geometry, IntDataRef, TextDataRef, TimeInterval, }; -use crate::util::arrow::{downcast_array, ArrowTyped}; -use crate::util::helpers::SomeIter; use crate::util::Result; -use crate::{ - collections::{error, IntoGeometryIterator, VectorDataType, VectorDataTyped}, - operations::reproject::Reproject, -}; +use crate::util::arrow::{ArrowTyped, downcast_array}; +use crate::util::helpers::SomeIter; use crate::{ collections::{FeatureCollectionError, IntoGeometryOptionsIterator}, operations::reproject::CoordinateProjection, }; +use crate::{ + collections::{IntoGeometryIterator, VectorDataType, VectorDataTyped, error}, + operations::reproject::Reproject, +}; use std::iter::FromIterator; -use super::{geo_feature_collection::ReplaceRawArrayCoords, GeometryCollection}; +use super::{GeometryCollection, geo_feature_collection::ReplaceRawArrayCoords}; #[allow(clippy::unsafe_derive_deserialize)] #[derive(Debug, Deserialize, Serialize)] @@ -1851,9 +1851,11 @@ mod tests { ) .unwrap(); - assert!(collection - .rename_columns(&[("foo", "baz"), ("bar", "baz")]) - .is_err()); + assert!( + collection + .rename_columns(&[("foo", "baz"), ("bar", "baz")]) + .is_err() + ); } /// If this test fails, change serialization to JSON (cf. methods below) instead of IPC. 
diff --git a/datatypes/src/collections/feature_collection_builder.rs b/datatypes/src/collections/feature_collection_builder.rs index 85a5f1f0ce..ec5b52c070 100644 --- a/datatypes/src/collections/feature_collection_builder.rs +++ b/datatypes/src/collections/feature_collection_builder.rs @@ -1,11 +1,11 @@ use crate::collections::batch_builder::RawFeatureCollectionBuilder; -use crate::collections::{error, FeatureCollection, FeatureCollectionError}; +use crate::collections::{FeatureCollection, FeatureCollectionError, error}; use crate::primitives::CacheHint; use crate::primitives::{FeatureDataType, FeatureDataValue, Geometry, TimeInstance, TimeInterval}; +use crate::util::Result; use crate::util::arrow::{ - downcast_dyn_array_builder, downcast_mut_array, padded_buffer_size, ArrowTyped, + ArrowTyped, downcast_dyn_array_builder, downcast_mut_array, padded_buffer_size, }; -use crate::util::Result; use arrow::array::{ ArrayBuilder, BooleanArray, BooleanBuilder, Date64Builder, Float64Builder, Int64Builder, PrimitiveArray, PrimitiveBuilder, StringArray, StringBuilder, StructArray, StructBuilder, @@ -13,8 +13,8 @@ use arrow::array::{ }; use arrow::datatypes::{ArrowPrimitiveType, Date64Type, Field, Float64Type, Int64Type, UInt8Type}; use snafu::ensure; -use std::collections::hash_map::Entry; use std::collections::HashMap; +use std::collections::hash_map::Entry; use std::iter; use std::marker::PhantomData; diff --git a/datatypes/src/collections/geo_feature_collection.rs b/datatypes/src/collections/geo_feature_collection.rs index 78e9ddfe77..b7459f1740 100644 --- a/datatypes/src/collections/geo_feature_collection.rs +++ b/datatypes/src/collections/geo_feature_collection.rs @@ -1,6 +1,6 @@ use crate::primitives::{BoundingBox2D, Coordinate2D, Geometry, GeometryRef, TimeInterval}; -use crate::util::arrow::ArrowTyped; use crate::util::Result; +use crate::util::arrow::ArrowTyped; use arrow::array::StructArray; use arrow::{ array::{Array, ArrayData}, diff --git 
a/datatypes/src/collections/multi_line_string_collection.rs b/datatypes/src/collections/multi_line_string_collection.rs index 05b620ab63..2515bac982 100644 --- a/datatypes/src/collections/multi_line_string_collection.rs +++ b/datatypes/src/collections/multi_line_string_collection.rs @@ -4,9 +4,9 @@ use crate::collections::{ GeometryRandomAccess, IntoGeometryIterator, }; use crate::primitives::{Coordinate2D, MultiLineString, MultiLineStringAccess, MultiLineStringRef}; -use crate::util::arrow::{downcast_array, ArrowTyped}; -use crate::util::helpers::indices_for_split_at; use crate::util::Result; +use crate::util::arrow::{ArrowTyped, downcast_array}; +use crate::util::helpers::indices_for_split_at; use arrow::{ array::{Array, ArrayData, FixedSizeListArray, Float64Array, ListArray}, buffer::Buffer, @@ -634,8 +634,8 @@ mod tests { use crate::spatial_reference::{SpatialReference, SpatialReferenceAuthority}; use crate::util::well_known_data::{ - COLOGNE_EPSG_4326, COLOGNE_EPSG_900_913, HAMBURG_EPSG_4326, HAMBURG_EPSG_900_913, - MARBURG_EPSG_4326, MARBURG_EPSG_900_913, + COLOGNE_EPSG_900_913, COLOGNE_EPSG_4326, HAMBURG_EPSG_900_913, HAMBURG_EPSG_4326, + MARBURG_EPSG_900_913, MARBURG_EPSG_4326, }; let from = SpatialReference::epsg_4326(); diff --git a/datatypes/src/collections/multi_point_collection.rs b/datatypes/src/collections/multi_point_collection.rs index 0ed10478dc..c6b31899c5 100755 --- a/datatypes/src/collections/multi_point_collection.rs +++ b/datatypes/src/collections/multi_point_collection.rs @@ -10,7 +10,7 @@ use rayon::{ use crate::primitives::{Coordinate2D, MultiPoint, MultiPointRef}; use crate::util::arrow::downcast_array; -use crate::util::{arrow::ArrowTyped, Result}; +use crate::util::{Result, arrow::ArrowTyped}; use crate::{ collections::{ FeatureCollection, FeatureCollectionInfos, FeatureCollectionIterator, FeatureCollectionRow, @@ -1257,8 +1257,8 @@ mod tests { use crate::spatial_reference::{SpatialReference, SpatialReferenceAuthority}; use 
crate::util::well_known_data::{ - COLOGNE_EPSG_4326, COLOGNE_EPSG_900_913, HAMBURG_EPSG_4326, HAMBURG_EPSG_900_913, - MARBURG_EPSG_4326, MARBURG_EPSG_900_913, + COLOGNE_EPSG_900_913, COLOGNE_EPSG_4326, HAMBURG_EPSG_900_913, HAMBURG_EPSG_4326, + MARBURG_EPSG_900_913, MARBURG_EPSG_4326, }; let from = SpatialReference::epsg_4326(); @@ -1310,8 +1310,8 @@ mod tests { use crate::spatial_reference::{SpatialReference, SpatialReferenceAuthority}; use crate::util::well_known_data::{ - COLOGNE_EPSG_4326, COLOGNE_EPSG_900_913, HAMBURG_EPSG_4326, HAMBURG_EPSG_900_913, - MARBURG_EPSG_4326, MARBURG_EPSG_900_913, + COLOGNE_EPSG_900_913, COLOGNE_EPSG_4326, HAMBURG_EPSG_900_913, HAMBURG_EPSG_4326, + MARBURG_EPSG_900_913, MARBURG_EPSG_4326, }; let from = SpatialReference::epsg_4326(); diff --git a/datatypes/src/collections/multi_polygon_collection.rs b/datatypes/src/collections/multi_polygon_collection.rs index 8572ee3dd9..ff24afd080 100644 --- a/datatypes/src/collections/multi_polygon_collection.rs +++ b/datatypes/src/collections/multi_polygon_collection.rs @@ -5,11 +5,11 @@ use crate::collections::{ GeometryCollection, GeometryRandomAccess, IntoGeometryIterator, }; use crate::primitives::{Coordinate2D, MultiPolygon, MultiPolygonAccess, MultiPolygonRef}; -use crate::util::helpers::indices_for_split_at; use crate::util::Result; +use crate::util::helpers::indices_for_split_at; use crate::{ primitives::MultiLineString, - util::arrow::{downcast_array, ArrowTyped}, + util::arrow::{ArrowTyped, downcast_array}, }; use arrow::datatypes::ToByteSlice; use arrow::{ @@ -923,8 +923,8 @@ mod tests { use crate::spatial_reference::{SpatialReference, SpatialReferenceAuthority}; use crate::util::well_known_data::{ - COLOGNE_EPSG_4326, COLOGNE_EPSG_900_913, HAMBURG_EPSG_4326, HAMBURG_EPSG_900_913, - MARBURG_EPSG_4326, MARBURG_EPSG_900_913, + COLOGNE_EPSG_900_913, COLOGNE_EPSG_4326, HAMBURG_EPSG_900_913, HAMBURG_EPSG_4326, + MARBURG_EPSG_900_913, MARBURG_EPSG_4326, }; let from = 
SpatialReference::epsg_4326(); @@ -1056,41 +1056,43 @@ mod tests { let from_geo = MultiPolygonCollection::from(geometries); let collection = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![ - vec![ - vec![ - Coordinate2D::new(-111., 45.), - Coordinate2D::new(-111., 41.), - Coordinate2D::new(-104., 41.), - Coordinate2D::new(-104., 45.), - Coordinate2D::new(-111., 45.), - ], - vec![ - Coordinate2D::new(-110., 44.), - Coordinate2D::new(-110., 42.), - Coordinate2D::new(-105., 42.), - Coordinate2D::new(-105., 44.), - Coordinate2D::new(-110., 44.), - ], - ], - vec![ + vec![ + MultiPolygon::new(vec![ vec![ - Coordinate2D::new(-111., 45.), - Coordinate2D::new(-111., 41.), - Coordinate2D::new(-104., 41.), - Coordinate2D::new(-104., 45.), - Coordinate2D::new(-111., 45.), + vec![ + Coordinate2D::new(-111., 45.), + Coordinate2D::new(-111., 41.), + Coordinate2D::new(-104., 41.), + Coordinate2D::new(-104., 45.), + Coordinate2D::new(-111., 45.), + ], + vec![ + Coordinate2D::new(-110., 44.), + Coordinate2D::new(-110., 42.), + Coordinate2D::new(-105., 42.), + Coordinate2D::new(-105., 44.), + Coordinate2D::new(-110., 44.), + ], ], vec![ - Coordinate2D::new(-110., 44.), - Coordinate2D::new(-110., 42.), - Coordinate2D::new(-105., 42.), - Coordinate2D::new(-105., 44.), - Coordinate2D::new(-110., 44.), + vec![ + Coordinate2D::new(-111., 45.), + Coordinate2D::new(-111., 41.), + Coordinate2D::new(-104., 41.), + Coordinate2D::new(-104., 45.), + Coordinate2D::new(-111., 45.), + ], + vec![ + Coordinate2D::new(-110., 44.), + Coordinate2D::new(-110., 42.), + Coordinate2D::new(-105., 42.), + Coordinate2D::new(-105., 44.), + Coordinate2D::new(-110., 44.), + ], ], - ], - ]) - .unwrap()], + ]) + .unwrap(), + ], vec![Default::default(); 1], Default::default(), CacheHint::default(), diff --git a/datatypes/src/dataset.rs b/datatypes/src/dataset.rs index 265f0ba280..e60d613c9d 100644 --- a/datatypes/src/dataset.rs +++ b/datatypes/src/dataset.rs @@ -1,5 +1,5 @@ use crate::identifier; 
-use serde::{de::Visitor, Deserialize, Serialize}; +use serde::{Deserialize, Serialize, de::Visitor}; identifier!(DataProviderId); diff --git a/datatypes/src/error.rs b/datatypes/src/error.rs index 47d7995533..c49f84bf25 100644 --- a/datatypes/src/error.rs +++ b/datatypes/src/error.rs @@ -4,7 +4,7 @@ use crate::{ raster::RasterDataType, spatial_reference::SpatialReference, }; -use snafu::{prelude::*, AsErrorSource, ErrorCompat, IntoError}; +use snafu::{AsErrorSource, ErrorCompat, IntoError, prelude::*}; use std::{any::Any, convert::Infallible, path::PathBuf, sync::Arc}; use strum::IntoStaticStr; diff --git a/datatypes/src/machine_learning.rs b/datatypes/src/machine_learning.rs index b4cdf687f1..4f89d13858 100644 --- a/datatypes/src/machine_learning.rs +++ b/datatypes/src/machine_learning.rs @@ -1,8 +1,8 @@ use crate::{ - dataset::{is_invalid_name_char, SYSTEM_NAMESPACE}, + dataset::{SYSTEM_NAMESPACE, is_invalid_name_char}, raster::RasterDataType, }; -use serde::{de::Visitor, Deserialize, Serialize}; +use serde::{Deserialize, Serialize, de::Visitor}; use snafu::Snafu; use std::path::PathBuf; use std::str::FromStr; diff --git a/datatypes/src/operations/image/colorizer.rs b/datatypes/src/operations/image/colorizer.rs index 29c3ddfd5c..54061f91c2 100644 --- a/datatypes/src/operations/image/colorizer.rs +++ b/datatypes/src/operations/image/colorizer.rs @@ -1,8 +1,8 @@ use crate::error::{self, Error}; use crate::operations::image::RgbaTransmutable; use crate::raster::Pixel; -use crate::util::test::TestDefault; use crate::util::Result; +use crate::util::test::TestDefault; use ordered_float::{FloatIsNan, NotNan}; use postgres_types::{FromSql, ToSql}; use serde::{Deserialize, Serialize}; diff --git a/datatypes/src/operations/image/into_lossy.rs b/datatypes/src/operations/image/into_lossy.rs index fe3f515cb6..48b5745324 100644 --- a/datatypes/src/operations/image/into_lossy.rs +++ b/datatypes/src/operations/image/into_lossy.rs @@ -43,11 +43,7 @@ 
type_cast_lossy_into_impl!(i64, f64); impl LossyInto for bool { /// This function allows transforming booleans to 0/1 `f64`s fn lossy_into(self) -> f64 { - if self { - 1. - } else { - 0. - } + if self { 1. } else { 0. } } } diff --git a/datatypes/src/operations/image/to_png.rs b/datatypes/src/operations/image/to_png.rs index 83b617355c..a7c3619e97 100644 --- a/datatypes/src/operations/image/to_png.rs +++ b/datatypes/src/operations/image/to_png.rs @@ -11,8 +11,8 @@ use crate::{ }; use image::{DynamicImage, ImageBuffer, ImageFormat, RgbaImage}; -use super::colorizer::ColorMapper; use super::RgbaColor; +use super::colorizer::ColorMapper; pub trait ToPng { /// Outputs png bytes of an image of size width x height diff --git a/datatypes/src/operations/reproject.rs b/datatypes/src/operations/reproject.rs index df316fc75e..533e434361 100644 --- a/datatypes/src/operations/reproject.rs +++ b/datatypes/src/operations/reproject.rs @@ -22,7 +22,7 @@ pub trait CoordinateProjection { /// project a set of coords fn project_coordinates>(&self, coords: A) - -> Result>; + -> Result>; fn source_srs(&self) -> SpatialReference; @@ -38,7 +38,7 @@ pub struct CoordinateProjector { impl CoordinateProjection for CoordinateProjector { fn from_known_srs(from: SpatialReference, to: SpatialReference) -> Result { let p = Proj::new_known_crs(&from.proj_string()?, &to.proj_string()?, None) - .ok_or(error::Error::NoCoordinateProjector { from, to })?; + .map_err(|_| error::Error::NoCoordinateProjector { from, to })?; Ok(CoordinateProjector { from, to, p }) } @@ -504,8 +504,8 @@ mod tests { use crate::primitives::{BoundingBox2D, SpatialPartition2D}; use crate::spatial_reference::SpatialReferenceAuthority; use crate::util::well_known_data::{ - COLOGNE_EPSG_4326, COLOGNE_EPSG_900_913, HAMBURG_EPSG_4326, HAMBURG_EPSG_900_913, - MARBURG_EPSG_4326, MARBURG_EPSG_900_913, + COLOGNE_EPSG_900_913, COLOGNE_EPSG_4326, HAMBURG_EPSG_900_913, HAMBURG_EPSG_4326, + MARBURG_EPSG_900_913, MARBURG_EPSG_4326, }; use 
float_cmp::approx_eq; diff --git a/datatypes/src/plots/box_plot.rs b/datatypes/src/plots/box_plot.rs index 9d8f222722..44be3d6c2a 100644 --- a/datatypes/src/plots/box_plot.rs +++ b/datatypes/src/plots/box_plot.rs @@ -62,7 +62,9 @@ impl BoxPlotAttribute { ensure!( min <= q1 && q1 <= median && median <= q3 && q3 <= max, error::Plot { - details: format!("Illegal box plot values. min: {min}, q1: {q1}, median: {median}, q3: {q3}, max: {max}") + details: format!( + "Illegal box plot values. min: {min}, q1: {q1}, median: {median}, q3: {q3}, max: {max}" + ) } ); diff --git a/datatypes/src/plots/scatter_plot.rs b/datatypes/src/plots/scatter_plot.rs index 8ff29d68c5..80b37114ad 100644 --- a/datatypes/src/plots/scatter_plot.rs +++ b/datatypes/src/plots/scatter_plot.rs @@ -67,8 +67,8 @@ impl Plot for ScatterPlot { #[cfg(test)] mod tests { - use crate::plots::scatter_plot::ScatterPlot; use crate::plots::Plot; + use crate::plots::scatter_plot::ScatterPlot; use crate::primitives::Coordinate2D; #[test] diff --git a/datatypes/src/primitives/bounding_box.rs b/datatypes/src/primitives/bounding_box.rs index 1ed02c8050..1ba726ca84 100644 --- a/datatypes/src/primitives/bounding_box.rs +++ b/datatypes/src/primitives/bounding_box.rs @@ -619,8 +619,8 @@ pub fn bboxes_extent>>( mod tests { use crate::primitives::{ - bounding_box::bboxes_extent, AxisAlignedRectangle, BoundingBox2D, Coordinate2D, - SpatialBounded, + AxisAlignedRectangle, BoundingBox2D, Coordinate2D, SpatialBounded, + bounding_box::bboxes_extent, }; #[test] diff --git a/datatypes/src/primitives/coordinate.rs b/datatypes/src/primitives/coordinate.rs index b21e389004..d843ae599d 100644 --- a/datatypes/src/primitives/coordinate.rs +++ b/datatypes/src/primitives/coordinate.rs @@ -1,4 +1,4 @@ -use crate::util::arrow::{padded_buffer_size, ArrowTyped}; +use crate::util::arrow::{ArrowTyped, padded_buffer_size}; use arrow::array::{ArrayBuilder, BooleanArray, Float64Array, Float64Builder}; use arrow::datatypes::{DataType, Field}; use 
arrow::error::ArrowError; diff --git a/datatypes/src/primitives/datetime.rs b/datatypes/src/primitives/datetime.rs index c93df18caa..05c804e404 100644 --- a/datatypes/src/primitives/datetime.rs +++ b/datatypes/src/primitives/datetime.rs @@ -586,7 +586,7 @@ impl Sub for DateTime { mod sql { use super::*; use postgres_types::{ - accepts, private::BytesMut, to_sql_checked, FromSql, IsNull, ToSql, Type, + FromSql, IsNull, ToSql, Type, accepts, private::BytesMut, to_sql_checked, }; use std::error::Error; diff --git a/datatypes/src/primitives/feature_data.rs b/datatypes/src/primitives/feature_data.rs index 3299cf2f83..35d8e0a881 100644 --- a/datatypes/src/primitives/feature_data.rs +++ b/datatypes/src/primitives/feature_data.rs @@ -1,8 +1,8 @@ use crate::error; use crate::primitives::TimeInstance; use crate::raster::RasterDataType; -use crate::util::helpers::indices_for_split_at; use crate::util::Result; +use crate::util::helpers::indices_for_split_at; use arrow::buffer::NullBuffer; use arrow_array::{BooleanArray, Date64Array, Float64Array, Int64Array, StringArray}; use gdal::vector::OGRFieldType; @@ -392,7 +392,7 @@ impl<'f> DataRef<'f, f64> for FloatDataRef<'f> { fn is_valid(&self, i: usize) -> bool { self.valid_bitmap .as_ref() - .map_or(true, |bitmap| bitmap.is_valid(i)) + .is_none_or(|bitmap| bitmap.is_valid(i)) } fn has_nulls(&self) -> bool { @@ -677,7 +677,7 @@ impl<'f> DataRef<'f, i64> for IntDataRef<'f> { fn is_valid(&self, i: usize) -> bool { self.valid_bitmap .as_ref() - .map_or(true, |bitmap| bitmap.is_valid(i)) + .is_none_or(|bitmap| bitmap.is_valid(i)) } fn has_nulls(&self) -> bool { @@ -760,7 +760,7 @@ impl<'f> DataRef<'f, bool> for BoolDataRef<'f> { fn is_valid(&self, i: usize) -> bool { self.valid_bitmap .as_ref() - .map_or(true, |bitmap| bitmap.is_valid(i)) + .is_none_or(|bitmap| bitmap.is_valid(i)) } fn has_nulls(&self) -> bool { @@ -952,7 +952,7 @@ impl<'f> DataRef<'f, TimeInstance> for DateTimeDataRef<'f> { fn is_valid(&self, i: usize) -> bool { 
self.valid_bitmap .as_ref() - .map_or(true, |bitmap| bitmap.is_valid(i)) + .is_none_or(|bitmap| bitmap.is_valid(i)) } fn has_nulls(&self) -> bool { @@ -1135,7 +1135,7 @@ impl<'f> DataRef<'f, u8> for CategoryDataRef<'f> { fn is_valid(&self, i: usize) -> bool { self.valid_bitmap .as_ref() - .map_or(true, |bitmap| bitmap.is_valid(i)) + .is_none_or(|bitmap| bitmap.is_valid(i)) } fn has_nulls(&self) -> bool { @@ -1193,8 +1193,10 @@ impl<'f> CategoryDataRef<'f> { } unsafe fn byte_ptr_to_str<'d>(bytes: *const u8, length: usize) -> &'d str { - let text_ref = slice::from_raw_parts(bytes, length); - str::from_utf8_unchecked(text_ref) + unsafe { + let text_ref = slice::from_raw_parts(bytes, length); + str::from_utf8_unchecked(text_ref) + } } /// A reference to nullable text data @@ -1301,7 +1303,7 @@ impl<'r> DataRef<'r, u8> for TextDataRef<'r> { fn is_valid(&self, i: usize) -> bool { self.valid_bitmap .as_ref() - .map_or(true, |bitmap| bitmap.is_valid(i)) + .is_none_or(|bitmap| bitmap.is_valid(i)) } fn has_nulls(&self) -> bool { diff --git a/datatypes/src/primitives/mod.rs b/datatypes/src/primitives/mod.rs index 5141d21aeb..f89da27d01 100755 --- a/datatypes/src/primitives/mod.rs +++ b/datatypes/src/primitives/mod.rs @@ -21,7 +21,7 @@ mod time_interval; mod time_step; mod ttl; -pub use bounding_box::{bboxes_extent, BoundingBox2D}; +pub use bounding_box::{BoundingBox2D, bboxes_extent}; pub use circle::Circle; pub use coordinate::Coordinate2D; pub use datetime::{DateTime, DateTimeError, DateTimeParseFormat, Duration}; @@ -43,11 +43,11 @@ pub use query_rectangle::{ QueryAttributeSelection, QueryRectangle, RasterQueryRectangle, VectorQueryRectangle, }; pub use spatial_partition::{ - partitions_extent, AxisAlignedRectangle, SpatialPartition2D, SpatialPartitioned, + AxisAlignedRectangle, SpatialPartition2D, SpatialPartitioned, partitions_extent, }; pub use spatial_resolution::SpatialResolution; pub use spatio_temporal_bounded::{SpatialBounded, TemporalBounded}; pub use 
time_instance::TimeInstance; -pub use time_interval::{time_interval_extent, TimeInterval}; +pub use time_interval::{TimeInterval, time_interval_extent}; pub use time_step::{TimeGranularity, TimeStep, TimeStepIter}; pub use ttl::{CacheExpiration, CacheHint, CacheTtlSeconds}; diff --git a/datatypes/src/primitives/multi_line_string.rs b/datatypes/src/primitives/multi_line_string.rs index 489b5b1510..0456add7c3 100644 --- a/datatypes/src/primitives/multi_line_string.rs +++ b/datatypes/src/primitives/multi_line_string.rs @@ -13,11 +13,11 @@ use wkt::{ToWkt, Wkt}; use crate::collections::VectorDataType; use crate::error::Error; use crate::primitives::{ - error, BoundingBox2D, GeometryRef, MultiPoint, PrimitivesError, TypedGeometry, + BoundingBox2D, GeometryRef, MultiPoint, PrimitivesError, TypedGeometry, error, }; use crate::primitives::{Coordinate2D, Geometry}; -use crate::util::arrow::{downcast_array, padded_buffer_size, ArrowTyped}; use crate::util::Result; +use crate::util::arrow::{ArrowTyped, downcast_array, padded_buffer_size}; /// A trait that allows a common access to lines of `MultiLineString`s and its references pub trait MultiLineStringAccess { diff --git a/datatypes/src/primitives/multi_point.rs b/datatypes/src/primitives/multi_point.rs index 05b5e2c9fd..48bce94bc7 100644 --- a/datatypes/src/primitives/multi_point.rs +++ b/datatypes/src/primitives/multi_point.rs @@ -1,10 +1,10 @@ use super::SpatialBounded; use crate::collections::VectorDataType; use crate::error::Error; -use crate::primitives::{error, BoundingBox2D, GeometryRef, PrimitivesError, TypedGeometry}; +use crate::primitives::{BoundingBox2D, GeometryRef, PrimitivesError, TypedGeometry, error}; use crate::primitives::{Coordinate2D, Geometry}; -use crate::util::arrow::{downcast_array, padded_buffer_size, ArrowTyped}; use crate::util::Result; +use crate::util::arrow::{ArrowTyped, downcast_array, padded_buffer_size}; use arrow::array::{BooleanArray, FixedSizeListArray, Float64Array}; use 
arrow::error::ArrowError; use fallible_iterator::FallibleIterator; @@ -468,7 +468,9 @@ mod tests { assert!(MultiPoint::new(vec![(0.5, 0.5).into()])?.intersects_bbox(&bbox)); assert!(MultiPoint::new(vec![(1.0, 1.0).into()])?.intersects_bbox(&bbox)); - assert!(MultiPoint::new(vec![(0.5, 0.5).into(), (1.5, 1.5).into()])?.intersects_bbox(&bbox)); + assert!( + MultiPoint::new(vec![(0.5, 0.5).into(), (1.5, 1.5).into()])?.intersects_bbox(&bbox) + ); assert!(!MultiPoint::new(vec![(1.1, 1.1).into()])?.intersects_bbox(&bbox)); assert!( !MultiPoint::new(vec![(-0.1, -0.1).into(), (1.1, 1.1).into()])?.intersects_bbox(&bbox) diff --git a/datatypes/src/primitives/multi_polygon.rs b/datatypes/src/primitives/multi_polygon.rs index bc09a5eb1b..c08e12d670 100644 --- a/datatypes/src/primitives/multi_polygon.rs +++ b/datatypes/src/primitives/multi_polygon.rs @@ -13,11 +13,11 @@ use super::MultiPoint; use crate::collections::VectorDataType; use crate::error::Error; use crate::primitives::{ - error, BoundingBox2D, GeometryRef, MultiLineString, PrimitivesError, TypedGeometry, + BoundingBox2D, GeometryRef, MultiLineString, PrimitivesError, TypedGeometry, error, }; use crate::primitives::{Coordinate2D, Geometry}; -use crate::util::arrow::{downcast_array, padded_buffer_size, ArrowTyped}; use crate::util::Result; +use crate::util::arrow::{ArrowTyped, downcast_array, padded_buffer_size}; use arrow::datatypes::DataType; /// A trait that allows a common access to polygons of `MultiPolygon`s and its references diff --git a/datatypes/src/primitives/spatial_partition.rs b/datatypes/src/primitives/spatial_partition.rs index abb44db01d..fd323c709f 100644 --- a/datatypes/src/primitives/spatial_partition.rs +++ b/datatypes/src/primitives/spatial_partition.rs @@ -3,9 +3,9 @@ use super::Coordinate2D; use super::SpatialResolution; use crate::error; use crate::raster::GridShape2D; +use crate::util::Result; use crate::util::helpers::snap_next; use crate::util::helpers::snap_prev; -use crate::util::Result; 
use postgres_types::FromSql; use postgres_types::ToSql; use serde::{Deserialize, Serialize}; diff --git a/datatypes/src/primitives/time_interval.rs b/datatypes/src/primitives/time_interval.rs index b0c2d35ba6..8021c0e788 100755 --- a/datatypes/src/primitives/time_interval.rs +++ b/datatypes/src/primitives/time_interval.rs @@ -1,6 +1,6 @@ use crate::primitives::TimeInstance; -use crate::util::arrow::{downcast_array, padded_buffer_size, ArrowTyped}; use crate::util::Result; +use crate::util::arrow::{ArrowTyped, downcast_array, padded_buffer_size}; use crate::{error, util::ranges::value_in_range}; use arrow::array::{Array, ArrayBuilder, BooleanArray, Int64Array}; use arrow::datatypes::{DataType, Field}; diff --git a/datatypes/src/primitives/time_step.rs b/datatypes/src/primitives/time_step.rs index 9d5cc0fc65..dde9fa00ee 100644 --- a/datatypes/src/primitives/time_step.rs +++ b/datatypes/src/primitives/time_step.rs @@ -4,7 +4,7 @@ use std::{cmp::max, convert::TryInto, ops::Add}; use serde::{Deserialize, Serialize}; use postgres_types::{FromSql, ToSql}; -use snafu::{ensure, OptionExt}; +use snafu::{OptionExt, ensure}; use crate::error::{self, Error}; use crate::primitives::TimeInstance; diff --git a/datatypes/src/raster/arrow_conversion.rs b/datatypes/src/raster/arrow_conversion.rs index 430024fe34..6a3487eff4 100644 --- a/datatypes/src/raster/arrow_conversion.rs +++ b/datatypes/src/raster/arrow_conversion.rs @@ -3,8 +3,8 @@ use crate::{raster::RasterDataType, spatial_reference::SpatialReferenceOption, u use arrow::{ array::{Array, ArrayRef, PrimitiveBuilder}, datatypes::{ - Field, Float32Type, Float64Type, Int16Type, Int32Type, Int64Type, Int8Type, Schema, - UInt16Type, UInt32Type, UInt64Type, UInt8Type, + Field, Float32Type, Float64Type, Int8Type, Int16Type, Int32Type, Int64Type, Schema, + UInt8Type, UInt16Type, UInt32Type, UInt64Type, }, ipc::writer::{FileWriter, IpcWriteOptions}, record_batch::RecordBatch, diff --git a/datatypes/src/raster/data_type.rs 
b/datatypes/src/raster/data_type.rs index e11a6a9d86..5bec0cec11 100644 --- a/datatypes/src/raster/data_type.rs +++ b/datatypes/src/raster/data_type.rs @@ -179,11 +179,14 @@ impl RasterDataType { pub fn from_gdal_data_type(gdal_data_type: GdalDataType) -> Result { match gdal_data_type { + GdalDataType::Int8 => Ok(Self::I8), GdalDataType::UInt8 => Ok(Self::U8), GdalDataType::UInt16 => Ok(Self::U16), GdalDataType::Int16 => Ok(Self::I16), GdalDataType::UInt32 => Ok(Self::U32), GdalDataType::Int32 => Ok(Self::I32), + GdalDataType::Int64 => Ok(Self::I64), + GdalDataType::UInt64 => Ok(Self::U64), GdalDataType::Float32 => Ok(Self::F32), GdalDataType::Float64 => Ok(Self::F64), GdalDataType::Unknown => Err(Error::GdalRasterDataTypeNotSupported), diff --git a/datatypes/src/raster/empty_grid.rs b/datatypes/src/raster/empty_grid.rs index a51c6cf4a3..e9842ee61e 100644 --- a/datatypes/src/raster/empty_grid.rs +++ b/datatypes/src/raster/empty_grid.rs @@ -1,7 +1,7 @@ use super::{ - grid_traits::{ChangeGridBounds, GridShapeAccess}, GridBoundingBox, GridBounds, GridIdx, GridShape, GridShape1D, GridShape2D, GridShape3D, GridSize, GridSpaceToLinearSpace, + grid_traits::{ChangeGridBounds, GridShapeAccess}, }; use crate::util::{ByteSize, Result}; use serde::{Deserialize, Serialize}; diff --git a/datatypes/src/raster/geo_transform.rs b/datatypes/src/raster/geo_transform.rs index b924013552..beba9a1dcd 100644 --- a/datatypes/src/raster/geo_transform.rs +++ b/datatypes/src/raster/geo_transform.rs @@ -2,7 +2,7 @@ use crate::{ primitives::{AxisAlignedRectangle, Coordinate2D, SpatialPartition2D, SpatialResolution}, util::test::TestDefault, }; -use serde::{de, Deserialize, Deserializer, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, de}; use super::{GridBoundingBox2D, GridIdx, GridIdx2D}; diff --git a/datatypes/src/raster/grid.rs b/datatypes/src/raster/grid.rs index 25b1c87f5d..5538917ca2 100644 --- a/datatypes/src/raster/grid.rs +++ b/datatypes/src/raster/grid.rs @@ -1,7 +1,7 
@@ use super::{ - grid_traits::{ChangeGridBounds, GridShapeAccess}, GridBoundingBox, GridBounds, GridContains, GridIdx, GridIdx2D, GridIndexAccess, GridIndexAccessMut, GridSize, GridSpaceToLinearSpace, + grid_traits::{ChangeGridBounds, GridShapeAccess}, }; use crate::util::Result; use crate::{error, util::ByteSize}; @@ -267,7 +267,7 @@ impl GridBounds for GridShape3D { } /// Method to generate an `Iterator` over all `GridIdx2D` in `GridBounds` -pub fn grid_idx_iter_2d(bounds: &B) -> impl Iterator +pub fn grid_idx_iter_2d(bounds: &B) -> impl Iterator + use where B: GridBounds, { diff --git a/datatypes/src/raster/grid_or_empty.rs b/datatypes/src/raster/grid_or_empty.rs index 522f7fa76f..5a22943f3b 100644 --- a/datatypes/src/raster/grid_or_empty.rs +++ b/datatypes/src/raster/grid_or_empty.rs @@ -1,11 +1,11 @@ use std::ops::Add; use super::{ + Grid, GridBoundingBox, GridBounds, GridIdx, GridIndexAccess, GridShape, GridShape1D, + GridShape2D, GridShape3D, GridSize, GridSpaceToLinearSpace, Pixel, empty_grid::EmptyGrid, grid_traits::{ChangeGridBounds, GridShapeAccess}, masked_grid::MaskedGrid, - Grid, GridBoundingBox, GridBounds, GridIdx, GridIndexAccess, GridShape, GridShape1D, - GridShape2D, GridShape3D, GridSize, GridSpaceToLinearSpace, Pixel, }; use crate::util::{ByteSize, Result}; diff --git a/datatypes/src/raster/macros_raster_tile.rs b/datatypes/src/raster/macros_raster_tile.rs index 12b8c11f48..6da02de313 100644 --- a/datatypes/src/raster/macros_raster_tile.rs +++ b/datatypes/src/raster/macros_raster_tile.rs @@ -398,18 +398,22 @@ mod tests { 2 ); - assert!(call_bi_generic_raster_tile_2d_same!( - &typed_raster_a, &typed_raster_b, - (a, b) => Ok(first_pixel_add(a, b)), - Err(()) - ) - .is_err()); + assert!( + call_bi_generic_raster_tile_2d_same!( + &typed_raster_a, &typed_raster_b, + (a, b) => Ok(first_pixel_add(a, b)), + Err(()) + ) + .is_err() + ); - assert!(catch_unwind_silent(|| call_bi_generic_raster_tile_2d_same!( - &typed_raster_a, &typed_raster_b, - (a, b) 
=> first_pixel_add(a, b) - )) - .is_err()); + assert!( + catch_unwind_silent(|| call_bi_generic_raster_tile_2d_same!( + &typed_raster_a, &typed_raster_b, + (a, b) => first_pixel_add(a, b) + )) + .is_err() + ); } #[test] @@ -449,12 +453,14 @@ mod tests { 2 ); - assert!(call_bi_generic_raster_tile_2d_staircase!( - &typed_raster_b, &typed_raster_a, - (a, b) => Ok(first_pixel_add(a, b)), - Err(()) - ) - .is_err()); + assert!( + call_bi_generic_raster_tile_2d_staircase!( + &typed_raster_b, &typed_raster_a, + (a, b) => Ok(first_pixel_add(a, b)), + Err(()) + ) + .is_err() + ); assert!( catch_unwind_silent(|| call_bi_generic_raster_tile_2d_staircase!( diff --git a/datatypes/src/raster/mod.rs b/datatypes/src/raster/mod.rs index 7387284125..90e9cbc5e3 100755 --- a/datatypes/src/raster/mod.rs +++ b/datatypes/src/raster/mod.rs @@ -5,8 +5,8 @@ pub use self::data_type::{ pub use self::empty_grid::{EmptyGrid, EmptyGrid1D, EmptyGrid2D, EmptyGrid3D}; pub use self::geo_transform::{GdalGeoTransform, GeoTransform}; pub use self::grid::{ - grid_idx_iter_2d, Grid, Grid1D, Grid2D, Grid3D, GridShape, GridShape1D, GridShape2D, - GridShape3D, + Grid, Grid1D, Grid2D, Grid3D, GridShape, GridShape1D, GridShape2D, GridShape3D, + grid_idx_iter_2d, }; pub use self::grid_bounds::{ GridBoundingBox, GridBoundingBox1D, GridBoundingBox2D, GridBoundingBox3D, @@ -24,8 +24,8 @@ pub use self::operations::{ interpolation::NearestNeighbor, }; pub use self::raster_tile::{ - display_raster_tile_2d, BaseTile, MaterializedRasterTile, MaterializedRasterTile2D, - MaterializedRasterTile3D, RasterTile, RasterTile2D, RasterTile3D, TilesEqualIgnoringCacheHint, + BaseTile, MaterializedRasterTile, MaterializedRasterTile2D, MaterializedRasterTile3D, + RasterTile, RasterTile2D, RasterTile3D, TilesEqualIgnoringCacheHint, display_raster_tile_2d, }; pub use self::tiling::{TileInformation, TilingSpecification, TilingStrategy}; pub use self::typed_raster_conversion::TypedRasterConversion; diff --git 
a/datatypes/src/raster/no_data_value_grid.rs b/datatypes/src/raster/no_data_value_grid.rs index b1e12d7ebe..6f409c8bca 100644 --- a/datatypes/src/raster/no_data_value_grid.rs +++ b/datatypes/src/raster/no_data_value_grid.rs @@ -2,7 +2,7 @@ use super::{ EmptyGrid, Grid, GridIndexAccess, GridOrEmpty, GridShape1D, GridShape2D, GridShape3D, GridSize, MaskedGrid, }; -use crate::util::{helpers::equals_or_both_nan, Result}; +use crate::util::{Result, helpers::equals_or_both_nan}; pub type NoDataValueGrid1D = NoDataValueGrid; pub type NoDataValueGrid2D = NoDataValueGrid; diff --git a/datatypes/src/raster/operations/convert_data_type.rs b/datatypes/src/raster/operations/convert_data_type.rs index 3920eee7a3..6a084e63ad 100644 --- a/datatypes/src/raster/operations/convert_data_type.rs +++ b/datatypes/src/raster/operations/convert_data_type.rs @@ -1,4 +1,4 @@ -use crate::raster::{masked_grid::MaskedGrid, BaseTile, EmptyGrid, Grid, GridOrEmpty, GridSize}; +use crate::raster::{BaseTile, EmptyGrid, Grid, GridOrEmpty, GridSize, masked_grid::MaskedGrid}; use num_traits::AsPrimitive; use rayon::iter::{IndexedParallelIterator, IntoParallelIterator, ParallelIterator}; diff --git a/datatypes/src/raster/operations/from_index_fn.rs b/datatypes/src/raster/operations/from_index_fn.rs index efd9d267d6..0a0e3f3cd6 100644 --- a/datatypes/src/raster/operations/from_index_fn.rs +++ b/datatypes/src/raster/operations/from_index_fn.rs @@ -445,11 +445,7 @@ mod tests { fn masked_grid_from_linear_index_parallel_option() { let grid_shape = GridShape::from([2, 4]); let masked_grid = MaskedGrid::from_index_fn_parallel(&grid_shape, |i: usize| { - if i % 2 == 0 { - Some(i) - } else { - None - } + if i % 2 == 0 { Some(i) } else { None } }); assert_eq!(masked_grid.shape(), &GridShape::from([2, 4])); let res_values: Vec> = masked_grid.masked_element_deref_iterator().collect(); @@ -464,11 +460,7 @@ mod tests { let grid_shape = GridShape::from([2, 4]); let masked_grid = MaskedGrid::from_index_fn(&grid_shape, 
|GridIdx([y, x]): GridIdx2D| { let r = y * 10 + x; - if r % 2 == 0 { - Some(r) - } else { - None - } + if r % 2 == 0 { Some(r) } else { None } }); assert_eq!(masked_grid.shape(), &GridShape::from([2, 4])); let res_values: Vec> = masked_grid.masked_element_deref_iterator().collect(); @@ -484,11 +476,7 @@ mod tests { let masked_grid = MaskedGrid::from_index_fn_parallel(&grid_shape, |GridIdx([y, x]): GridIdx2D| { let r = y * 10 + x; - if r % 2 == 0 { - Some(r) - } else { - None - } + if r % 2 == 0 { Some(r) } else { None } }); assert_eq!(masked_grid.shape(), &GridShape::from([2, 4])); let res_values: Vec> = masked_grid.masked_element_deref_iterator().collect(); @@ -504,11 +492,7 @@ mod tests { let grid_or_empty = GridOrEmpty::from_index_fn(&grid_shape, |GridIdx([y, x]): GridIdx2D| { let r = y * 10 + x; - if r % 2 == 0 { - Some(r) - } else { - None - } + if r % 2 == 0 { Some(r) } else { None } }); assert!(grid_or_empty.is_grid()); @@ -531,11 +515,7 @@ mod tests { let grid_or_empty = GridOrEmpty::from_index_fn_parallel(&grid_shape, |GridIdx([y, x]): GridIdx2D| { let r = y * 10 + x; - if r % 2 == 0 { - Some(r) - } else { - None - } + if r % 2 == 0 { Some(r) } else { None } }); assert!(grid_or_empty.is_grid()); @@ -559,11 +539,7 @@ mod tests { let grid_or_empty = GridOrEmpty2D::::from_index_fn(&grid_shape, |GridIdx([y, x]): GridIdx2D| { let r = y * 10 + x; - if r > 100 { - Some(r) - } else { - None - } + if r > 100 { Some(r) } else { None } }); assert!(grid_or_empty.is_empty()); @@ -582,11 +558,7 @@ mod tests { &grid_shape, |GridIdx([y, x]): GridIdx2D| { let r = y * 10 + x; - if r > 100 { - Some(r) - } else { - None - } + if r > 100 { Some(r) } else { None } }, ); diff --git a/datatypes/src/raster/operations/grid_blit.rs b/datatypes/src/raster/operations/grid_blit.rs index eb901549e0..f9cc8f8dc2 100644 --- a/datatypes/src/raster/operations/grid_blit.rs +++ b/datatypes/src/raster/operations/grid_blit.rs @@ -1,7 +1,7 @@ use crate::raster::{ - empty_grid::EmptyGrid, 
masked_grid::MaskedGrid, BoundedGrid, Grid, Grid1D, Grid2D, Grid3D, - GridBoundingBox, GridBounds, GridIdx, GridIndexAccessMut, GridIntersection, GridOrEmpty, - GridSize, GridSpaceToLinearSpace, + BoundedGrid, Grid, Grid1D, Grid2D, Grid3D, GridBoundingBox, GridBounds, GridIdx, + GridIndexAccessMut, GridIntersection, GridOrEmpty, GridSize, GridSpaceToLinearSpace, + empty_grid::EmptyGrid, masked_grid::MaskedGrid, }; pub trait GridBlit @@ -239,8 +239,8 @@ where #[cfg(test)] mod tests { use crate::raster::{ - masked_grid::{MaskedGrid2D, MaskedGrid3D}, EmptyGrid2D, EmptyGrid3D, Grid, Grid2D, Grid3D, GridBlit, GridBoundingBox, GridIdx, + masked_grid::{MaskedGrid2D, MaskedGrid3D}, }; #[test] diff --git a/datatypes/src/raster/operations/interpolation.rs b/datatypes/src/raster/operations/interpolation.rs index 8c882a4bd3..768076995d 100644 --- a/datatypes/src/raster/operations/interpolation.rs +++ b/datatypes/src/raster/operations/interpolation.rs @@ -249,7 +249,12 @@ mod tests { #[test] #[allow(clippy::float_cmp)] fn bilinear_fn() { - let [(a_x, a_y, a_v), (_b_x, b_y, b_v), (c_x, _c_y, c_v), (_d_x, _d_y, d_v)] = [ + let [ + (a_x, a_y, a_v), + (_b_x, b_y, b_v), + (c_x, _c_y, c_v), + (_d_x, _d_y, d_v), + ] = [ (54.5, 17.041_667, 31.993), (54.5, 17.083_333, 31.911), (54.458_333, 17.041_667, 31.945), diff --git a/datatypes/src/raster/operations/map_elements.rs b/datatypes/src/raster/operations/map_elements.rs index f79bc7d107..aa4a41d0c6 100644 --- a/datatypes/src/raster/operations/map_elements.rs +++ b/datatypes/src/raster/operations/map_elements.rs @@ -491,11 +491,7 @@ mod tests { let scaled_r1 = t1.map_elements(|p| { if let Some(p) = p { - if p == 7 { - Some(p * 2 + 1) - } else { - None - } + if p == 7 { Some(p * 2 + 1) } else { None } } else { None } diff --git a/datatypes/src/raster/operations/map_indexed_elements.rs b/datatypes/src/raster/operations/map_indexed_elements.rs index 642bb3af5f..ea63817acb 100644 --- a/datatypes/src/raster/operations/map_indexed_elements.rs 
+++ b/datatypes/src/raster/operations/map_indexed_elements.rs @@ -685,11 +685,7 @@ mod tests { ); let r2 = r1.map_indexed_elements( |idx: usize, p: Option| { - if p.is_some() { - None - } else { - Some(idx as i32) - } + if p.is_some() { None } else { Some(idx as i32) } }, ); @@ -808,11 +804,7 @@ mod tests { let r2 = r1.map_indexed_elements_parallel( |idx: usize, p: Option| { - if p.is_some() { - None - } else { - Some(idx as i32) - } + if p.is_some() { None } else { Some(idx as i32) } }, ); diff --git a/datatypes/src/raster/operations/update_indexed_elements.rs b/datatypes/src/raster/operations/update_indexed_elements.rs index d3341a6228..49d5f80d0b 100644 --- a/datatypes/src/raster/operations/update_indexed_elements.rs +++ b/datatypes/src/raster/operations/update_indexed_elements.rs @@ -435,11 +435,7 @@ mod tests { ); r1.update_indexed_elements( |idx: usize, p: Option| { - if p.is_some() { - None - } else { - Some(idx as i32) - } + if p.is_some() { None } else { Some(idx as i32) } }, ); @@ -557,11 +553,7 @@ mod tests { ); r1.update_indexed_elements_parallel( |idx: usize, p: Option| { - if p.is_some() { - None - } else { - Some(idx as i32) - } + if p.is_some() { None } else { Some(idx as i32) } }, ); diff --git a/datatypes/src/raster/raster_tile.rs b/datatypes/src/raster/raster_tile.rs index 6901498dd7..a61f48ade7 100644 --- a/datatypes/src/raster/raster_tile.rs +++ b/datatypes/src/raster/raster_tile.rs @@ -1,8 +1,8 @@ use super::masked_grid::MaskedGrid; use super::{ - grid_or_empty::GridOrEmpty, GeoTransform, GeoTransformAccess, GridBounds, GridIdx2D, - GridIndexAccess, GridShape, GridShape2D, GridShape3D, GridShapeAccess, GridSize, Raster, - TileInformation, + GeoTransform, GeoTransformAccess, GridBounds, GridIdx2D, GridIndexAccess, GridShape, + GridShape2D, GridShape3D, GridShapeAccess, GridSize, Raster, TileInformation, + grid_or_empty::GridOrEmpty, }; use super::{GridIndexAccessMut, RasterProperties}; use crate::primitives::CacheHint; diff --git 
a/datatypes/src/raster/tiling.rs b/datatypes/src/raster/tiling.rs index 6224d08bc4..50deaf228e 100644 --- a/datatypes/src/raster/tiling.rs +++ b/datatypes/src/raster/tiling.rs @@ -103,7 +103,7 @@ impl TilingStrategy { pub fn tile_idx_iterator( &self, partition: SpatialPartition2D, - ) -> impl Iterator { + ) -> impl Iterator + use<> { let GridIdx([upper_left_tile_y, upper_left_tile_x]) = self.pixel_idx_to_tile_idx(self.geo_transform.upper_left_pixel_idx(&partition)); @@ -121,7 +121,7 @@ impl TilingStrategy { pub fn tile_information_iterator( &self, partition: SpatialPartition2D, - ) -> impl Iterator { + ) -> impl Iterator + use<> { let tile_pixel_size = self.tile_size_in_pixels; let geo_transform = self.geo_transform; self.tile_idx_iterator(partition) diff --git a/datatypes/src/spatial_reference.rs b/datatypes/src/spatial_reference.rs index 8c5a7ba6d8..fd258e469b 100644 --- a/datatypes/src/spatial_reference.rs +++ b/datatypes/src/spatial_reference.rs @@ -88,7 +88,7 @@ impl SpatialReference { pub fn area_of_use(self) -> Result { let proj_string = self.proj_string()?; - let proj = Proj::new(&proj_string).ok_or(error::Error::InvalidProjDefinition { + let proj = Proj::new(&proj_string).map_err(|_| error::Error::InvalidProjDefinition { proj_definition: proj_string.clone(), })?; let area = proj @@ -193,7 +193,7 @@ impl FromStr for SpatialReferenceAuthority { _ => { return Err(error::Error::InvalidSpatialReferenceString { spatial_reference_string: s.into(), - }) + }); } }) } @@ -546,7 +546,9 @@ mod tests { "EPSG:4326" ); assert_eq!( - SpatialReference::new(SpatialReferenceAuthority::SrOrg, 81).proj_string().unwrap(), + SpatialReference::new(SpatialReferenceAuthority::SrOrg, 81) + .proj_string() + .unwrap(), "+proj=geos +lon_0=0 +h=35785831 +x_0=0 +y_0=0 +ellps=WGS84 +units=m +no_defs +type=crs" ); assert_eq!( @@ -561,9 +563,11 @@ mod tests { .unwrap(), "ESRI:42" ); - assert!(SpatialReference::new(SpatialReferenceAuthority::SrOrg, 1) - .proj_string() - .is_err()); + 
assert!( + SpatialReference::new(SpatialReferenceAuthority::SrOrg, 1) + .proj_string() + .is_err() + ); } #[test] diff --git a/datatypes/src/util/byte_size.rs b/datatypes/src/util/byte_size.rs index a92c827f33..cbf6ba3874 100644 --- a/datatypes/src/util/byte_size.rs +++ b/datatypes/src/util/byte_size.rs @@ -1,6 +1,6 @@ use crate::collections::{FeatureCollection, FeatureCollectionInfos}; use crate::raster::Pixel; -use std::collections::{hash_map::RandomState, HashMap}; +use std::collections::{HashMap, hash_map::RandomState}; /// A trait for types that have a size in bytes /// that it takes up in memory diff --git a/datatypes/src/util/helpers.rs b/datatypes/src/util/helpers.rs index aab3438d88..55d5a543c1 100644 --- a/datatypes/src/util/helpers.rs +++ b/datatypes/src/util/helpers.rs @@ -1,5 +1,5 @@ use rayon::iter::{ - plumbing::Producer, IndexedParallelIterator, IntoParallelIterator, ParallelIterator, + IndexedParallelIterator, IntoParallelIterator, ParallelIterator, plumbing::Producer, }; /// This macro allows comparing float slices using [`float_cmp::approx_eq`]. 
diff --git a/datatypes/src/util/image.rs b/datatypes/src/util/image.rs index 881cfd9a59..38d9f6826d 100644 --- a/datatypes/src/util/image.rs +++ b/datatypes/src/util/image.rs @@ -16,6 +16,40 @@ pub fn assert_image_equals(expected: &Path, found: &[u8]) { assert_eq!(left, right, "Images differ: {expected:?}"); } +/// Compare two images +/// +/// # Panics +/// - if the `expected` image cannot be loaded +/// - if the `found` bytes cannot be loaded as an image +/// - if the images differ +/// +pub fn assert_image_equals_with_format(expected: &Path, found: &[u8], format: ImageFormat) { + let left_buf = std::fs::read(expected).expect("Failed to read `expected` path"); + let left = image::load_from_memory_with_format(&left_buf, format.into()) + .expect("Failed to make image from `expected` path"); + let right = image::load_from_memory_with_format(found, format.into()) + .expect("Failed to make image from `found` bytes"); + + assert_eq!(left, right, "Images differ: {expected:?}"); +} + +/// Image format +#[derive(Debug, Clone, Copy, PartialEq, Eq)] + +pub enum ImageFormat { + Png, + Tiff, +} + +impl From for image::ImageFormat { + fn from(format: ImageFormat) -> Self { + match format { + ImageFormat::Png => image::ImageFormat::Png, + ImageFormat::Tiff => image::ImageFormat::Tiff, + } + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/datatypes/src/util/mod.rs b/datatypes/src/util/mod.rs index 737e1359db..d9290a1b13 100644 --- a/datatypes/src/util/mod.rs +++ b/datatypes/src/util/mod.rs @@ -15,7 +15,7 @@ pub use self::identifiers::Identifier; pub use any::{AsAny, AsAnyArc}; pub use byte_size::ByteSize; pub use db_types::{HashMapTextTextDbType, NotNanF64, StringPair, TextTextKeyValue}; -pub use image::assert_image_equals; +pub use image::{ImageFormat, assert_image_equals, assert_image_equals_with_format}; pub use result::Result; use std::path::{Path, PathBuf}; diff --git a/datatypes/tests/example-arrow.rs b/datatypes/tests/example-arrow.rs index 
53c0f98c7d..b13515bd2c 100755 --- a/datatypes/tests/example-arrow.rs +++ b/datatypes/tests/example-arrow.rs @@ -1,8 +1,8 @@ use arrow::array::{ - downcast_array, Array, ArrayData, AsArray, BooleanArray, Date64Array, Date64Builder, - FixedSizeBinaryBuilder, FixedSizeListArray, FixedSizeListBuilder, Float64Array, Float64Builder, - Int32Array, Int32Builder, Int64Builder, ListArray, ListBuilder, StringArray, StringBuilder, - StructBuilder, UInt64Array, UInt64Builder, + Array, ArrayData, AsArray, BooleanArray, Date64Array, Date64Builder, FixedSizeBinaryBuilder, + FixedSizeListArray, FixedSizeListBuilder, Float64Array, Float64Builder, Int32Array, + Int32Builder, Int64Builder, ListArray, ListBuilder, StringArray, StringBuilder, StructBuilder, + UInt64Array, UInt64Builder, downcast_array, }; use arrow::buffer::Buffer; use arrow::compute::kernels::filter::filter; diff --git a/expression/deps-workspace/Cargo.toml b/expression/deps-workspace/Cargo.toml index 7d08b9389b..8594c10041 100644 --- a/expression/deps-workspace/Cargo.toml +++ b/expression/deps-workspace/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "geoengine-expression-deps" version = "0.0.0" -edition = "2021" +edition = "2024" [lib] path = "lib.rs" diff --git a/expression/src/codegen.rs b/expression/src/codegen.rs index 70b4f8af41..bb20a097fb 100644 --- a/expression/src/codegen.rs +++ b/expression/src/codegen.rs @@ -1,7 +1,7 @@ use super::error::{ExpressionParserError, ExpressionSemanticError}; use crate::functions::Function; use proc_macro2::TokenStream; -use quote::{format_ident, quote, ToTokens}; +use quote::{ToTokens, format_ident, quote}; use std::{collections::BTreeSet, fmt::Debug, hash::Hash}; type Result = std::result::Result; @@ -84,7 +84,7 @@ impl ToTokens for ExpressionAst { let dtype = self.out_type; tokens.extend(quote! 
{ - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn #fn_name (#(#params),*) -> Option<#dtype> { #content } diff --git a/expression/src/compiled.rs b/expression/src/compiled.rs index a7613df085..837a251c28 100644 --- a/expression/src/compiled.rs +++ b/expression/src/compiled.rs @@ -1,9 +1,9 @@ use crate::{ - error::{self, CompilationFailed, Compiler, ExpressionExecutionError}, ExpressionAst, ExpressionDependencies, + error::{self, CompilationFailed, Compiler, ExpressionExecutionError}, }; -use libloading::{library_filename, Library, Symbol}; -use snafu::{ensure, ResultExt}; +use libloading::{Library, Symbol, library_filename}; +use snafu::{ResultExt, ensure}; use std::{ borrow::Cow, fs::File, @@ -78,11 +78,13 @@ impl LinkedExpression { /// #[allow(clippy::type_complexity)] pub unsafe fn function_1(&self) -> Result Option>> { - self.library - .get(self.function_name.as_bytes()) - .context(error::LinkedFunctionNotFound { - name: self.function_name.clone(), - }) + unsafe { + self.library + .get(self.function_name.as_bytes()) + .context(error::LinkedFunctionNotFound { + name: self.function_name.clone(), + }) + } } /// Returns a function with 3 input parameters /// @@ -92,11 +94,13 @@ impl LinkedExpression { /// #[allow(clippy::type_complexity)] pub unsafe fn function_2(&self) -> Result Option>> { - self.library - .get(self.function_name.as_bytes()) - .context(error::LinkedFunctionNotFound { - name: self.function_name.clone(), - }) + unsafe { + self.library + .get(self.function_name.as_bytes()) + .context(error::LinkedFunctionNotFound { + name: self.function_name.clone(), + }) + } } /// Returns an n-ary function @@ -107,11 +111,13 @@ impl LinkedExpression { /// #[allow(clippy::type_complexity)] pub unsafe fn function_nary(&self) -> Result> { - self.library - .get(self.function_name.as_bytes()) - .context(error::LinkedFunctionNotFound { - name: self.function_name.clone(), - }) + unsafe { + self.library + .get(self.function_name.as_bytes()) + 
.context(error::LinkedFunctionNotFound { + name: self.function_name.clone(), + }) + } } } @@ -146,7 +152,7 @@ fn compile_file( let mut command = Command::new("rustc"); command - .args(["--edition", "2021"]) + .args(["--edition", "2024"]) .args(["--crate-type", "cdylib"]) .args(["-C", "opt-level=3"]) .arg("-L") @@ -194,7 +200,7 @@ mod tests { use geo::{polygon}; use geoengine_expression_deps::*; - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn area_of_polygon() -> Option { let polygon = MultiPolygon::from(polygon![ (x: 0., y: 0.), diff --git a/expression/src/functions.rs b/expression/src/functions.rs index f6f0d0749e..af48bfcca3 100644 --- a/expression/src/functions.rs +++ b/expression/src/functions.rs @@ -3,7 +3,7 @@ use crate::{ error::ExpressionSemanticError, }; use proc_macro2::TokenStream; -use quote::{quote, ToTokens}; +use quote::{ToTokens, quote}; use std::{collections::HashMap, hash::Hash, sync::OnceLock}; type Result = std::result::Result; @@ -240,9 +240,11 @@ pub fn init_functions() -> HashMap<&'static str, FunctionGenerator> { FunctionGenerator { name, generate_fn: |name, args| match args { - [dtype @ (DataType::MultiPoint - | DataType::MultiLineString - | DataType::MultiPolygon)] => Ok(Function { + [ + dtype @ (DataType::MultiPoint + | DataType::MultiLineString + | DataType::MultiPolygon), + ] => Ok(Function { name: unique_name(name, args), signature: vec![*dtype], output_type: DataType::MultiPoint, @@ -288,9 +290,11 @@ pub fn init_functions() -> HashMap<&'static str, FunctionGenerator> { FunctionGenerator { name, generate_fn: |name, args| match args { - [dtype @ (DataType::MultiPoint - | DataType::MultiLineString - | DataType::MultiPolygon)] => Ok(Function { + [ + dtype @ (DataType::MultiPoint + | DataType::MultiLineString + | DataType::MultiPolygon), + ] => Ok(Function { name: unique_name(name, args), signature: vec![*dtype], output_type: DataType::Number, diff --git a/expression/src/parser.rs b/expression/src/parser.rs index 
0f062439aa..81b4e374e3 100644 --- a/expression/src/parser.rs +++ b/expression/src/parser.rs @@ -6,19 +6,19 @@ use super::{ Branch, ExpressionAst, Identifier, Parameter, }, error::{self, ExpressionSemanticError}, - functions::{init_functions, FUNCTIONS}, + functions::{FUNCTIONS, init_functions}, util::duplicate_or_empty_str_slice, }; use pest::{ + Parser, iterators::{Pair, Pairs}, pratt_parser::{Assoc, Op, PrattParser}, - Parser, }; use pest_derive::Parser; use snafu::{OptionExt, ResultExt}; use std::{ cell::RefCell, - collections::{hash_map, BTreeSet, HashMap}, + collections::{BTreeSet, HashMap, hash_map}, rc::Rc, sync::OnceLock, }; @@ -336,7 +336,7 @@ impl ExpressionParser { return Err(ExpressionSemanticError::UnexpectedOperator { found: op.as_str().to_string(), } - .into_parser_error(op.as_span())) + .into_parser_error(op.as_span())); } }; @@ -446,7 +446,7 @@ impl ExpressionParser { return Err(ExpressionSemanticError::UnexpectedComparator { comparator: format!("{:?}", second_pair.as_rule()), } - .into_parser_error(span)) + .into_parser_error(span)); } }; let right_expression = self.build_ast(third_pair.into_inner(), variables)?; @@ -504,7 +504,7 @@ mod tests { use crate::codegen::Prelude; use pretty_assertions::assert_str_eq; use proc_macro2::TokenStream; - use quote::{quote, ToTokens}; + use quote::{ToTokens, quote}; fn parse(name: &str, parameters: &[&str], input: &str) -> String { let parameters: Vec = parameters @@ -615,7 +615,7 @@ mod tests { quote! 
{ #Prelude - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { Some(1f64) } @@ -630,7 +630,7 @@ mod tests { #ADD_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn foo() -> Option { expression_fn_add__n_n(Some(1f64), Some(2f64)) } @@ -645,7 +645,7 @@ mod tests { #ADD_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn bar() -> Option { expression_fn_add__n_n(Some(-1f64), Some(2f64)) } @@ -660,7 +660,7 @@ mod tests { #SUB_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn baz() -> Option { expression_fn_sub__n_n(Some(1f64), Some(-2f64)) } @@ -683,7 +683,7 @@ mod tests { } } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { expression_fn_add__n_n( Some(1f64), @@ -701,7 +701,7 @@ mod tests { #POW_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { expression_fn_pow__n_n(Some(2f64) , Some(4f64)) } @@ -719,7 +719,7 @@ mod tests { #ADD_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression(a: Option) -> Option { expression_fn_add__n_n(a, Some(1f64)) } @@ -736,7 +736,7 @@ mod tests { #DIV_FN #SUB_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn ndvi(a: Option, b: Option) -> Option { expression_fn_div__n_n( expression_fn_sub__n_n(a, b), @@ -764,7 +764,7 @@ mod tests { } } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression(a: Option) -> Option { expression_fn_max__n_n(a, Some(0f64)) } @@ -789,7 +789,7 @@ mod tests { a.map(f64::sqrt) } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression(a: Option) -> Option { expression_fn_pow__n_n(expression_fn_sqrt__n(a), Some(2f64)) } @@ -827,7 +827,7 @@ mod tests { a.map(f64::tan) } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn waves() -> Option { expression_fn_cos__n(expression_fn_sin__n(expression_fn_tan__n(expression_fn_acos__n(expression_fn_asin__n(expression_fn_atan__n(Some(1f64))))))) } @@ -853,7 +853,7 @@ mod tests { 
Some(std::f64::consts::PI) } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn non_linear() -> Option { expression_fn_ln__n(expression_fn_log10__n(expression_fn_pi_())) } @@ -880,7 +880,7 @@ mod tests { a.map(f64::round) } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn rounding() -> Option { expression_fn_add__n_n( expression_fn_add__n_n( @@ -909,7 +909,7 @@ mod tests { a.map(f64::to_radians) } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn radians() -> Option { expression_fn_add__n_n(expression_fn_to_radians__n(Some(1.3f64)), expression_fn_to_degrees__n(Some(1.3f64))) } @@ -934,7 +934,7 @@ mod tests { } } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn mod_e() -> Option { expression_fn_mod__n_n(Some(5f64), expression_fn_e_()) } @@ -946,7 +946,7 @@ mod tests { try_parse("will_not_compile", &[], DataType::Number, "max(1, 2, 3)") .unwrap_err() .to_string(), - " --> 1:1\n |\n1 | max(1, 2, 3)\n | ^----------^\n |\n = Invalid function arguments for function `max`: expected [number, number], got [number, number, number]" + " --> 1:1\n |\n1 | max(1, 2, 3)\n | ^----------^\n |\n = Invalid function arguments for function `max`: expected [number, number], got [number, number, number]" ); } @@ -957,7 +957,7 @@ mod tests { quote! { #Prelude - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression(a: Option) -> Option { if ((a) == (None)) { Some(0f64) @@ -986,7 +986,7 @@ mod tests { #MUL_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression(A: Option, B: Option) -> Option { if ((A) == (None)) { expression_fn_mul__n_n(B, Some(2f64)) @@ -1009,7 +1009,7 @@ mod tests { quote! 
{ #Prelude - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { if true { Some(1f64) @@ -1032,7 +1032,7 @@ mod tests { #ADD_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { if true { Some(1f64) @@ -1058,7 +1058,7 @@ mod tests { #ADD_FN #SUB_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { if ((Some(1f64)) < (Some(2f64))) { Some(1f64) @@ -1095,7 +1095,7 @@ mod tests { } } - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { if ((true) && (false)) { Some(1f64) @@ -1125,7 +1125,7 @@ mod tests { #ADD_FN - #[no_mangle] + #[unsafe(no_mangle)] pub extern "Rust" fn expression() -> Option { let a = Some(1.2f64); let b = Some(2f64); @@ -1148,10 +1148,10 @@ mod tests { let c = 2; a + b", ) - .unwrap_err() - .to_string(), - " --> 2:25\n |\n2 | let b = C;\n | ^\n |\n = The variable `C` was not defined", - "no access before declaration" + .unwrap_err() + .to_string(), + " --> 2:25\n |\n2 | let b = C;\n | ^\n |\n = The variable `C` was not defined", + "no access before declaration" ); assert_eq!( @@ -1162,10 +1162,10 @@ mod tests { "let A = 2; a", ) - .unwrap_err() - .to_string(), - " --> 1:1\n |\n1 | let A = 2;\n2 | a\n | ^---------------^\n |\n = The variable `A` was already defined", - "no shadowing" + .unwrap_err() + .to_string(), + " --> 1:1\n |\n1 | let A = 2;\n2 | a\n | ^---------------^\n |\n = The variable `A` was already defined", + "no shadowing" ); } @@ -1233,7 +1233,7 @@ mod tests { try_parse("expression", &[], DataType::MultiPoint, "1",) .unwrap_err() .to_string(), - " --> 1:1\n |\n1 | \n | ^---\n |\n = The expression was expected to output `geometry (multipoint)`, but it outputs `number`", + " --> 1:1\n |\n1 | \n | ^---\n |\n = The expression was expected to output `geometry (multipoint)`, but it outputs `number`", "cannot call with wrong output" ); } @@ -1257,7 +1257,7 @@ mod tests { geom.centroid() } - #[no_mangle] + 
#[unsafe(no_mangle)] pub extern "Rust" fn make_centroid( geom: Option ) -> Option { diff --git a/macros/src/testing.rs b/macros/src/testing.rs index a648853129..cd6d9235e2 100644 --- a/macros/src/testing.rs +++ b/macros/src/testing.rs @@ -1,7 +1,7 @@ use proc_macro2::{Span, TokenStream}; use quote::quote; use std::collections::HashMap; -use syn::{parse::Parser, punctuated::Punctuated, FnArg, Ident, ItemFn, Lit, Pat, TypePath}; +use syn::{FnArg, Ident, ItemFn, Lit, Pat, TypePath, parse::Parser, punctuated::Punctuated}; pub type Result = std::result::Result; pub type AttributeArgs = syn::punctuated::Punctuated; @@ -117,7 +117,7 @@ impl TestConfig { return Err(syn::Error::new_spanned( lit, "test_execution must be \"parallel\" or \"serial\"", - )) + )); } } } @@ -389,7 +389,7 @@ fn parse_inputs(inputs: &Punctuated) -> Result return Err(syn::Error::new_spanned( input, format!("Unknown input type: {other}"), - )) + )); } }; diff --git a/operators/benches/bands.rs b/operators/benches/bands.rs index 2f5a59c8bd..28064dc31f 100644 --- a/operators/benches/bands.rs +++ b/operators/benches/bands.rs @@ -19,7 +19,7 @@ use geoengine_operators::{ TemporalRasterAggregationParameters, }, source::{GdalSource, GdalSourceParameters}, - util::{gdal::add_ndvi_dataset, number_statistics::NumberStatistics, Result}, + util::{Result, gdal::add_ndvi_dataset, number_statistics::NumberStatistics}, }; use serde::Serialize; diff --git a/operators/benches/cache.rs b/operators/benches/cache.rs index dcc9c93033..499ec72272 100644 --- a/operators/benches/cache.rs +++ b/operators/benches/cache.rs @@ -19,8 +19,8 @@ use geoengine_operators::{ NeighborhoodParams, }, source::{GdalSource, GdalSourceParameters}, - util::gdal::add_ndvi_dataset, util::Result, + util::gdal::add_ndvi_dataset, }; use std::sync::Arc; diff --git a/operators/benches/cache_concurrent.rs b/operators/benches/cache_concurrent.rs index 6474c2adba..862ef5915c 100644 --- a/operators/benches/cache_concurrent.rs +++ 
b/operators/benches/cache_concurrent.rs @@ -17,8 +17,8 @@ use rand::rngs::SmallRng; use rand::{Rng, SeedableRng}; use serde_json::json; use std::hint::black_box; -use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; +use std::sync::atomic::{AtomicUsize, Ordering}; static WRITTEN_ELEMENTS: AtomicUsize = AtomicUsize::new(0); diff --git a/operators/benches/expression.rs b/operators/benches/expression.rs index 468e17d008..f8ed69c408 100644 --- a/operators/benches/expression.rs +++ b/operators/benches/expression.rs @@ -15,7 +15,7 @@ use geoengine_operators::{ }, processing::{Expression, ExpressionParams, RasterStacker, RasterStackerParams}, source::{GdalSource, GdalSourceParameters}, - util::{gdal::add_ndvi_dataset, number_statistics::NumberStatistics, Result}, + util::{Result, gdal::add_ndvi_dataset, number_statistics::NumberStatistics}, }; use serde::Serialize; diff --git a/operators/benches/query_chunks.rs b/operators/benches/query_chunks.rs index 7e037ec222..e96c3bdaa4 100644 --- a/operators/benches/query_chunks.rs +++ b/operators/benches/query_chunks.rs @@ -42,8 +42,8 @@ use geoengine_operators::{ }, source::{GdalSource, GdalSourceParameters, OgrSource, OgrSourceParameters}, util::{ - gdal::{add_ndvi_dataset, add_ports_dataset}, Result, + gdal::{add_ndvi_dataset, add_ports_dataset}, }, }; use std::{ @@ -142,10 +142,12 @@ fn setup_benchmarks(exe_ctx: &mut StatisticsWrappingMockExecutionContext) -> Vec }, } .boxed(), - rasters: vec![GdalSource { - params: GdalSourceParameters { data: ndvi_id }, - } - .boxed()], + rasters: vec![ + GdalSource { + params: GdalSourceParameters { data: ndvi_id }, + } + .boxed(), + ], }, } .boxed(), diff --git a/operators/benches/sources.rs b/operators/benches/sources.rs index 7d08db0766..afde6a93bc 100644 --- a/operators/benches/sources.rs +++ b/operators/benches/sources.rs @@ -302,7 +302,9 @@ fn bench_tile_size() { } fn main() { - println!("Bench_name, query_name, tilesize_x, tilesize_y, query_time (ns), tiles_produced, 
pixels_produced, stream_collect_time (ns) "); + println!( + "Bench_name, query_name, tilesize_x, tilesize_y, query_time (ns), tiles_produced, pixels_produced, stream_collect_time (ns) " + ); bench_no_data_tiles(); bench_tile_size(); diff --git a/operators/src/adapters/band_extractor.rs b/operators/src/adapters/band_extractor.rs index edf9e92960..86a0089415 100644 --- a/operators/src/adapters/band_extractor.rs +++ b/operators/src/adapters/band_extractor.rs @@ -4,7 +4,7 @@ use std::{ }; use crate::util::Result; -use futures::{ready, Stream}; +use futures::{Stream, ready}; use geoengine_datatypes::raster::{Pixel, RasterTile2D}; use pin_project::pin_project; diff --git a/operators/src/adapters/feature_collection_merger.rs b/operators/src/adapters/feature_collection_merger.rs index 46aaede13b..e71b79f5b6 100644 --- a/operators/src/adapters/feature_collection_merger.rs +++ b/operators/src/adapters/feature_collection_merger.rs @@ -1,7 +1,7 @@ use crate::util::Result; +use futures::Stream; use futures::ready; use futures::stream::FusedStream; -use futures::Stream; use geoengine_datatypes::collections::{ FeatureCollection, FeatureCollectionInfos, FeatureCollectionModifications, }; @@ -222,25 +222,29 @@ mod tests { assert_eq!(collections.len(), 2); - assert!(collections[0].chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_data( - MultiPoint::many(coordinates[0..6].to_vec()).unwrap(), - vec![TimeInterval::default(); 6], - Default::default(), - CacheHint::default() + assert!( + collections[0].chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_data( + MultiPoint::many(coordinates[0..6].to_vec()).unwrap(), + vec![TimeInterval::default(); 6], + Default::default(), + CacheHint::default() + ) + .unwrap() ) - .unwrap() - )); + ); - assert!(collections[1].chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_data( - MultiPoint::many(coordinates[6..10].to_vec()).unwrap(), - vec![TimeInterval::default(); 4], - Default::default(), - 
CacheHint::default() + assert!( + collections[1].chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_data( + MultiPoint::many(coordinates[6..10].to_vec()).unwrap(), + vec![TimeInterval::default(); 4], + Default::default(), + CacheHint::default() + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -305,31 +309,35 @@ mod tests { .await; assert_eq!(merged_collections.len(), 3); - assert!(merged_collections[0] - .as_ref() - .unwrap() - .chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_data( - MultiPoint::many(vec![(0.0, 0.1)]).unwrap(), - vec![TimeInterval::new(0, 1).unwrap()], - Default::default(), - CacheHint::default() - ) + assert!( + merged_collections[0] + .as_ref() .unwrap() - )); - assert!(merged_collections[1].is_err()); - assert!(merged_collections[2] - .as_ref() - .unwrap() - .chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_data( - MultiPoint::many(vec![(1.0, 1.1)]).unwrap(), - vec![TimeInterval::new(0, 1).unwrap()], - Default::default(), - CacheHint::default() + .chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_data( + MultiPoint::many(vec![(0.0, 0.1)]).unwrap(), + vec![TimeInterval::new(0, 1).unwrap()], + Default::default(), + CacheHint::default() + ) + .unwrap() ) + ); + assert!(merged_collections[1].is_err()); + assert!( + merged_collections[2] + .as_ref() .unwrap() - )); + .chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_data( + MultiPoint::many(vec![(1.0, 1.1)]).unwrap(), + vec![TimeInterval::new(0, 1).unwrap()], + Default::default(), + CacheHint::default() + ) + .unwrap() + ) + ); } #[tokio::test(flavor = "current_thread")] @@ -373,17 +381,19 @@ mod tests { .await; assert_eq!(merged_collections.len(), 1); - assert!(merged_collections[0] - .as_ref() - .unwrap() - .chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_data( - MultiPoint::many(vec![(0.0, 0.1), (1.0, 1.1)]).unwrap(), - vec![TimeInterval::new(0, 1).unwrap(); 2], - Default::default(), - 
CacheHint::default() - ) + assert!( + merged_collections[0] + .as_ref() .unwrap() - )); + .chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_data( + MultiPoint::many(vec![(0.0, 0.1), (1.0, 1.1)]).unwrap(), + vec![TimeInterval::new(0, 1).unwrap(); 2], + Default::default(), + CacheHint::default() + ) + .unwrap() + ) + ); } } diff --git a/operators/src/adapters/mod.rs b/operators/src/adapters/mod.rs index 2e0c949fbb..e9d7590117 100644 --- a/operators/src/adapters/mod.rs +++ b/operators/src/adapters/mod.rs @@ -12,12 +12,12 @@ use band_extractor::BandExtractor; pub use feature_collection_merger::FeatureCollectionChunkMerger; pub use raster_stacker::{RasterStackerAdapter, RasterStackerSource}; pub use raster_subquery::{ - fold_by_coordinate_lookup_future, FoldTileAccu, FoldTileAccuMut, RasterSubQueryAdapter, - SubQueryTileAggregator, TileReprojectionSubQuery, + FoldTileAccu, FoldTileAccuMut, RasterSubQueryAdapter, SubQueryTileAggregator, + TileReprojectionSubQuery, fold_by_coordinate_lookup_future, }; pub use raster_time::{QueryWrapper, Queryable, RasterArrayTimeAdapter, RasterTimeAdapter}; pub use simple_raster_stacker::{ - stack_individual_aligned_raster_bands, SimpleRasterStackerAdapter, SimpleRasterStackerSource, + SimpleRasterStackerAdapter, SimpleRasterStackerSource, stack_individual_aligned_raster_bands, }; pub use sparse_tiles_fill_adapter::{ FillerTileCacheExpirationStrategy, FillerTimeBounds, SparseTilesFillAdapter, @@ -27,7 +27,7 @@ pub use stream_statistics_adapter::StreamStatisticsAdapter; use self::raster_time_substream::RasterTimeMultiFold; use crate::util::Result; -use futures::{stream::Fuse, Future, Stream, StreamExt}; +use futures::{Future, Stream, StreamExt, stream::Fuse}; use geoengine_datatypes::{ collections::FeatureCollection, primitives::Geometry, diff --git a/operators/src/adapters/raster_stacker.rs b/operators/src/adapters/raster_stacker.rs index 78c78413a7..41b3bc3572 100644 --- a/operators/src/adapters/raster_stacker.rs +++ 
b/operators/src/adapters/raster_stacker.rs @@ -1,7 +1,7 @@ use crate::util::Result; use futures::future::JoinAll; use futures::stream::{Fuse, FusedStream, Stream}; -use futures::{ready, Future, StreamExt}; +use futures::{Future, StreamExt, ready}; use geoengine_datatypes::primitives::{ BandSelection, RasterQueryRectangle, SpatialPartition2D, SpatialResolution, TimeInterval, }; @@ -313,28 +313,37 @@ where }; debug_assert_eq!( - tile.band, *current_band as u32, - "RasterStacker got tile with unexpected band index: expected {}, got {} for source {}", - current_band, - tile.band, - current_stream - ); + tile.band, *current_band as u32, + "RasterStacker got tile with unexpected band index: expected {}, got {} for source {}", + current_band, tile.band, current_stream + ); debug_assert!( - tile.time.contains(time_slice), - "RasterStacker got tile with unexpected time: time slice [{}, {}) not contained in tile time [{}, {}) for source {}", - time_slice.start().as_datetime_string(), - time_slice.end().as_datetime_string(), - tile.time.start().as_datetime_string(), - tile.time.end().as_datetime_string(), - current_stream - ); + tile.time.contains(time_slice), + "RasterStacker got tile with unexpected time: time slice [{}, {}) not contained in tile time [{}, {}) for source {}", + time_slice.start().as_datetime_string(), + time_slice.end().as_datetime_string(), + tile.time.start().as_datetime_string(), + tile.time.end().as_datetime_string(), + current_stream + ); debug_assert_eq!( - Some(tile.tile_position), Self::grid_idx_for_nth_tile(&tile.tile_information(), query_rect.spatial_bounds, *current_spatial_tile), - "RasteStacker got tile with unexpected tile_position: expected {:?}, got {:?} for source {}", - Self::grid_idx_for_nth_tile(&tile.tile_information(), query_rect.spatial_bounds, *current_spatial_tile), tile.tile_position, current_stream - ); + Some(tile.tile_position), + Self::grid_idx_for_nth_tile( + &tile.tile_information(), + query_rect.spatial_bounds, + 
*current_spatial_tile + ), + "RasteStacker got tile with unexpected tile_position: expected {:?}, got {:?} for source {}", + Self::grid_idx_for_nth_tile( + &tile.tile_information(), + query_rect.spatial_bounds, + *current_spatial_tile + ), + tile.tile_position, + current_stream + ); tile.band = sources .iter() @@ -679,14 +688,16 @@ mod tests { let query_ctx = MockQueryContext::test_default(); let stacker = RasterStackerAdapter::new( - vec![( - QueryWrapper { - p: &qp1, - ctx: &query_ctx, - }, - vec![0], - ) - .into()], + vec![ + ( + QueryWrapper { + p: &qp1, + ctx: &query_ctx, + }, + vec![0], + ) + .into(), + ], PartialQueryRect { spatial_bounds: SpatialPartition2D::new_unchecked([0., 1.].into(), [3., 0.].into()), time_interval: TimeInterval::new_unchecked(0, 10), diff --git a/operators/src/adapters/raster_subquery/mod.rs b/operators/src/adapters/raster_subquery/mod.rs index bbdafb52e9..260c950beb 100644 --- a/operators/src/adapters/raster_subquery/mod.rs +++ b/operators/src/adapters/raster_subquery/mod.rs @@ -6,5 +6,5 @@ pub use raster_subquery_adapter::{ }; pub use raster_subquery_reprojection::{ - fold_by_coordinate_lookup_future, TileReprojectionSubQuery, + TileReprojectionSubQuery, fold_by_coordinate_lookup_future, }; diff --git a/operators/src/adapters/raster_subquery/raster_subquery_adapter.rs b/operators/src/adapters/raster_subquery/raster_subquery_adapter.rs index d1ead5f871..4ee9d13067 100644 --- a/operators/src/adapters/raster_subquery/raster_subquery_adapter.rs +++ b/operators/src/adapters/raster_subquery/raster_subquery_adapter.rs @@ -1,17 +1,16 @@ +use crate::adapters::SparseTilesFillAdapter; use crate::adapters::sparse_tiles_fill_adapter::{ FillerTileCacheExpirationStrategy, FillerTimeBounds, }; -use crate::adapters::SparseTilesFillAdapter; use crate::engine::{QueryContext, QueryProcessor, RasterQueryProcessor, RasterResultDescriptor}; use crate::error; use crate::util::Result; use futures::future::BoxFuture; +use futures::{Future, 
stream::FusedStream}; use futures::{ - ready, + FutureExt, TryFuture, TryStreamExt, ready, stream::{BoxStream, TryFold}, - FutureExt, TryFuture, TryStreamExt, }; -use futures::{stream::FusedStream, Future}; use futures::{Stream, StreamExt, TryFutureExt}; use geoengine_datatypes::primitives::{BandSelection, CacheHint}; use geoengine_datatypes::primitives::{ @@ -218,11 +217,11 @@ impl<'a, PixelType, RasterProcessorType, SubQuery> FusedStream where PixelType: Pixel, RasterProcessorType: QueryProcessor< - Output = RasterTile2D, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, SubQuery: SubQueryTileAggregator<'a, PixelType> + 'static, { fn is_terminated(&self) -> bool { @@ -235,11 +234,11 @@ impl<'a, PixelType, RasterProcessorType, SubQuery> Stream where PixelType: Pixel, RasterProcessorType: QueryProcessor< - Output = RasterTile2D, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, SubQuery: SubQueryTileAggregator<'a, PixelType> + 'static, { type Item = Result>>; @@ -585,7 +584,7 @@ pub fn identity_accu( tile_info: TileInformation, query_rect: &RasterQueryRectangle, pool: Arc, -) -> impl Future>> { +) -> impl Future>> + use { let time_interval = query_rect.time_interval; crate::util::spawn_blocking(move || { let output_raster = EmptyGrid2D::new(tile_info.tile_size_in_pixels).into(); diff --git a/operators/src/adapters/raster_subquery/raster_subquery_reprojection.rs b/operators/src/adapters/raster_subquery/raster_subquery_reprojection.rs index cf4db0e8ca..cf4765f7e3 100644 --- a/operators/src/adapters/raster_subquery/raster_subquery_reprojection.rs +++ 
b/operators/src/adapters/raster_subquery/raster_subquery_reprojection.rs @@ -26,9 +26,9 @@ use geoengine_datatypes::{ }; use log::debug; use num; +use rayon::ThreadPool; use rayon::iter::{IndexedParallelIterator, ParallelIterator}; use rayon::slice::ParallelSliceMut; -use rayon::ThreadPool; use super::{FoldTileAccu, FoldTileAccuMut, SubQueryTileAggregator}; @@ -125,7 +125,7 @@ fn build_accu( valid_bounds_out: SpatialPartition2D, out_srs: SpatialReference, in_srs: SpatialReference, -) -> impl Future>> { +) -> impl Future>> + use { let time_interval = query_rect.time_interval; crate::util::spawn_blocking(move || { let output_raster = EmptyGrid::new(tile_info.tile_size_in_pixels); diff --git a/operators/src/adapters/raster_time.rs b/operators/src/adapters/raster_time.rs index cc01c6783b..0a14ec5454 100644 --- a/operators/src/adapters/raster_time.rs +++ b/operators/src/adapters/raster_time.rs @@ -1,10 +1,10 @@ use crate::engine::{QueryContext, RasterQueryProcessor}; -use crate::util::stream_zip::StreamArrayZip; use crate::util::Result; +use crate::util::stream_zip::StreamArrayZip; use futures::future::{self, BoxFuture, Join, JoinAll}; use futures::stream::{BoxStream, FusedStream, Zip}; -use futures::{ready, StreamExt}; use futures::{Future, Stream}; +use futures::{StreamExt, ready}; use geoengine_datatypes::primitives::{RasterQueryRectangle, SpatialPartition2D, TimeInterval}; use geoengine_datatypes::raster::{GridSize, Pixel, RasterTile2D, TileInformation, TilingStrategy}; use pin_project::pin_project; diff --git a/operators/src/adapters/raster_time_substream.rs b/operators/src/adapters/raster_time_substream.rs index 05781396c6..047a4f8842 100644 --- a/operators/src/adapters/raster_time_substream.rs +++ b/operators/src/adapters/raster_time_substream.rs @@ -5,9 +5,8 @@ use std::{ use crate::util::Result; use futures::{ - ready, + Future, ready, stream::{FusedStream, Stream}, - Future, }; use geoengine_datatypes::{ primitives::TimeInterval, diff --git 
a/operators/src/adapters/simple_raster_stacker.rs b/operators/src/adapters/simple_raster_stacker.rs index c2ce786cb0..bd8d3255c7 100644 --- a/operators/src/adapters/simple_raster_stacker.rs +++ b/operators/src/adapters/simple_raster_stacker.rs @@ -2,7 +2,7 @@ use crate::error::{AtLeastOneStreamRequired, Error}; use crate::util::Result; use futures::future::join_all; use futures::stream::{BoxStream, Stream}; -use futures::{ready, Future}; +use futures::{Future, ready}; use geoengine_datatypes::primitives::{BandSelection, RasterQueryRectangle, TimeInterval}; use geoengine_datatypes::raster::{Pixel, RasterTile2D}; use pin_project::pin_project; @@ -165,7 +165,7 @@ where #[cfg(test)] mod tests { - use futures::{stream, StreamExt}; + use futures::{StreamExt, stream}; use geoengine_datatypes::{ primitives::{CacheHint, TimeInterval}, raster::{Grid, TilesEqualIgnoringCacheHint}, diff --git a/operators/src/adapters/sparse_tiles_fill_adapter.rs b/operators/src/adapters/sparse_tiles_fill_adapter.rs index 811cd2b0cd..29ca248994 100644 --- a/operators/src/adapters/sparse_tiles_fill_adapter.rs +++ b/operators/src/adapters/sparse_tiles_fill_adapter.rs @@ -1,5 +1,5 @@ use crate::util::Result; -use futures::{ready, Stream}; +use futures::{Stream, ready}; use geoengine_datatypes::{ primitives::{ CacheExpiration, CacheHint, RasterQueryRectangle, SpatialPartitioned, TimeInstance, @@ -277,8 +277,11 @@ impl StateContainer { if requested_start < first_tile_time.start() { log::debug!( - "The initial tile starts ({}) after the requested start bound ({}), setting the current time to the data start bound ({}) --> filling", first_tile_time.start(), requested_start, start_data_bound - ); + "The initial tile starts ({}) after the requested start bound ({}), setting the current time to the data start bound ({}) --> filling", + first_tile_time.start(), + requested_start, + start_data_bound + ); self.current_time = Some(TimeInterval::new_unchecked( start_data_bound, first_tile_time.start(), @@ 
-287,10 +290,10 @@ impl StateContainer { } if start_data_bound > first_tile_time.start() { log::debug!( - "The initial tile time start ({}) is before the exprected time bounds ({}). This means the data overflows the filler start bound.", - first_tile_time.start(), - start_data_bound - ); + "The initial tile time start ({}) is before the exprected time bounds ({}). This means the data overflows the filler start bound.", + first_tile_time.start(), + start_data_bound + ); } self.current_time = Some(first_tile_time); } @@ -348,10 +351,10 @@ impl StateContainer { } if current_time.end() > time_bounds_end { log::debug!( - "The current time end ({}) is after the exprected time bounds ({}). This means the data overflows the filler end bound.", - current_time.end(), - time_bounds_end - ); + "The current time end ({}) is after the exprected time bounds ({}). This means the data overflows the filler end bound.", + current_time.end(), + time_bounds_end + ); } true @@ -527,10 +530,10 @@ where } if tile.time.start() >= this.sc.requested_time_bounds.end() { log::warn!( - "The tile time start ({}) is outside of the requested time bounds ({})!", - tile.time.start(), - this.sc.requested_time_bounds.end() - ); + "The tile time start ({}) is outside of the requested time bounds ({})!", + tile.time.start(), + this.sc.requested_time_bounds.end() + ); } // 1. 
b) This is a new grid run but the time is not increased @@ -861,7 +864,7 @@ impl From for FillerTileCacheHintProvider { #[cfg(test)] mod tests { - use futures::{stream, StreamExt}; + use futures::{StreamExt, stream}; use geoengine_datatypes::{ primitives::{CacheHint, TimeInterval}, raster::Grid, diff --git a/operators/src/adapters/stream_statistics_adapter.rs b/operators/src/adapters/stream_statistics_adapter.rs index 5a02362a0c..d06cfba62a 100644 --- a/operators/src/adapters/stream_statistics_adapter.rs +++ b/operators/src/adapters/stream_statistics_adapter.rs @@ -1,5 +1,5 @@ use crate::{engine::WorkflowOperatorPath, meta::quota::QuotaTracking}; -use futures::{ready, Stream}; +use futures::{Stream, ready}; use pin_project::pin_project; use std::{ pin::Pin, @@ -114,7 +114,7 @@ mod tests { use crate::meta::quota::{ComputationUnit, QuotaMessage}; use futures::StreamExt; use tokio::sync::mpsc::unbounded_channel; - use tracing::{span, Level}; + use tracing::{Level, span}; use uuid::Uuid; #[tokio::test] diff --git a/operators/src/cache/cache_chunks.rs b/operators/src/cache/cache_chunks.rs index 297fb0873f..73db187332 100644 --- a/operators/src/cache/cache_chunks.rs +++ b/operators/src/cache/cache_chunks.rs @@ -18,7 +18,7 @@ use geoengine_datatypes::{ GeometryCollection, IntoGeometryIterator, }, primitives::{Geometry, MultiLineString, MultiPoint, MultiPolygon, NoGeometry}, - util::{arrow::ArrowTyped, ByteSize}, + util::{ByteSize, arrow::ArrowTyped}, }; use std::collections::HashMap; use std::io::Cursor; @@ -179,12 +179,12 @@ where // If the chunk has no time bounds it must be empty so we can skip the temporal check and return true. let temporal_hit = self .time_interval - .map_or(true, |tb| tb.intersects(&query.time_interval)); + .is_none_or(|tb| tb.intersects(&query.time_interval)); // If the chunk has no spatial bounds it is either an empty collection or a no geometry collection. 
let spatial_hit = self .spatial_bounds - .map_or(true, |sb| sb.intersects_bbox(&query.spatial_bounds)); + .is_none_or(|sb| sb.intersects_bbox(&query.spatial_bounds)); temporal_hit && spatial_hit } @@ -318,10 +318,10 @@ impl CacheElement for FeatureCollection where G: Geometry + ArrowTyped + 'static, CompressedFeatureCollection: CacheBackendElementExt< - Query = VectorQueryRectangle, - LandingZoneContainer = LandingZoneQueryFeatures, - CacheContainer = CachedFeatures, - >, + Query = VectorQueryRectangle, + LandingZoneContainer = LandingZoneQueryFeatures, + CacheContainer = CachedFeatures, + >, FeatureCollection: ByteSize + CacheElementSpatialBounds, { type StoredCacheElement = CompressedFeatureCollection; diff --git a/operators/src/cache/cache_operator.rs b/operators/src/cache/cache_operator.rs index 75c4569173..d2926bd9fb 100644 --- a/operators/src/cache/cache_operator.rs +++ b/operators/src/cache/cache_operator.rs @@ -12,7 +12,7 @@ use crate::error::Error; use crate::util::Result; use async_trait::async_trait; use futures::stream::{BoxStream, FusedStream}; -use futures::{ready, Stream, StreamExt, TryStreamExt}; +use futures::{Stream, StreamExt, TryStreamExt, ready}; use geoengine_datatypes::collections::{FeatureCollection, FeatureCollectionInfos}; use geoengine_datatypes::primitives::{ AxisAlignedRectangle, Geometry, QueryAttributeSelection, QueryRectangle, VectorQueryRectangle, @@ -23,7 +23,7 @@ use geoengine_datatypes::util::helpers::ge_report; use pin_project::{pin_project, pinned_drop}; use std::pin::Pin; use std::task::{Context, Poll}; -use tokio::sync::mpsc::{unbounded_channel, UnboundedSender}; +use tokio::sync::mpsc::{UnboundedSender, unbounded_channel}; /// A cache operator that caches the results of its source operator pub struct InitializedCacheOperator { @@ -264,7 +264,8 @@ where let result = tile_cache.finish_query(&cache_key, &query_id).await; log::debug!( "finished cache insertion for cache key {} and query id {}, result: {:?}", - 
cache_key,query_id, + cache_key, + query_id, result ); } diff --git a/operators/src/cache/shared_cache.rs b/operators/src/cache/shared_cache.rs index 15b1e637bf..b966aa8587 100644 --- a/operators/src/cache/shared_cache.rs +++ b/operators/src/cache/shared_cache.rs @@ -12,7 +12,7 @@ use geoengine_datatypes::{ identifier, primitives::{CacheHint, Geometry, RasterQueryRectangle, VectorQueryRectangle}, raster::Pixel, - util::{arrow::ArrowTyped, test::TestDefault, ByteSize, Identifier}, + util::{ByteSize, Identifier, arrow::ArrowTyped, test::TestDefault}, }; use log::{debug, log_enabled}; use lru::LruCache; @@ -171,7 +171,10 @@ where // debug output log::debug!( "Removed query {}. Landing zone size: {}. Landing zone size used: {}, Landing zone used percentage: {}.", - query_id, self.landing_zone_size.total_byte_size(), self.landing_zone_size.byte_size_used(), self.landing_zone_size.size_used_fraction() + query_id, + self.landing_zone_size.total_byte_size(), + self.landing_zone_size.byte_size_used(), + self.landing_zone_size.size_used_fraction() ); Some(entry) @@ -197,7 +200,10 @@ where log::debug!( "Removed cache entry {}. Cache size: {}. Cache size used: {}, Cache used percentage: {}.", - cache_entry_id, self.cache_size.total_byte_size(), self.cache_size.byte_size_used(), self.cache_size.size_used_fraction() + cache_entry_id, + self.cache_size.total_byte_size(), + self.cache_size.byte_size_used(), + self.cache_size.size_used_fraction() ); Some(entry) @@ -262,7 +268,10 @@ where log::trace!( "Inserted tile for query {} into landing zone. Landing zone size: {}. Landing zone size used: {}. 
Landing zone used percentage: {}", - query_id, self.landing_zone_size.total_byte_size(), self.landing_zone_size.byte_size_used(), self.landing_zone_size.size_used_fraction() + query_id, + self.landing_zone_size.total_byte_size(), + self.landing_zone_size.byte_size_used(), + self.landing_zone_size.size_used_fraction() ); Ok(()) @@ -309,7 +318,10 @@ where // debug output log::trace!( "Added query {} to landing zone. Landing zone size: {}. Landing zone size used: {}, Landing zone used percentage: {}.", - query_id, self.landing_zone_size.total_byte_size(), self.landing_zone_size.byte_size_used(), self.landing_zone_size.size_used_fraction() + query_id, + self.landing_zone_size.total_byte_size(), + self.landing_zone_size.byte_size_used(), + self.landing_zone_size.size_used_fraction() ); Ok(query_id) @@ -385,9 +397,9 @@ struct CacheQueryResult<'a, Query, CE> { pub trait Cache: CacheView< - CacheQueryEntry, - CacheQueryEntry, -> + CacheQueryEntry, + CacheQueryEntry, + > where C::Query: Clone + CacheQueryMatch, { @@ -543,10 +555,10 @@ impl Cache> for CacheBackend where T: Pixel, CompressedRasterTile2D: CacheBackendElementExt< - Query = RasterQueryRectangle, - LandingZoneContainer = LandingZoneQueryTiles, - CacheContainer = CachedTiles, - >, + Query = RasterQueryRectangle, + LandingZoneContainer = LandingZoneQueryTiles, + CacheContainer = CachedTiles, + >, { fn operator_cache_view_mut( &mut self, @@ -567,10 +579,10 @@ impl Cache> for CacheBackend where T: Geometry + ArrowTyped, CompressedFeatureCollection: CacheBackendElementExt< - Query = VectorQueryRectangle, - LandingZoneContainer = LandingZoneQueryFeatures, - CacheContainer = CachedFeatures, - >, + Query = VectorQueryRectangle, + LandingZoneContainer = LandingZoneQueryFeatures, + CacheContainer = CachedFeatures, + >, { fn operator_cache_view_mut( &mut self, @@ -1251,14 +1263,16 @@ mod tests { } // access fails because ttl is expired - assert!(>>::query_cache( - &tile_cache, - &op(1), - &query_rect() - ) - .await - 
.unwrap() - .is_none()); + assert!( + >>::query_cache( + &tile_cache, + &op(1), + &query_rect() + ) + .await + .unwrap() + .is_none() + ); } #[tokio::test] diff --git a/operators/src/engine/execution_context.rs b/operators/src/engine/execution_context.rs index 71b463ca0a..a689cc50e2 100644 --- a/operators/src/engine/execution_context.rs +++ b/operators/src/engine/execution_context.rs @@ -10,7 +10,7 @@ use crate::error::Error; use crate::meta::wrapper::InitializedOperatorWrapper; use crate::mock::MockDatasetDataSourceLoadingInfo; use crate::source::{GdalLoadingInfo, OgrSourceDataset}; -use crate::util::{create_rayon_thread_pool, Result}; +use crate::util::{Result, create_rayon_thread_pool}; use async_trait::async_trait; use geoengine_datatypes::dataset::{DataId, NamedData}; use geoengine_datatypes::machine_learning::{MlModelMetadata, MlModelName}; diff --git a/operators/src/engine/initialized_sources.rs b/operators/src/engine/initialized_sources.rs index 58dcdee51d..73568b56c8 100644 --- a/operators/src/engine/initialized_sources.rs +++ b/operators/src/engine/initialized_sources.rs @@ -3,8 +3,8 @@ use async_trait::async_trait; use crate::{ error::Error, util::{ - input::{MultiRasterOrVectorOperator, RasterOrVectorOperator}, Result, + input::{MultiRasterOrVectorOperator, RasterOrVectorOperator}, }, }; diff --git a/operators/src/engine/operator.rs b/operators/src/engine/operator.rs index b25ad8ca1e..7da4b9a050 100644 --- a/operators/src/engine/operator.rs +++ b/operators/src/engine/operator.rs @@ -7,10 +7,10 @@ use async_trait::async_trait; use geoengine_datatypes::{dataset::NamedData, util::ByteSize}; use super::{ - query_processor::{TypedRasterQueryProcessor, TypedVectorQueryProcessor}, CloneablePlotOperator, CloneableRasterOperator, CloneableVectorOperator, CreateSpan, ExecutionContext, PlotResultDescriptor, RasterResultDescriptor, TypedPlotQueryProcessor, VectorResultDescriptor, WorkflowOperatorPath, + query_processor::{TypedRasterQueryProcessor, 
TypedVectorQueryProcessor}, }; pub trait OperatorData { diff --git a/operators/src/engine/query_processor.rs b/operators/src/engine/query_processor.rs index ea5669e644..52a9261864 100644 --- a/operators/src/engine/query_processor.rs +++ b/operators/src/engine/query_processor.rs @@ -6,8 +6,8 @@ use super::{RasterResultDescriptor, ResultDescriptor, VectorResultDescriptor}; use crate::processing::RasterTypeConversionQueryProcessor; use crate::util::Result; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::Stream; +use futures::stream::BoxStream; use geoengine_datatypes::collections::{ DataCollection, MultiLineStringCollection, MultiPolygonCollection, }; @@ -28,9 +28,9 @@ pub trait QueryProcessor: Send + Sync { type SpatialBounds: AxisAlignedRectangle + Send + Sync; type Selection: QueryAttributeSelection; type ResultDescription: ResultDescriptor< - QueryRectangleSpatialBounds = Self::SpatialBounds, - QueryRectangleAttributeSelection = Self::Selection, - >; + QueryRectangleSpatialBounds = Self::SpatialBounds, + QueryRectangleAttributeSelection = Self::Selection, + >; /// inner logic of the processor async fn _query<'a>( diff --git a/operators/src/engine/result_descriptor.rs b/operators/src/engine/result_descriptor.rs index 6a65b6b0f5..ddd10e80b1 100644 --- a/operators/src/engine/result_descriptor.rs +++ b/operators/src/engine/result_descriptor.rs @@ -709,18 +709,22 @@ mod tests { #[test] fn it_checks_duplicate_bands() { - assert!(RasterBandDescriptors::new(vec![ - RasterBandDescriptor::new("foo".into(), Measurement::Unitless), - RasterBandDescriptor::new("bar".into(), Measurement::Unitless), - ]) - .is_ok()); + assert!( + RasterBandDescriptors::new(vec![ + RasterBandDescriptor::new("foo".into(), Measurement::Unitless), + RasterBandDescriptor::new("bar".into(), Measurement::Unitless), + ]) + .is_ok() + ); - assert!(RasterBandDescriptors::new(vec![ - RasterBandDescriptor::new("foo".into(), Measurement::Unitless), - 
RasterBandDescriptor::new("bar".into(), Measurement::Unitless), - RasterBandDescriptor::new("foo".into(), Measurement::Unitless), - ]) - .is_err()); + assert!( + RasterBandDescriptors::new(vec![ + RasterBandDescriptor::new("foo".into(), Measurement::Unitless), + RasterBandDescriptor::new("bar".into(), Measurement::Unitless), + RasterBandDescriptor::new("foo".into(), Measurement::Unitless), + ]) + .is_err() + ); } #[test] @@ -771,17 +775,19 @@ mod tests { .unwrap() ); - assert!(serde_json::from_value::(json!([{ - "name": "foo", - "measurement": { - "type": "unitless" - } - },{ - "name": "foo", - "measurement": { - "type": "unitless" - } - }])) - .is_err()); + assert!( + serde_json::from_value::(json!([{ + "name": "foo", + "measurement": { + "type": "unitless" + } + },{ + "name": "foo", + "measurement": { + "type": "unitless" + } + }])) + .is_err() + ); } } diff --git a/operators/src/error.rs b/operators/src/error.rs index 3e9e63df78..692402f747 100644 --- a/operators/src/error.rs +++ b/operators/src/error.rs @@ -429,7 +429,9 @@ pub enum Error { source: Box, }, - #[snafu(display("Input stream {stream_index} is not temporally aligned. Expected {expected:?}, found {found:?}."))] + #[snafu(display( + "Input stream {stream_index} is not temporally aligned. Expected {expected:?}, found {found:?}." 
+ ))] InputStreamsMustBeTemporallyAligned { stream_index: usize, expected: TimeInterval, @@ -459,7 +461,11 @@ pub enum Error { found: u32, }, - #[snafu(display("The raster inputs must have the same spatial reference and datatype, but they have the SRS's [{}] and datatypes {:?}.", join(spatial_references, ", "), datatypes))] + #[snafu(display( + "The raster inputs must have the same spatial reference and datatype, but they have the SRS's [{}] and datatypes {:?}.", + join(spatial_references, ", "), + datatypes + ))] RasterInputsMustHaveSameSpatialReferenceAndDatatype { datatypes: Vec, spatial_references: Vec, diff --git a/operators/src/machine_learning/metadata_from_file.rs b/operators/src/machine_learning/metadata_from_file.rs index f1b44826df..85a3a9fcd0 100644 --- a/operators/src/machine_learning/metadata_from_file.rs +++ b/operators/src/machine_learning/metadata_from_file.rs @@ -3,7 +3,7 @@ use crate::machine_learning::error::{ InvalidInputDimensions, InvalidOutputDimensions, MultipleInputsNotSupported, Ort, }; use geoengine_datatypes::{machine_learning::MlModelMetadata, raster::RasterDataType}; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use std::path::Path; pub fn load_model_metadata(path: &Path) -> Result { diff --git a/operators/src/machine_learning/mod.rs b/operators/src/machine_learning/mod.rs index 71994fc31e..d83e95645c 100644 --- a/operators/src/machine_learning/mod.rs +++ b/operators/src/machine_learning/mod.rs @@ -30,12 +30,16 @@ pub enum MachineLearningError { UnsupportedTensorElementType { element_type: ort::tensor::TensorElementType, }, - #[snafu(display("Number of bands in source ({source_bands}) does not match the model input bands ({model_input_bands})."))] + #[snafu(display( + "Number of bands in source ({source_bands}) does not match the model input bands ({model_input_bands})." 
+ ))] InputBandsMismatch { model_input_bands: u32, source_bands: u32, }, - #[snafu(display("Raster data types of source ({source_type:?}) does not match model input type ({model_input_type:?})."))] + #[snafu(display( + "Raster data types of source ({source_type:?}) does not match model input type ({model_input_type:?})." + ))] InputTypeMismatch { model_input_type: RasterDataType, source_type: RasterDataType, diff --git a/operators/src/machine_learning/onnx.rs b/operators/src/machine_learning/onnx.rs index 01dc09b9ff..44fb68afad 100644 --- a/operators/src/machine_learning/onnx.rs +++ b/operators/src/machine_learning/onnx.rs @@ -7,8 +7,8 @@ use crate::error; use crate::machine_learning::error::{InputBandsMismatch, InputTypeMismatch, Ort}; use crate::util::Result; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::machine_learning::{MlModelMetadata, MlModelName}; use geoengine_datatypes::primitives::{Measurement, RasterQueryRectangle}; use geoengine_datatypes::raster::{ @@ -17,7 +17,7 @@ use geoengine_datatypes::raster::{ use ndarray::Array2; use ort::tensor::{IntoTensorElementType, PrimitiveTensorElementType}; use serde::{Deserialize, Serialize}; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use std::path::PathBuf; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -167,14 +167,12 @@ impl RasterQueryProcessor for OnnxProcessor where TIn: Pixel + NoDataValue, TOut: Pixel + IntoTensorElementType + PrimitiveTensorElementType, - ort::value::Value: std::convert::TryFrom< - ndarray::ArrayBase, ndarray::Dim<[usize; 2]>>, - >, + ort::value::Value: std::convert::TryFrom, ndarray::Dim<[usize; 2]>>>, ort::Error: std::convert::From< - , ndarray::Dim<[usize; 2]>>, - >>::Error, - >, + , ndarray::Dim<[usize; 2]>>, + >>::Error, + >, { type RasterType = TOut; @@ -347,7 +345,7 @@ mod tests { test_data, util::test::TestDefault, }; - use ndarray::{arr2, array, 
Array1, Array2}; + use ndarray::{Array1, Array2, arr2, array}; use super::*; diff --git a/operators/src/meta/wrapper.rs b/operators/src/meta/wrapper.rs index 96283d2637..61561143ab 100644 --- a/operators/src/meta/wrapper.rs +++ b/operators/src/meta/wrapper.rs @@ -7,13 +7,13 @@ use crate::engine::{ }; use crate::util::Result; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::primitives::{ AxisAlignedRectangle, QueryAttributeSelection, QueryRectangle, }; use std::sync::atomic::{AtomicUsize, Ordering}; -use tracing::{span, Level}; +use tracing::{Level, span}; // A wrapper around an initialized operator that adds statistics and quota tracking pub struct InitializedOperatorWrapper { diff --git a/operators/src/mock/mock_dataset_data_source.rs b/operators/src/mock/mock_dataset_data_source.rs index 032468f38e..6539602049 100644 --- a/operators/src/mock/mock_dataset_data_source.rs +++ b/operators/src/mock/mock_dataset_data_source.rs @@ -5,9 +5,9 @@ use crate::engine::{ }; use crate::util::Result; use async_trait::async_trait; +use futures::StreamExt; use futures::stream; use futures::stream::BoxStream; -use futures::StreamExt; use geoengine_datatypes::collections::{MultiPointCollection, VectorDataType}; use geoengine_datatypes::dataset::NamedData; use geoengine_datatypes::primitives::CacheHint; @@ -47,11 +47,7 @@ impl MetaData Box< - dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + dyn MetaData, > { Box::new(self.clone()) } @@ -75,11 +71,7 @@ impl MetaData, + dyn MetaData, >, } @@ -198,8 +190,8 @@ mod tests { use geoengine_datatypes::collections::FeatureCollectionInfos; use geoengine_datatypes::dataset::{DataId, DatasetId, NamedData}; use geoengine_datatypes::primitives::{BoundingBox2D, ColumnSelection, SpatialResolution}; - use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::util::Identifier; + use 
geoengine_datatypes::util::test::TestDefault; #[tokio::test] async fn test() { diff --git a/operators/src/plot/box_plot.rs b/operators/src/plot/box_plot.rs index d140564e6e..2760f92125 100644 --- a/operators/src/plot/box_plot.rs +++ b/operators/src/plot/box_plot.rs @@ -1,8 +1,8 @@ use async_trait::async_trait; use futures::StreamExt; use geoengine_datatypes::primitives::{ - partitions_extent, time_interval_extent, AxisAlignedRectangle, BandSelection, BoundingBox2D, - PlotQueryRectangle, RasterQueryRectangle, + AxisAlignedRectangle, BandSelection, BoundingBox2D, PlotQueryRectangle, RasterQueryRectangle, + partitions_extent, time_interval_extent, }; use num_traits::AsPrimitive; use serde::{Deserialize, Serialize}; @@ -19,9 +19,9 @@ use crate::engine::{ WorkflowOperatorPath, }; use crate::error::{self, Error}; +use crate::util::Result; use crate::util::input::MultiRasterOrVectorOperator; use crate::util::statistics::PSquareQuantileEstimator; -use crate::util::Result; use snafu::ensure; pub const BOXPLOT_OPERATOR_NAME: &str = "BoxPlot"; @@ -381,7 +381,7 @@ enum BoxPlotAccumKind { impl BoxPlotAccumKind { fn update(&mut self, values: impl Iterator) -> crate::util::Result<()> { match self { - Self::Exact(ref mut x) => { + Self::Exact(x) => { x.extend(values.filter(|x| x.is_finite())); if x.len() > EXACT_CALC_BOUND { @@ -390,7 +390,7 @@ impl BoxPlotAccumKind { } Ok(()) } - Self::Estimated(ref mut est) => { + Self::Estimated(est) => { for v in values { est.update(v); } @@ -770,14 +770,15 @@ mod tests { #[tokio::test] async fn vector_data_single_feature() { - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 1], &[("foo", FeatureData::Int(vec![1]))], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = BoxPlot { params: BoxPlotParams { @@ -822,14 +823,15 @@ mod tests { 
#[tokio::test] async fn vector_data_empty() { - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[] as &[TimeInterval], &[("foo", FeatureData::Int(vec![]))], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = BoxPlot { params: BoxPlotParams { @@ -876,14 +878,15 @@ mod tests { data.push(i); } - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 2 * super::EXACT_CALC_BOUND], &[("foo", FeatureData::Int(data))], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = BoxPlot { params: BoxPlotParams { diff --git a/operators/src/plot/class_histogram.rs b/operators/src/plot/class_histogram.rs index aaecb225af..d2e0432815 100644 --- a/operators/src/plot/class_histogram.rs +++ b/operators/src/plot/class_histogram.rs @@ -7,8 +7,8 @@ use crate::engine::{ use crate::engine::{QueryProcessor, WorkflowOperatorPath}; use crate::error; use crate::error::Error; -use crate::util::input::RasterOrVectorOperator; use crate::util::Result; +use crate::util::input::RasterOrVectorOperator; use async_trait::async_trait; use futures::StreamExt; use geoengine_datatypes::collections::FeatureCollectionInfos; @@ -19,7 +19,7 @@ use geoengine_datatypes::primitives::{ }; use num_traits::AsPrimitive; use serde::{Deserialize, Serialize}; -use snafu::{ensure, OptionExt}; +use snafu::{OptionExt, ensure}; use std::collections::HashMap; pub const CLASS_HISTOGRAM_OPERATOR_NAME: &str = "ClassHistogram"; @@ -80,7 +80,7 @@ impl PlotOperator for ClassHistogram { _ => { return Err(Error::InvalidOperatorSpec { reason: "Source measurement mut be classification".to_string(), - }) + }); } }; @@ -145,7 +145,7 @@ impl PlotOperator 
for ClassHistogram { _ => { return Err(Error::InvalidOperatorSpec { reason: "Source measurement mut be classification".to_string(), - }) + }); } }; @@ -418,8 +418,8 @@ mod tests { Grid2D, RasterDataType, RasterTile2D, TileInformation, TilingSpecification, }; use geoengine_datatypes::spatial_reference::SpatialReference; - use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::util::Identifier; + use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::{ collections::{DataCollection, VectorDataType}, primitives::MultiPoint, @@ -471,11 +471,13 @@ mod tests { let execution_context = MockExecutionContext::test_default(); - assert!(histogram - .boxed() - .initialize(WorkflowOperatorPath::initialize_root(), &execution_context) - .await - .is_err()); + assert!( + histogram + .boxed() + .initialize(WorkflowOperatorPath::initialize_root(), &execution_context) + .await + .is_err() + ); } fn mock_raster_source() -> Box { @@ -678,22 +680,24 @@ mod tests { ); let vector_source = MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[TimeInterval::default(); 6], - &[( - "foo", - FeatureData::NullableFloat(vec![ - Some(1.), - Some(2.), - None, - Some(4.), - None, - Some(5.), - ]), - )], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[TimeInterval::default(); 6], + &[( + "foo", + FeatureData::NullableFloat(vec![ + Some(1.), + Some(2.), + None, + Some(4.), + None, + Some(5.), + ]), + )], + ) + .unwrap(), + ], [("foo".to_string(), measurement)].into_iter().collect(), ) .boxed(); @@ -958,12 +962,14 @@ mod tests { ); let vector_source = MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[] as &[TimeInterval], - &[("foo", FeatureData::Float(vec![]))], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[] as &[TimeInterval], + 
&[("foo", FeatureData::Float(vec![]))], + ) + .unwrap(), + ], [("foo".to_string(), measurement)].into_iter().collect(), ) .boxed(); @@ -1021,12 +1027,14 @@ mod tests { ); let vector_source = MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[TimeInterval::default()], - &[("foo", FeatureData::Float(vec![5.0]))], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[TimeInterval::default()], + &[("foo", FeatureData::Float(vec![5.0]))], + ) + .unwrap(), + ], [("foo".to_string(), measurement)].into_iter().collect(), ) .boxed(); diff --git a/operators/src/plot/histogram.rs b/operators/src/plot/histogram.rs index 04b961ee9f..ace482946c 100644 --- a/operators/src/plot/histogram.rs +++ b/operators/src/plot/histogram.rs @@ -8,8 +8,8 @@ use crate::engine::{QueryProcessor, WorkflowOperatorPath}; use crate::error; use crate::error::Error; use crate::string_token; -use crate::util::input::RasterOrVectorOperator; use crate::util::Result; +use crate::util::input::RasterOrVectorOperator; use async_trait::async_trait; use float_cmp::approx_eq; use futures::stream::BoxStream; @@ -673,8 +673,8 @@ mod tests { EmptyGrid2D, Grid2D, RasterDataType, RasterTile2D, TileInformation, TilingSpecification, }; use geoengine_datatypes::spatial_reference::SpatialReference; - use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::util::Identifier; + use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::{ collections::{DataCollection, VectorDataType}, primitives::MultiPoint, @@ -788,11 +788,13 @@ mod tests { let execution_context = MockExecutionContext::test_default(); - assert!(histogram - .boxed() - .initialize(WorkflowOperatorPath::initialize_root(), &execution_context) - .await - .is_err()); + assert!( + histogram + .boxed() + .initialize(WorkflowOperatorPath::initialize_root(), &execution_context) + .await + .is_err() + ); } fn mock_raster_source() 
-> Box { @@ -1330,12 +1332,14 @@ mod tests { #[tokio::test] async fn feature_collection_with_one_feature() { let vector_source = MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[TimeInterval::default()], - &[("foo", FeatureData::Float(vec![5.0]))], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[TimeInterval::default()], + &[("foo", FeatureData::Float(vec![5.0]))], + ) + .unwrap(), + ], [( "foo".to_string(), Measurement::continuous("bar".to_string(), None), diff --git a/operators/src/plot/pie_chart.rs b/operators/src/plot/pie_chart.rs index b3c7bf5bd7..8423d5b3eb 100644 --- a/operators/src/plot/pie_chart.rs +++ b/operators/src/plot/pie_chart.rs @@ -301,8 +301,8 @@ mod tests { }; use geoengine_datatypes::primitives::{CacheTtlSeconds, VectorQueryRectangle}; use geoengine_datatypes::spatial_reference::SpatialReference; - use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::util::Identifier; + use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::{ collections::{DataCollection, VectorDataType}, primitives::MultiPoint, @@ -434,22 +434,24 @@ mod tests { let measurement = Measurement::continuous("foo".to_string(), None); let vector_source = MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[TimeInterval::default(); 6], - &[( - "foo", - FeatureData::NullableFloat(vec![ - Some(1.), - Some(2.), - None, - Some(1.), - None, - Some(3.), - ]), - )], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[TimeInterval::default(); 6], + &[( + "foo", + FeatureData::NullableFloat(vec![ + Some(1.), + Some(2.), + None, + Some(1.), + None, + Some(3.), + ]), + )], + ) + .unwrap(), + ], [("foo".to_string(), measurement)].into_iter().collect(), ) .boxed(); @@ -721,12 +723,14 @@ mod tests { ); let vector_source = 
MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[] as &[TimeInterval], - &[("foo", FeatureData::Float(vec![]))], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[] as &[TimeInterval], + &[("foo", FeatureData::Float(vec![]))], + ) + .unwrap(), + ], [("foo".to_string(), measurement)].into_iter().collect(), ) .boxed(); @@ -786,12 +790,14 @@ mod tests { ); let vector_source = MockFeatureCollectionSource::with_collections_and_measurements( - vec![DataCollection::from_slices( - &[] as &[NoGeometry], - &[TimeInterval::default()], - &[("foo", FeatureData::Float(vec![5.0]))], - ) - .unwrap()], + vec![ + DataCollection::from_slices( + &[] as &[NoGeometry], + &[TimeInterval::default()], + &[("foo", FeatureData::Float(vec![5.0]))], + ) + .unwrap(), + ], [("foo".to_string(), measurement)].into_iter().collect(), ) .boxed(); diff --git a/operators/src/plot/scatter_plot.rs b/operators/src/plot/scatter_plot.rs index 7d69da0af2..c419d6d145 100644 --- a/operators/src/plot/scatter_plot.rs +++ b/operators/src/plot/scatter_plot.rs @@ -255,13 +255,13 @@ impl CollectorKind { fn add_batch(&mut self, values: impl Iterator) -> Result<()> { match self { - Self::Values(ref mut c) => { + Self::Values(c) => { c.add_batch(values); if c.element_count() > COLLECTOR_TO_HISTOGRAM_THRESHOLD { *self = Self::Histogram(Self::histogram_from_collector(c)?); } } - Self::Histogram(ref mut h) => { + Self::Histogram(h) => { h.update_batch(values); } } @@ -401,8 +401,8 @@ mod tests { #[tokio::test] async fn vector_data_with_nulls_and_nan() { - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 7], &[ @@ -432,8 +432,9 @@ mod tests { ), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = 
ScatterPlot { params: ScatterPlotParams { @@ -609,8 +610,8 @@ mod tests { #[tokio::test] async fn vector_data_single_feature() { - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 1], &[ @@ -618,8 +619,9 @@ mod tests { ("bar", FeatureData::Int(vec![1])), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = ScatterPlot { params: ScatterPlotParams { @@ -663,8 +665,8 @@ mod tests { #[tokio::test] async fn vector_data_empty() { - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[] as &[TimeInterval], &[ @@ -672,8 +674,9 @@ mod tests { ("bar", FeatureData::Int(vec![])), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = ScatterPlot { params: ScatterPlotParams { @@ -719,8 +722,8 @@ mod tests { let mut values = vec![1; 700]; values.push(2); - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 701], &[ @@ -728,8 +731,9 @@ mod tests { ("bar", FeatureData::Int(values.clone())), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let box_plot = ScatterPlot { params: ScatterPlotParams { diff --git a/operators/src/plot/statistics.rs b/operators/src/plot/statistics.rs index 3aa94c5156..d8b06deba2 100644 --- a/operators/src/plot/statistics.rs +++ b/operators/src/plot/statistics.rs @@ -7,17 +7,17 @@ use crate::engine::{ }; use crate::error; use crate::error::Error; +use crate::util::Result; use crate::util::input::MultiRasterOrVectorOperator; use 
crate::util::number_statistics::NumberStatistics; use crate::util::statistics::{SafePSquareQuantileEstimator, StatisticsError}; -use crate::util::Result; use async_trait::async_trait; use futures::stream::select_all; use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt}; use geoengine_datatypes::collections::FeatureCollectionInfos; use geoengine_datatypes::primitives::{ - partitions_extent, time_interval_extent, AxisAlignedRectangle, BandSelection, BoundingBox2D, - PlotQueryRectangle, RasterQueryRectangle, + AxisAlignedRectangle, BandSelection, BoundingBox2D, PlotQueryRectangle, RasterQueryRectangle, + partitions_extent, time_interval_extent, }; use geoengine_datatypes::raster::ConvertDataTypeParallel; use geoengine_datatypes::raster::{GridOrEmpty, GridSize}; @@ -1048,8 +1048,8 @@ mod tests { tile_size_in_pixels, }; - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 7], &[ @@ -1079,8 +1079,9 @@ mod tests { ), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let statistics = Statistics { params: StatisticsParams { @@ -1147,8 +1148,8 @@ mod tests { tile_size_in_pixels, }; - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 7], &[ @@ -1178,8 +1179,9 @@ mod tests { ), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let statistics = Statistics { params: StatisticsParams { @@ -1238,8 +1240,8 @@ mod tests { tile_size_in_pixels, }; - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], 
&[TimeInterval::default(); 7], &[ @@ -1269,8 +1271,9 @@ mod tests { ), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let statistics = Statistics { params: StatisticsParams { @@ -1424,8 +1427,8 @@ mod tests { tile_size_in_pixels, }; - let vector_source = - MockFeatureCollectionSource::multiple(vec![DataCollection::from_slices( + let vector_source = MockFeatureCollectionSource::multiple(vec![ + DataCollection::from_slices( &[] as &[NoGeometry], &[TimeInterval::default(); 7], &[ @@ -1455,8 +1458,9 @@ mod tests { ), ], ) - .unwrap()]) - .boxed(); + .unwrap(), + ]) + .boxed(); let statistics = Statistics { params: StatisticsParams { diff --git a/operators/src/plot/temporal_raster_mean_plot.rs b/operators/src/plot/temporal_raster_mean_plot.rs index 5b33f47f7e..664b49261e 100644 --- a/operators/src/plot/temporal_raster_mean_plot.rs +++ b/operators/src/plot/temporal_raster_mean_plot.rs @@ -4,11 +4,11 @@ use crate::engine::{ PlotResultDescriptor, QueryContext, QueryProcessor, RasterQueryProcessor, SingleRasterSource, TypedPlotQueryProcessor, WorkflowOperatorPath, }; -use crate::util::math::average_floor; use crate::util::Result; +use crate::util::math::average_floor; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::plots::{AreaLineChart, Plot, PlotData}; use geoengine_datatypes::primitives::{ BandSelection, Measurement, PlotQueryRectangle, RasterQueryRectangle, TimeInstance, @@ -280,7 +280,7 @@ mod tests { raster::{Grid2D, RasterDataType, TileInformation}, util::test::TestDefault, }; - use serde_json::{json, Value}; + use serde_json::{Value, json}; #[test] fn serialization() { @@ -341,11 +341,13 @@ mod tests { }, sources: SingleRasterSource { raster: generate_mock_raster_source( - vec![TimeInterval::new( - TimeInstance::from(DateTime::new_utc(1990, 1, 1, 0, 0, 0)), - TimeInstance::from(DateTime::new_utc(2000, 1, 1, 0, 0, 0)), - ) - .unwrap()], + vec![ + 
TimeInterval::new( + TimeInstance::from(DateTime::new_utc(1990, 1, 1, 0, 0, 0)), + TimeInstance::from(DateTime::new_utc(2000, 1, 1, 0, 0, 0)), + ) + .unwrap(), + ], vec![vec![1, 2, 3, 4, 5, 6]], ), }, diff --git a/operators/src/plot/temporal_vector_line_plot.rs b/operators/src/plot/temporal_vector_line_plot.rs index 25cdd3ebde..f6066ff6c5 100644 --- a/operators/src/plot/temporal_vector_line_plot.rs +++ b/operators/src/plot/temporal_vector_line_plot.rs @@ -284,7 +284,7 @@ mod tests { BoundingBox2D, DateTime, FeatureData, MultiPoint, SpatialResolution, TimeInterval, }, }; - use serde_json::{json, Value}; + use serde_json::{Value, json}; use crate::{ engine::{ChunkByteSize, MockExecutionContext, MockQueryContext, VectorOperator}, diff --git a/operators/src/processing/band_neighborhood_aggregate/mod.rs b/operators/src/processing/band_neighborhood_aggregate/mod.rs index 74d9d097fa..c97388c353 100644 --- a/operators/src/processing/band_neighborhood_aggregate/mod.rs +++ b/operators/src/processing/band_neighborhood_aggregate/mod.rs @@ -69,7 +69,9 @@ pub enum BandNeighborhoodAggregateError { ))] FirstDerivativeNeedsAtLeastTwoBands, - #[snafu(display("The distance of the bands for computing the first derivative must be positive, found {distance}."))] + #[snafu(display( + "The distance of the bands for computing the first derivative must be positive, found {distance}." 
+ ))] FirstDerivativeDistanceMustBePositive { distance: f64 }, #[snafu(display("The window size for the average must be odd, found {window_size}."))] @@ -682,11 +684,7 @@ impl Accu for MovingAverageAccu { self.output_band_idx + window_radius }; - if self - .input_band_tiles - .back() - .map_or(true, |t| t.0 < last_band) - { + if self.input_band_tiles.back().is_none_or(|t| t.0 < last_band) { // not enough bands for the window return None; } @@ -807,50 +805,53 @@ mod tests { assert!(accu.next_band_tile().is_none()); accu.add_tile(data.remove(0)).unwrap(); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 0, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2., 2., 2., 2.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 0, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2., 2., 2., 2.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); accu.add_tile(data.remove(0)).unwrap(); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 1, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2., 2., 2., 2.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - }),); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 2, - 
global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2., 2., 2., 2.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 1, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2., 2., 2., 2.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }), + ); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 2, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2., 2., 2., 2.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); assert!(std::panic::catch_unwind(move || accu.next_band_tile()).is_err()); } @@ -899,50 +900,53 @@ mod tests { assert!(accu.next_band_tile().is_none()); accu.add_tile(data.remove(0)).unwrap(); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 0, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2. / 3., 2. / 3., 2. / 3., 2. / 3.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 0, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2. / 3., 2. / 3., 2. / 3., 2. 
/ 3.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); accu.add_tile(data.remove(0)).unwrap(); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 1, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2. / 3., 2. / 3., 2. / 3., 2. / 3.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - }),); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 2, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2. / 3., 2. / 3., 2. / 3., 2. / 3.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 1, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2. / 3., 2. / 3., 2. / 3., 2. / 3.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }), + ); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 2, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2. / 3., 2. / 3., 2. / 3., 2. 
/ 3.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); assert!(std::panic::catch_unwind(move || accu.next_band_tile()).is_err()); } @@ -991,50 +995,53 @@ mod tests { assert!(accu.next_band_tile().is_none()); accu.add_tile(data.remove(0)).unwrap(); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 0, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![1., 2., 3., 4.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 0, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![1., 2., 3., 4.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); accu.add_tile(data.remove(0)).unwrap(); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 1, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![2., 3., 4., 5.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); - assert!(accu - .next_band_tile() - .unwrap() - .tiles_equal_ignoring_cache_hint(&RasterTile2D { - time: TimeInterval::new_unchecked(0, 5), - tile_position: [-1, 0].into(), - band: 2, - global_geo_transform: TestDefault::test_default(), - grid_array: Grid::new([2, 2].into(), vec![3., 4., 5., 6.]) - .unwrap() - .into(), - properties: Default::default(), - cache_hint: CacheHint::default(), - })); + assert!( + accu.next_band_tile() + .unwrap() + 
.tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 1, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![2., 3., 4., 5.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); + assert!( + accu.next_band_tile() + .unwrap() + .tiles_equal_ignoring_cache_hint(&RasterTile2D { + time: TimeInterval::new_unchecked(0, 5), + tile_position: [-1, 0].into(), + band: 2, + global_geo_transform: TestDefault::test_default(), + grid_array: Grid::new([2, 2].into(), vec![3., 4., 5., 6.]) + .unwrap() + .into(), + properties: Default::default(), + cache_hint: CacheHint::default(), + }) + ); assert!(std::panic::catch_unwind(move || accu.next_band_tile()).is_err()); } diff --git a/operators/src/processing/bandwise_expression/mod.rs b/operators/src/processing/bandwise_expression/mod.rs index a5ef8dd0b0..883200e5b4 100644 --- a/operators/src/processing/bandwise_expression/mod.rs +++ b/operators/src/processing/bandwise_expression/mod.rs @@ -19,8 +19,8 @@ use geoengine_expression::{ }; use serde::{Deserialize, Serialize}; -use super::expression::get_expression_dependencies; use super::RasterExpressionError; +use super::expression::get_expression_dependencies; #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] diff --git a/operators/src/processing/circle_merging_quadtree/aggregates.rs b/operators/src/processing/circle_merging_quadtree/aggregates.rs index 5a1d32bab5..0f49f965ae 100644 --- a/operators/src/processing/circle_merging_quadtree/aggregates.rs +++ b/operators/src/processing/circle_merging_quadtree/aggregates.rs @@ -75,7 +75,7 @@ impl AttributeAggregate { // if there is null on the other side, just leave it as it is (_, AttributeAggregate::Null) => Ok(()), // if there is null on this side, just take the other side - (this @ &mut AttributeAggregate::Null, other) => 
{ + &mut (ref mut this @ &mut AttributeAggregate::Null, ref mut other) => { **this = other.clone(); Ok(()) } diff --git a/operators/src/processing/circle_merging_quadtree/hash_map.rs b/operators/src/processing/circle_merging_quadtree/hash_map.rs index 9d1930d4dd..14d7da3123 100644 --- a/operators/src/processing/circle_merging_quadtree/hash_map.rs +++ b/operators/src/processing/circle_merging_quadtree/hash_map.rs @@ -304,8 +304,8 @@ where #[cfg(test)] mod tests { - use rand::prelude::SliceRandom; use rand::SeedableRng; + use rand::prelude::SliceRandom; use super::*; diff --git a/operators/src/processing/circle_merging_quadtree/operator.rs b/operators/src/processing/circle_merging_quadtree/operator.rs index d1bb443c89..9a93a832ff 100644 --- a/operators/src/processing/circle_merging_quadtree/operator.rs +++ b/operators/src/processing/circle_merging_quadtree/operator.rs @@ -1,8 +1,8 @@ use std::collections::{HashMap, HashSet}; use async_trait::async_trait; -use futures::stream::{BoxStream, FuturesUnordered}; use futures::StreamExt; +use futures::stream::{BoxStream, FuturesUnordered}; use geoengine_datatypes::collections::{ BuilderProvider, GeoFeatureCollectionRowBuilder, MultiPointCollection, VectorDataType, }; @@ -151,7 +151,7 @@ impl VectorOperator for VisualPointClustering { return Err(Error::InvalidType { expected: "not null".to_string(), found: "null".to_string(), - }) + }); } }; @@ -565,20 +565,22 @@ mod tests { let result: Vec = query.map(Result::unwrap).collect().await; assert_eq!(result.len(), 1); - assert!(result[0].chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &[(0.0, 0.099_999_999_999_999_99), (50.0, 50.1)], - &[TimeInterval::default(); 2], - &[ - ("count", FeatureData::Int(vec![9, 1])), - ( - "radius", - FeatureData::Float(vec![10.197_224_577_336_218, 8.]) - ) - ], + assert!( + result[0].chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &[(0.0, 0.099_999_999_999_999_99), (50.0, 50.1)], + 
&[TimeInterval::default(); 2], + &[ + ("count", FeatureData::Int(vec![9, 1])), + ( + "radius", + FeatureData::Float(vec![10.197_224_577_336_218, 8.]) + ) + ], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -647,21 +649,23 @@ mod tests { let result: Vec = query.map(Result::unwrap).collect().await; assert_eq!(result.len(), 1); - assert!(result[0].chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &[(0.0, 0.099_999_999_999_999_99), (50.0, 50.1)], - &[TimeInterval::default(); 2], - &[ - ("count", FeatureData::Int(vec![9, 1])), - ( - "radius", - FeatureData::Float(vec![10.197_224_577_336_218, 8.]) - ), - ("bar", FeatureData::Float(vec![5., 10.])) - ], + assert!( + result[0].chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &[(0.0, 0.099_999_999_999_999_99), (50.0, 50.1)], + &[TimeInterval::default(); 2], + &[ + ("count", FeatureData::Int(vec![9, 1])), + ( + "radius", + FeatureData::Float(vec![10.197_224_577_336_218, 8.]) + ), + ("bar", FeatureData::Float(vec![5., 10.])) + ], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -730,21 +734,23 @@ mod tests { let result: Vec = query.map(Result::unwrap).collect().await; assert_eq!(result.len(), 1); - assert!(result[0].chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &[(0.0, 0.1), (50.0, 50.1)], - &[TimeInterval::default(); 2], - &[ - ("count", FeatureData::Int(vec![2, 2])), - ( - "radius", - FeatureData::Float(vec![8.693_147_180_559_945, 8.693_147_180_559_945]) - ), - ("foo", FeatureData::NullableFloat(vec![Some(1.), None])) - ], + assert!( + result[0].chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &[(0.0, 0.1), (50.0, 50.1)], + &[TimeInterval::default(); 2], + &[ + ("count", FeatureData::Int(vec![2, 2])), + ( + "radius", + FeatureData::Float(vec![8.693_147_180_559_945, 8.693_147_180_559_945]) + ), + ("foo", FeatureData::NullableFloat(vec![Some(1.), None])) + ], + ) + .unwrap() ) - .unwrap() - )); + ); } 
#[tokio::test] @@ -821,32 +827,34 @@ mod tests { let result: Vec = query.map(Result::unwrap).collect().await; assert_eq!(result.len(), 1); - assert!(result[0].chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &[(0.0, 0.1), (50.0, 50.1), (25.0, 25.1)], - &[TimeInterval::default(); 3], - &[ - ("count", FeatureData::Int(vec![2, 2, 2])), - ( - "radius", - FeatureData::Float(vec![ - 8.693_147_180_559_945, - 8.693_147_180_559_945, - 8.693_147_180_559_945 - ]) - ), - ( - "text", - FeatureData::NullableText(vec![ - Some("foo, bar".to_string()), - Some("foo".to_string()), - None - ]) - ) - ], + assert!( + result[0].chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &[(0.0, 0.1), (50.0, 50.1), (25.0, 25.1)], + &[TimeInterval::default(); 3], + &[ + ("count", FeatureData::Int(vec![2, 2, 2])), + ( + "radius", + FeatureData::Float(vec![ + 8.693_147_180_559_945, + 8.693_147_180_559_945, + 8.693_147_180_559_945 + ]) + ), + ( + "text", + FeatureData::NullableText(vec![ + Some("foo, bar".to_string()), + Some("foo".to_string()), + None + ]) + ) + ], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] diff --git a/operators/src/processing/column_range_filter.rs b/operators/src/processing/column_range_filter.rs index d4c053b120..136afa6dc8 100644 --- a/operators/src/processing/column_range_filter.rs +++ b/operators/src/processing/column_range_filter.rs @@ -4,12 +4,12 @@ use crate::engine::{ VectorQueryProcessor, VectorResultDescriptor, WorkflowOperatorPath, }; use crate::error; -use crate::util::input::StringOrNumberRange; use crate::util::Result; +use crate::util::input::StringOrNumberRange; use crate::{adapters::FeatureCollectionChunkMerger, engine::SingleVectorSource}; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::collections::{ FeatureCollection, FeatureCollectionInfos, FeatureCollectionModifications, }; diff --git 
a/operators/src/processing/expression/mod.rs b/operators/src/processing/expression/mod.rs index 2d81423582..e494109961 100644 --- a/operators/src/processing/expression/mod.rs +++ b/operators/src/processing/expression/mod.rs @@ -13,7 +13,7 @@ use geoengine_datatypes::primitives::{ AsGeoOption, MultiLineString, MultiLineStringRef, MultiPoint, MultiPointRef, MultiPolygon, MultiPolygonRef, NoGeometry, }; -use geoengine_expression::{error::ExpressionExecutionError, ExpressionDependencies}; +use geoengine_expression::{ExpressionDependencies, error::ExpressionExecutionError}; use std::sync::{Arc, OnceLock}; /// The expression dependencies are initialized once and then reused for all expression evaluations. @@ -26,8 +26,8 @@ static EXPRESSION_DEPENDENCIES: OnceLock< /// /// If it fails, you can retry or terminate the program. /// -pub async fn initialize_expression_dependencies( -) -> Result<(), ExpressionDependenciesInitializationError> { +pub async fn initialize_expression_dependencies() +-> Result<(), ExpressionDependenciesInitializationError> { crate::util::spawn_blocking(|| { let dependencies = ExpressionDependencies::new()?; @@ -38,13 +38,13 @@ pub async fn initialize_expression_dependencies( .await? } -fn generate_expression_dependencies( -) -> Result> { +fn generate_expression_dependencies() +-> Result> { ExpressionDependencies::new().map_err(Arc::new) } -pub fn get_expression_dependencies( -) -> Result<&'static ExpressionDependencies, Arc> { +pub fn get_expression_dependencies() +-> Result<&'static ExpressionDependencies, Arc> { EXPRESSION_DEPENDENCIES .get_or_init(generate_expression_dependencies) .as_ref() @@ -54,7 +54,7 @@ pub fn get_expression_dependencies( /// Replaces all non-alphanumeric characters in a string with underscores. /// Prepends an underscore if the string is empty or starts with a number. 
fn canonicalize_name(name: &str) -> String { - let prepend_underscore = name.chars().next().map_or(true, char::is_numeric); + let prepend_underscore = name.chars().next().is_none_or(char::is_numeric); let mut canonicalized_name = String::with_capacity(name.len() + usize::from(prepend_underscore)); diff --git a/operators/src/processing/expression/raster_operator.rs b/operators/src/processing/expression/raster_operator.rs index 82799d69cd..cd1cb84295 100644 --- a/operators/src/processing/expression/raster_operator.rs +++ b/operators/src/processing/expression/raster_operator.rs @@ -1,7 +1,6 @@ use super::{ - get_expression_dependencies, + RasterExpressionError, get_expression_dependencies, raster_query_processor::{ExpressionInput, ExpressionQueryProcessor}, - RasterExpressionError, }; use crate::{ engine::{ @@ -111,10 +110,11 @@ impl RasterOperator for Expression { time: in_descriptor.time, bbox: in_descriptor.bbox, resolution: in_descriptor.resolution, - bands: RasterBandDescriptors::new(vec![self - .params - .output_band - .unwrap_or(RasterBandDescriptor::new_unitless("expression".into()))])?, + bands: RasterBandDescriptors::new(vec![ + self.params + .output_band + .unwrap_or(RasterBandDescriptor::new_unitless("expression".into())), + ])?, }; let initialized_operator = InitializedExpression { diff --git a/operators/src/processing/expression/raster_query_processor.rs b/operators/src/processing/expression/raster_query_processor.rs index 2379a9bd3c..09f9f65a2f 100644 --- a/operators/src/processing/expression/raster_query_processor.rs +++ b/operators/src/processing/expression/raster_query_processor.rs @@ -4,7 +4,7 @@ use crate::{ util::Result, }; use async_trait::async_trait; -use futures::{stream::BoxStream, StreamExt, TryStreamExt}; +use futures::{StreamExt, TryStreamExt, stream::BoxStream}; use geoengine_datatypes::{ primitives::{ BandSelection, CacheHint, RasterQueryRectangle, SpatialPartition2D, TimeInterval, diff --git 
a/operators/src/processing/expression/vector_operator.rs b/operators/src/processing/expression/vector_operator.rs index 907826a25a..4655e2bdf2 100644 --- a/operators/src/processing/expression/vector_operator.rs +++ b/operators/src/processing/expression/vector_operator.rs @@ -1,6 +1,6 @@ use super::{ - canonicalize_name, error::vector as error, get_expression_dependencies, AsExpressionGeo, - FromExpressionGeo, VectorExpressionError, + AsExpressionGeo, FromExpressionGeo, VectorExpressionError, canonicalize_name, + error::vector as error, get_expression_dependencies, }; use crate::{ engine::{ @@ -12,8 +12,8 @@ use crate::{ util::Result, }; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::primitives::{ FeatureData, FeatureDataRef, FeatureDataType, FloatOptionsParIter, Geometry, Measurement, MultiLineString, MultiPoint, MultiPolygon, VectorQueryRectangle, @@ -28,8 +28,8 @@ use geoengine_datatypes::{ primitives::NoGeometry, }; use geoengine_expression::{ - is_allowed_variable_name, DataType, ExpressionParser, LinkedExpression, - Parameter as ExpressionParameter, + DataType, ExpressionParser, LinkedExpression, Parameter as ExpressionParameter, + is_allowed_variable_name, }; use rayon::iter::{ FromParallelIterator, IndexedParallelIterator, IntoParallelIterator, ParallelIterator, diff --git a/operators/src/processing/interpolation/mod.rs b/operators/src/processing/interpolation/mod.rs index 96a71d1833..e897c73373 100644 --- a/operators/src/processing/interpolation/mod.rs +++ b/operators/src/processing/interpolation/mod.rs @@ -25,7 +25,7 @@ use geoengine_datatypes::raster::{ }; use rayon::ThreadPool; use serde::{Deserialize, Serialize}; -use snafu::{ensure, Snafu}; +use snafu::{Snafu, ensure}; #[derive(Debug, Serialize, Deserialize, Clone)] #[serde(rename_all = "camelCase")] @@ -209,11 +209,11 @@ where impl QueryProcessor for InterploationProcessor where Q: QueryProcessor< - 
Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, I: InterpolationAlgorithm

, { @@ -380,7 +380,7 @@ pub fn create_accu>( query_rect: &RasterQueryRectangle, pool: Arc, tiling_specification: TilingSpecification, -) -> impl Future>> { +) -> impl Future>> + use { // create an accumulator as a single tile that fits all the input tiles let spatial_bounds = query_rect.spatial_bounds; let spatial_resolution = query_rect.spatial_resolution; diff --git a/operators/src/processing/line_simplification.rs b/operators/src/processing/line_simplification.rs index 59bdda2064..aa389f947d 100644 --- a/operators/src/processing/line_simplification.rs +++ b/operators/src/processing/line_simplification.rs @@ -8,7 +8,7 @@ use crate::{ util::Result, }; use async_trait::async_trait; -use futures::{stream::BoxStream, StreamExt, TryStreamExt}; +use futures::{StreamExt, TryStreamExt, stream::BoxStream}; use geoengine_datatypes::{ collections::{ FeatureCollection, GeoFeatureCollectionModifications, IntoGeometryIterator, VectorDataType, @@ -182,9 +182,9 @@ where G: Geometry, for<'c> FeatureCollection: IntoGeometryIterator<'c>, for<'c> A: LineSimplificationAlgorithmImpl< - as IntoGeometryIterator<'c>>::GeometryType, - G, - >, + as IntoGeometryIterator<'c>>::GeometryType, + G, + >, { source: P, _algorithm: A, @@ -262,9 +262,9 @@ where G: Geometry, for<'c> FeatureCollection: IntoGeometryIterator<'c> + GeoFeatureCollectionModifications, for<'c> A: LineSimplificationAlgorithmImpl< - as IntoGeometryIterator<'c>>::GeometryType, - G, - >, + as IntoGeometryIterator<'c>>::GeometryType, + G, + >, { fn simplify(collection: &FeatureCollection, epsilon: f64) -> Result> { // TODO: chunk within parallelization to reduce overhead if necessary @@ -285,17 +285,17 @@ where impl QueryProcessor for LineSimplificationProcessor where P: QueryProcessor< - Output = FeatureCollection, - SpatialBounds = BoundingBox2D, - Selection = ColumnSelection, - ResultDescription = VectorResultDescriptor, - >, + Output = FeatureCollection, + SpatialBounds = BoundingBox2D, + Selection = ColumnSelection, + 
ResultDescription = VectorResultDescriptor, + >, G: Geometry + ArrowTyped + 'static, for<'c> FeatureCollection: IntoGeometryIterator<'c> + GeoFeatureCollectionModifications, for<'c> A: LineSimplificationAlgorithmImpl< - as IntoGeometryIterator<'c>>::GeometryType, - G, - >, + as IntoGeometryIterator<'c>>::GeometryType, + G, + >, { type Output = FeatureCollection; type SpatialBounds = BoundingBox2D; @@ -364,7 +364,7 @@ mod tests { }, spatial_reference::SpatialReference, test_data, - util::{test::TestDefault, Identifier}, + util::{Identifier, test::TestDefault}, }; #[tokio::test] @@ -409,64 +409,70 @@ mod tests { #[tokio::test] async fn test_errors() { // zero epsilon - assert!(LineSimplification { - params: LineSimplificationParams { - epsilon: Some(0.0), - algorithm: LineSimplificationAlgorithm::DouglasPeucker, - }, - sources: MockFeatureCollectionSource::::single( - MultiPolygonCollection::empty() - ) + assert!( + LineSimplification { + params: LineSimplificationParams { + epsilon: Some(0.0), + algorithm: LineSimplificationAlgorithm::DouglasPeucker, + }, + sources: MockFeatureCollectionSource::::single( + MultiPolygonCollection::empty() + ) + .boxed() + .into(), + } .boxed() - .into(), - } - .boxed() - .initialize( - WorkflowOperatorPath::initialize_root(), - &MockExecutionContext::test_default() - ) - .await - .is_err()); + .initialize( + WorkflowOperatorPath::initialize_root(), + &MockExecutionContext::test_default() + ) + .await + .is_err() + ); // invalid epsilon - assert!(LineSimplification { - params: LineSimplificationParams { - epsilon: Some(f64::NAN), - algorithm: LineSimplificationAlgorithm::Visvalingam, - }, - sources: MockFeatureCollectionSource::::single( - MultiPolygonCollection::empty() - ) + assert!( + LineSimplification { + params: LineSimplificationParams { + epsilon: Some(f64::NAN), + algorithm: LineSimplificationAlgorithm::Visvalingam, + }, + sources: MockFeatureCollectionSource::::single( + MultiPolygonCollection::empty() + ) + .boxed() + 
.into(), + } .boxed() - .into(), - } - .boxed() - .initialize( - WorkflowOperatorPath::initialize_root(), - &MockExecutionContext::test_default() - ) - .await - .is_err()); + .initialize( + WorkflowOperatorPath::initialize_root(), + &MockExecutionContext::test_default() + ) + .await + .is_err() + ); // not lines or polygons - assert!(LineSimplification { - params: LineSimplificationParams { - epsilon: None, - algorithm: LineSimplificationAlgorithm::DouglasPeucker, - }, - sources: MockFeatureCollectionSource::::single( - MultiPointCollection::empty() - ) + assert!( + LineSimplification { + params: LineSimplificationParams { + epsilon: None, + algorithm: LineSimplificationAlgorithm::DouglasPeucker, + }, + sources: MockFeatureCollectionSource::::single( + MultiPointCollection::empty() + ) + .boxed() + .into(), + } .boxed() - .into(), - } - .boxed() - .initialize( - WorkflowOperatorPath::initialize_root(), - &MockExecutionContext::test_default() - ) - .await - .is_err()); + .initialize( + WorkflowOperatorPath::initialize_root(), + &MockExecutionContext::test_default() + ) + .await + .is_err() + ); } #[tokio::test] diff --git a/operators/src/processing/map_query.rs b/operators/src/processing/map_query.rs index 8ee501ab18..3d5e0097d3 100644 --- a/operators/src/processing/map_query.rs +++ b/operators/src/processing/map_query.rs @@ -7,8 +7,8 @@ use crate::engine::{ }; use crate::util::Result; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::primitives::{RasterQueryRectangle, VectorQueryRectangle}; use geoengine_datatypes::raster::{RasterTile2D, TilingSpecification}; diff --git a/operators/src/processing/meteosat/radiance.rs b/operators/src/processing/meteosat/radiance.rs index fc38789a58..db3dbae9ec 100644 --- a/operators/src/processing/meteosat/radiance.rs +++ b/operators/src/processing/meteosat/radiance.rs @@ -83,7 +83,7 @@ impl RasterOperator for Radiance { return 
Err(Error::InvalidMeasurement { expected: "raw".into(), found: m.clone(), - }) + }); } Measurement::Classification(ClassificationMeasurement { measurement: m, @@ -92,13 +92,13 @@ impl RasterOperator for Radiance { return Err(Error::InvalidMeasurement { expected: "raw".into(), found: m.clone(), - }) + }); } Measurement::Unitless => { return Err(Error::InvalidMeasurement { expected: "raw".into(), found: "unitless".into(), - }) + }); } // OK Case Measurement::Continuous(ContinuousMeasurement { @@ -249,11 +249,11 @@ where impl QueryProcessor for RadianceProcessor where Q: QueryProcessor< - Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, { type Output = RasterTile2D; diff --git a/operators/src/processing/meteosat/reflectance.rs b/operators/src/processing/meteosat/reflectance.rs index 0a321969ba..1aa12a99cb 100644 --- a/operators/src/processing/meteosat/reflectance.rs +++ b/operators/src/processing/meteosat/reflectance.rs @@ -7,10 +7,10 @@ use crate::engine::{ TypedRasterQueryProcessor, WorkflowOperatorPath, }; use crate::util::Result; +use TypedRasterQueryProcessor::F32 as QueryProcessorOut; use async_trait::async_trait; use num_traits::AsPrimitive; use rayon::ThreadPool; -use TypedRasterQueryProcessor::F32 as QueryProcessorOut; use crate::error::Error; use futures::stream::BoxStream; @@ -88,7 +88,7 @@ impl RasterOperator for Reflectance { return Err(Error::InvalidMeasurement { expected: "radiance".into(), found: m.clone(), - }) + }); } Measurement::Classification(ClassificationMeasurement { measurement: m, @@ -97,13 +97,13 @@ impl RasterOperator for Reflectance { return Err(Error::InvalidMeasurement { expected: "radiance".into(), found: m.clone(), - }) + }); } Measurement::Unitless => { return Err(Error::InvalidMeasurement { expected: "radiance".into(), found: "unitless".into(), - }) + }); } // OK Case Measurement::Continuous(ContinuousMeasurement { @@ -297,11 +297,11 @@ fn calculate_esd(timestamp: &DateTime) -> f64 { impl QueryProcessor for ReflectanceProcessor where Q: QueryProcessor< - Output = RasterTile2D, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, { type Output = RasterTile2D; type SpatialBounds = SpatialPartition2D; diff --git a/operators/src/processing/meteosat/satellite.rs b/operators/src/processing/meteosat/satellite.rs index 
8c9e1605bf..7c47ba8431 100644 --- a/operators/src/processing/meteosat/satellite.rs +++ b/operators/src/processing/meteosat/satellite.rs @@ -598,17 +598,21 @@ mod tests { #[tokio::test] async fn get_channel_ok() { - assert!(Satellite::satellite_by_msg_id(1) - .unwrap() - .channel(0) - .is_ok()); + assert!( + Satellite::satellite_by_msg_id(1) + .unwrap() + .channel(0) + .is_ok() + ); } #[tokio::test] async fn get_channel_fail() { - assert!(Satellite::satellite_by_msg_id(1) - .unwrap() - .channel(42) - .is_err()); + assert!( + Satellite::satellite_by_msg_id(1) + .unwrap() + .channel(42) + .is_err() + ); } } diff --git a/operators/src/processing/meteosat/temperature.rs b/operators/src/processing/meteosat/temperature.rs index 2b62c5f8b7..e693f1babc 100644 --- a/operators/src/processing/meteosat/temperature.rs +++ b/operators/src/processing/meteosat/temperature.rs @@ -83,7 +83,7 @@ impl RasterOperator for Temperature { return Err(Error::InvalidMeasurement { expected: "raw".into(), found: m.clone(), - }) + }); } Measurement::Classification(ClassificationMeasurement { measurement: m, @@ -92,13 +92,13 @@ impl RasterOperator for Temperature { return Err(Error::InvalidMeasurement { expected: "raw".into(), found: m.clone(), - }) + }); } Measurement::Unitless => { return Err(Error::InvalidMeasurement { expected: "raw".into(), found: "unitless".into(), - }) + }); } // OK Case Measurement::Continuous(ContinuousMeasurement { @@ -296,11 +296,11 @@ fn create_lookup_table(channel: &Channel, offset: f64, slope: f64, _pool: &Threa impl QueryProcessor for TemperatureProcessor where Q: QueryProcessor< - Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, { type Output = RasterTile2D; @@ -416,7 +416,9 @@ mod tests { &MaskedGrid2D::new( Grid2D::new( [3, 2].into(), - vec![300.341_43, 318.617_65, 330.365_14, 339.233_64, 346.443_94, 0.,], + vec![ + 300.341_43, 318.617_65, 330.365_14, 339.233_64, 346.443_94, 0., + ], ) .unwrap(), Grid2D::new([3, 2].into(), vec![true, true, true, true, true, false,],).unwrap(), diff --git a/operators/src/processing/mod.rs b/operators/src/processing/mod.rs index bd76685dac..d80848b554 100644 --- a/operators/src/processing/mod.rs +++ b/operators/src/processing/mod.rs @@ -27,8 +27,8 @@ pub use circle_merging_quadtree::{ InitializedVisualPointClustering, VisualPointClustering, VisualPointClusteringParams, }; pub use expression::{ - initialize_expression_dependencies, Expression, ExpressionParams, RasterExpressionError, - VectorExpression, VectorExpressionError, VectorExpressionParams, + Expression, ExpressionParams, RasterExpressionError, VectorExpression, VectorExpressionError, + VectorExpressionParams, initialize_expression_dependencies, }; pub use interpolation::{Interpolation, InterpolationError, InterpolationParams}; pub use line_simplification::{ diff --git a/operators/src/processing/neighborhood_aggregate/aggregate.rs b/operators/src/processing/neighborhood_aggregate/aggregate.rs index 4ea3eef2c4..27cb3676df 100644 --- a/operators/src/processing/neighborhood_aggregate/aggregate.rs +++ b/operators/src/processing/neighborhood_aggregate/aggregate.rs @@ -1,4 +1,4 @@ -use super::{error, NeighborhoodAggregateError}; +use super::{NeighborhoodAggregateError, error}; use crate::util::number_statistics::NumberStatistics; use geoengine_datatypes::raster::{Grid2D, GridShape2D, GridSize, Pixel}; use num::Integer; diff --git a/operators/src/processing/neighborhood_aggregate/mod.rs b/operators/src/processing/neighborhood_aggregate/mod.rs index 82acfe5032..f183e5d734 100644 
--- a/operators/src/processing/neighborhood_aggregate/mod.rs +++ b/operators/src/processing/neighborhood_aggregate/mod.rs @@ -3,8 +3,8 @@ mod tile_sub_query; use self::aggregate::{AggregateFunction, Neighborhood, StandardDeviation, Sum}; use self::tile_sub_query::NeighborhoodAggregateTileNeighborhood; -use crate::adapters::stack_individual_aligned_raster_bands; use crate::adapters::RasterSubQueryAdapter; +use crate::adapters::stack_individual_aligned_raster_bands; use crate::engine::{ CanonicOperatorName, ExecutionContext, InitializedRasterOperator, InitializedSources, Operator, OperatorName, QueryContext, QueryProcessor, RasterOperator, RasterQueryProcessor, @@ -20,7 +20,7 @@ use geoengine_datatypes::raster::{ use num::Integer; use num_traits::AsPrimitive; use serde::{Deserialize, Serialize}; -use snafu::{ensure, Snafu}; +use snafu::{Snafu, ensure}; use std::marker::PhantomData; /// A neighborhood aggregate operator applies an aggregate function to each raster pixel and its surrounding. @@ -251,11 +251,11 @@ where impl QueryProcessor for NeighborhoodAggregateProcessor where Q: QueryProcessor< - Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, f64: AsPrimitive

, A: AggregateFunction + 'static, diff --git a/operators/src/processing/neighborhood_aggregate/tile_sub_query.rs b/operators/src/processing/neighborhood_aggregate/tile_sub_query.rs index 638bccad09..b81d9f528e 100644 --- a/operators/src/processing/neighborhood_aggregate/tile_sub_query.rs +++ b/operators/src/processing/neighborhood_aggregate/tile_sub_query.rs @@ -337,8 +337,8 @@ mod tests { use crate::{ engine::MockExecutionContext, processing::neighborhood_aggregate::{ - aggregate::{StandardDeviation, Sum}, NeighborhoodParams, + aggregate::{StandardDeviation, Sum}, }, }; use geoengine_datatypes::{ diff --git a/operators/src/processing/point_in_polygon.rs b/operators/src/processing/point_in_polygon.rs index 50452bc5ff..4bfd218bd7 100644 --- a/operators/src/processing/point_in_polygon.rs +++ b/operators/src/processing/point_in_polygon.rs @@ -404,14 +404,16 @@ mod tests { #[test] fn point_in_polygon_boundary_conditions() { let collection = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.0, 0.0).into(), - (10.0, 0.0).into(), - (10.0, 10.0).into(), - (0.0, 10.0).into(), - (0.0, 0.0).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.0, 0.0).into(), + (10.0, 0.0).into(), + (10.0, 10.0).into(), + (0.0, 10.0).into(), + (0.0, 0.0).into(), + ]]]) + .unwrap(), + ], vec![Default::default(); 1], Default::default(), CacheHint::default(), @@ -435,26 +437,60 @@ mod tests { &Default::default() ),); - assert!(tester - .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 9.9), &Default::default()),); - assert!(tester - .any_polygon_contains_coordinate(&Coordinate2D::new(10.0, 9.9), &Default::default()),); - assert!(tester - .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 10.0), &Default::default()),); - - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(-0.1, -0.1), &Default::default()),); - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(0.0, -0.1), 
&Default::default()),); - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(-0.1, 0.0), &Default::default()),); - - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(10.1, 10.1), &Default::default()),); - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(10.1, 9.9), &Default::default()),); - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 10.1), &Default::default()),); + assert!( + tester + .any_polygon_contains_coordinate(&Coordinate2D::new(9.9, 9.9), &Default::default()), + ); + assert!( + tester.any_polygon_contains_coordinate( + &Coordinate2D::new(10.0, 9.9), + &Default::default() + ), + ); + assert!( + tester.any_polygon_contains_coordinate( + &Coordinate2D::new(9.9, 10.0), + &Default::default() + ), + ); + + assert!( + !tester.any_polygon_contains_coordinate( + &Coordinate2D::new(-0.1, -0.1), + &Default::default() + ), + ); + assert!( + !tester.any_polygon_contains_coordinate( + &Coordinate2D::new(0.0, -0.1), + &Default::default() + ), + ); + assert!( + !tester.any_polygon_contains_coordinate( + &Coordinate2D::new(-0.1, 0.0), + &Default::default() + ), + ); + + assert!( + !tester.any_polygon_contains_coordinate( + &Coordinate2D::new(10.1, 10.1), + &Default::default() + ), + ); + assert!( + !tester.any_polygon_contains_coordinate( + &Coordinate2D::new(10.1, 9.9), + &Default::default() + ), + ); + assert!( + !tester.any_polygon_contains_coordinate( + &Coordinate2D::new(9.9, 10.1), + &Default::default() + ), + ); } #[tokio::test] @@ -771,14 +807,16 @@ mod tests { .unwrap(); let polygon_collection = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.0, 0.0).into(), - (10.0, 0.0).into(), - (10.0, 10.0).into(), - (0.0, 10.0).into(), - (0.0, 0.0).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.0, 0.0).into(), + (10.0, 0.0).into(), + (10.0, 10.0).into(), + (0.0, 10.0).into(), + (0.0, 0.0).into(), + ]]]) + .unwrap(), + ], 
vec![TimeInterval::default()], Default::default(), CacheHint::default(), @@ -836,14 +874,16 @@ mod tests { .unwrap(); let polygon_collection = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.0, 0.0).into(), - (10.0, 0.0).into(), - (10.0, 10.0).into(), - (0.0, 10.0).into(), - (0.0, 0.0).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.0, 0.0).into(), + (10.0, 0.0).into(), + (10.0, 10.0).into(), + (0.0, 10.0).into(), + (0.0, 0.0).into(), + ]]]) + .unwrap(), + ], vec![TimeInterval::default()], Default::default(), CacheHint::default(), diff --git a/operators/src/processing/point_in_polygon/tester.rs b/operators/src/processing/point_in_polygon/tester.rs index afff363728..b6679a1056 100644 --- a/operators/src/processing/point_in_polygon/tester.rs +++ b/operators/src/processing/point_in_polygon/tester.rs @@ -353,36 +353,38 @@ mod tests { #[test] fn point_in_polygon_tester() { let collection = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![ - vec![vec![ - Coordinate2D::new(20., 20.), - Coordinate2D::new(30., 20.), - Coordinate2D::new(30., 30.), - Coordinate2D::new(20., 30.), - Coordinate2D::new(20., 20.), - ]], - vec![ - vec![ - Coordinate2D::new(0., 0.), - Coordinate2D::new(10., 0.), - Coordinate2D::new(10., 10.), - Coordinate2D::new(0., 10.), - Coordinate2D::new(0., 0.), - ], + vec![ + MultiPolygon::new(vec![ + vec![vec![ + Coordinate2D::new(20., 20.), + Coordinate2D::new(30., 20.), + Coordinate2D::new(30., 30.), + Coordinate2D::new(20., 30.), + Coordinate2D::new(20., 20.), + ]], vec![ - Coordinate2D::new(1., 5.), - Coordinate2D::new(3., 3.), - Coordinate2D::new(5., 3.), - Coordinate2D::new(6., 5.), - Coordinate2D::new(7., 1.5), - Coordinate2D::new(4., 0.), - Coordinate2D::new(2., 1.), - Coordinate2D::new(1., 3.), - Coordinate2D::new(1., 5.), + vec![ + Coordinate2D::new(0., 0.), + Coordinate2D::new(10., 0.), + Coordinate2D::new(10., 10.), + Coordinate2D::new(0., 10.), + Coordinate2D::new(0., 
0.), + ], + vec![ + Coordinate2D::new(1., 5.), + Coordinate2D::new(3., 3.), + Coordinate2D::new(5., 3.), + Coordinate2D::new(6., 5.), + Coordinate2D::new(7., 1.5), + Coordinate2D::new(4., 0.), + Coordinate2D::new(2., 1.), + Coordinate2D::new(1., 3.), + Coordinate2D::new(1., 5.), + ], ], - ], - ]) - .unwrap()], + ]) + .unwrap(), + ], vec![Default::default(); 1], Default::default(), CacheHint::default(), @@ -402,8 +404,10 @@ mod tests { assert!( tester.any_polygon_contains_coordinate(&Coordinate2D::new(4., 5.), &Default::default()) ); - assert!(!tester - .any_polygon_contains_coordinate(&Coordinate2D::new(4., 2.), &Default::default()),); + assert!( + !tester + .any_polygon_contains_coordinate(&Coordinate2D::new(4., 2.), &Default::default()), + ); assert_eq!( tester.multi_polygons_containing_coordinate( diff --git a/operators/src/processing/point_in_polygon/wrapper.rs b/operators/src/processing/point_in_polygon/wrapper.rs index c585cd06df..ef9ce30df9 100644 --- a/operators/src/processing/point_in_polygon/wrapper.rs +++ b/operators/src/processing/point_in_polygon/wrapper.rs @@ -87,9 +87,11 @@ mod tests { let wrapper = PointInPolygonTesterWithCollection::new(collection); - assert!(wrapper - .tester() - .any_polygon_contains_coordinate(&(5.0, 5.1).into(), &TimeInterval::default())); + assert!( + wrapper + .tester() + .any_polygon_contains_coordinate(&(5.0, 5.1).into(), &TimeInterval::default()) + ); assert_eq!(wrapper.collection().len(), 2); } diff --git a/operators/src/processing/raster_stacker.rs b/operators/src/processing/raster_stacker.rs index 900964d30e..b20ebea46d 100644 --- a/operators/src/processing/raster_stacker.rs +++ b/operators/src/processing/raster_stacker.rs @@ -12,7 +12,7 @@ use crate::util::Result; use async_trait::async_trait; use futures::stream::BoxStream; use geoengine_datatypes::primitives::{ - partitions_extent, time_interval_extent, BandSelection, RasterQueryRectangle, SpatialResolution, + BandSelection, RasterQueryRectangle, SpatialResolution, 
partitions_extent, time_interval_extent, }; use geoengine_datatypes::raster::{DynamicRasterDataType, Pixel, RasterTile2D, RenameBands}; use serde::{Deserialize, Serialize}; diff --git a/operators/src/processing/raster_type_conversion.rs b/operators/src/processing/raster_type_conversion.rs index 17bc6ec79a..fcce1107bd 100644 --- a/operators/src/processing/raster_type_conversion.rs +++ b/operators/src/processing/raster_type_conversion.rs @@ -1,5 +1,5 @@ use async_trait::async_trait; -use futures::{stream::BoxStream, StreamExt, TryFutureExt, TryStreamExt}; +use futures::{StreamExt, TryFutureExt, TryStreamExt, stream::BoxStream}; use geoengine_datatypes::{ primitives::{BandSelection, RasterQueryRectangle, SpatialPartition2D}, raster::{ConvertDataType, Pixel, RasterDataType, RasterTile2D}, diff --git a/operators/src/processing/raster_vector_join/aggregated.rs b/operators/src/processing/raster_vector_join/aggregated.rs index 70081790f5..b77b5ab188 100644 --- a/operators/src/processing/raster_vector_join/aggregated.rs +++ b/operators/src/processing/raster_vector_join/aggregated.rs @@ -14,17 +14,17 @@ use crate::engine::{ QueryContext, QueryProcessor, RasterQueryProcessor, VectorQueryProcessor, VectorResultDescriptor, }; +use crate::processing::raster_vector_join::TemporalAggregationMethod; use crate::processing::raster_vector_join::aggregator::{ Aggregator, FirstValueFloatAggregator, FirstValueIntAggregator, MeanValueAggregator, TypedAggregator, }; -use crate::processing::raster_vector_join::TemporalAggregationMethod; use crate::util::Result; use async_trait::async_trait; use geoengine_datatypes::primitives::{BoundingBox2D, Geometry, VectorQueryRectangle}; use super::util::{CoveredPixels, FeatureTimeSpanIter, PixelCoverCreator}; -use super::{create_feature_aggregator, FeatureAggregationMethod, RasterInput}; +use super::{FeatureAggregationMethod, RasterInput, create_feature_aggregator}; pub struct RasterVectorAggregateJoinProcessor { collection: Box>>, @@ -734,13 +734,15 
@@ mod tests { .unwrap(); let polygons = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(), + ], vec![TimeInterval::new(0, 20).unwrap(); 1], Default::default(), CacheHint::default(), @@ -985,13 +987,15 @@ mod tests { .unwrap(); let polygons = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(), + ], vec![TimeInterval::new(0, 20).unwrap(); 1], Default::default(), CacheHint::default(), @@ -1057,36 +1061,38 @@ mod tests { let result = result.remove(0); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPolygonCollection::from_slices( - &[MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap(),], - &[TimeInterval::new(0, 20).unwrap()], - &[ - ( - "foo", - FeatureData::Float(vec![ - (((3. + 1. + 40. + 30. + 400.) / 5.) - + ((4. + 6. + 30. + 40. + 300.) / 5.)) - / 2. - ]) - ), - ( - "foo_1", - FeatureData::Float(vec![ - (((251. + 249. + 140. + 130. + 410.) / 5.) - + ((44. + 66. + 300. + 400. + 301.) / 5.)) - / 2. - ]) - ) - ], + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPolygonCollection::from_slices( + &[MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(),], + &[TimeInterval::new(0, 20).unwrap()], + &[ + ( + "foo", + FeatureData::Float(vec![ + (((3. + 1. + 40. + 30. + 400.) / 5.) + + ((4. + 6. + 30. + 40. + 300.) / 5.)) + / 2. 
+ ]) + ), + ( + "foo_1", + FeatureData::Float(vec![ + (((251. + 249. + 140. + 130. + 410.) / 5.) + + ((44. + 66. + 300. + 400. + 301.) / 5.)) + / 2. + ]) + ) + ], + ) + .unwrap() ) - .unwrap() - )); + ); } } diff --git a/operators/src/processing/raster_vector_join/mod.rs b/operators/src/processing/raster_vector_join/mod.rs index d7b8953841..6c1f10f216 100644 --- a/operators/src/processing/raster_vector_join/mod.rs +++ b/operators/src/processing/raster_vector_join/mod.rs @@ -700,19 +700,21 @@ mod tests { #[tokio::test] async fn it_checks_sref() { let point_source = MockFeatureCollectionSource::with_collections_and_sref( - vec![MultiPointCollection::from_data( - MultiPoint::many(vec![ - (-13.95, 20.05), - (-14.05, 20.05), - (-13.95, 19.95), - (-14.05, 19.95), - ]) + vec![ + MultiPointCollection::from_data( + MultiPoint::many(vec![ + (-13.95, 20.05), + (-14.05, 20.05), + (-13.95, 19.95), + (-14.05, 19.95), + ]) + .unwrap(), + vec![TimeInterval::default(); 4], + Default::default(), + CacheHint::default(), + ) .unwrap(), - vec![TimeInterval::default(); 4], - Default::default(), - CacheHint::default(), - ) - .unwrap()], + ], SpatialReference::from_str("EPSG:3857").unwrap(), ) .boxed(); @@ -750,19 +752,21 @@ mod tests { #[allow(clippy::too_many_lines)] async fn it_includes_bands_in_result_descriptor() { let point_source = MockFeatureCollectionSource::with_collections_and_sref( - vec![MultiPointCollection::from_data( - MultiPoint::many(vec![ - (-13.95, 20.05), - (-14.05, 20.05), - (-13.95, 19.95), - (-14.05, 19.95), - ]) + vec![ + MultiPointCollection::from_data( + MultiPoint::many(vec![ + (-13.95, 20.05), + (-14.05, 20.05), + (-13.95, 19.95), + (-14.05, 19.95), + ]) + .unwrap(), + vec![TimeInterval::default(); 4], + Default::default(), + CacheHint::default(), + ) .unwrap(), - vec![TimeInterval::default(); 4], - Default::default(), - CacheHint::default(), - ) - .unwrap()], + ], SpatialReference::from_str("EPSG:4326").unwrap(), ) .boxed(); diff --git 
a/operators/src/processing/raster_vector_join/non_aggregated.rs b/operators/src/processing/raster_vector_join/non_aggregated.rs index d1dee971c7..e07dd4d90f 100644 --- a/operators/src/processing/raster_vector_join/non_aggregated.rs +++ b/operators/src/processing/raster_vector_join/non_aggregated.rs @@ -1,6 +1,6 @@ use crate::adapters::FeatureCollectionStreamExt; use crate::processing::raster_vector_join::create_feature_aggregator; -use futures::stream::{once as once_stream, BoxStream}; +use futures::stream::{BoxStream, once as once_stream}; use futures::{StreamExt, TryStreamExt}; use geoengine_datatypes::primitives::{ BandSelection, BoundingBox2D, CacheHint, ColumnSelection, FeatureDataType, Geometry, @@ -540,22 +540,24 @@ mod tests { let result = result.remove(0); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &MultiPoint::many(vec![ - vec![(-13.95, 20.05)], - vec![(-14.05, 20.05)], - vec![(-13.95, 19.95)], - vec![(-14.05, 19.95)], - vec![(-13.95, 19.95), (-14.05, 19.95)], - ]) - .unwrap(), - &[time_instant; 5], - // these values are taken from loading the tiff in QGIS - &[("ndvi", FeatureData::Int(vec![54, 55, 51, 55, 51]))], + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &MultiPoint::many(vec![ + vec![(-13.95, 20.05)], + vec![(-14.05, 20.05)], + vec![(-13.95, 19.95)], + vec![(-14.05, 19.95)], + vec![(-13.95, 19.95), (-14.05, 19.95)], + ]) + .unwrap(), + &[time_instant; 5], + // these values are taken from loading the tiff in QGIS + &[("ndvi", FeatureData::Int(vec![54, 55, 51, 55, 51]))], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -652,21 +654,24 @@ mod tests { let result = result.remove(0); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &MultiPoint::many(vec![ - (-13.95, 20.05), - (-14.05, 20.05), - (-13.95, 19.95), - (-14.05, 19.95), - ]) - .unwrap(), - &[TimeInterval::new_instant(DateTime::new_utc(2014, 1, 1, 
0, 0, 0)).unwrap(); 4], - // these values are taken from loading the tiff in QGIS - &[("ndvi", FeatureData::Int(vec![54, 55, 51, 55]))], + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &MultiPoint::many(vec![ + (-13.95, 20.05), + (-14.05, 20.05), + (-13.95, 19.95), + (-14.05, 19.95), + ]) + .unwrap(), + &[TimeInterval::new_instant(DateTime::new_utc(2014, 1, 1, 0, 0, 0)).unwrap(); + 4], + // these values are taken from loading the tiff in QGIS + &[("ndvi", FeatureData::Int(vec![54, 55, 51, 55]))], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -770,25 +775,27 @@ mod tests { let result = result.remove(0); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &MultiPoint::many(vec![ - (-13.95, 20.05), - (-14.05, 20.05), - (-13.95, 19.95), - (-14.05, 19.95), - ]) - .unwrap(), - &[TimeInterval::new( - DateTime::new_utc(2014, 1, 1, 0, 0, 0), - DateTime::new_utc(2014, 2, 1, 0, 0, 0), + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &MultiPoint::many(vec![ + (-13.95, 20.05), + (-14.05, 20.05), + (-13.95, 19.95), + (-14.05, 19.95), + ]) + .unwrap(), + &[TimeInterval::new( + DateTime::new_utc(2014, 1, 1, 0, 0, 0), + DateTime::new_utc(2014, 2, 1, 0, 0, 0), + ) + .unwrap(); 4], + // these values are taken from loading the tiff in QGIS + &[("ndvi", FeatureData::Int(vec![54, 55, 51, 55]))], ) - .unwrap(); 4], - // these values are taken from loading the tiff in QGIS - &[("ndvi", FeatureData::Int(vec![54, 55, 51, 55]))], + .unwrap() ) - .unwrap() - )); + ); } #[allow(clippy::too_many_lines)] @@ -903,28 +910,30 @@ mod tests { DateTime::new_utc(2014, 3, 1, 0, 0, 0), ) .unwrap(); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &MultiPoint::many(vec![ - (-13.95, 20.05), - (-14.05, 20.05), - (-13.95, 19.95), - (-14.05, 19.95), - (-13.95, 20.05), - (-14.05, 20.05), - (-13.95, 19.95), - (-14.05, 19.95), - 
]) - .unwrap(), - &[t1, t1, t1, t1, t2, t2, t2, t2], - // these values are taken from loading the tiff in QGIS - &[( - "ndvi", - FeatureData::Int(vec![54, 55, 51, 55, 52, 55, 50, 53]) - )], + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &MultiPoint::many(vec![ + (-13.95, 20.05), + (-14.05, 20.05), + (-13.95, 19.95), + (-14.05, 19.95), + (-13.95, 20.05), + (-14.05, 20.05), + (-13.95, 19.95), + (-14.05, 19.95), + ]) + .unwrap(), + &[t1, t1, t1, t1, t2, t2, t2, t2], + // these values are taken from loading the tiff in QGIS + &[( + "ndvi", + FeatureData::Int(vec![54, 55, 51, 55, 52, 55, 50, 53]) + )], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -1087,28 +1096,30 @@ mod tests { let t1 = TimeInterval::new(0, 10).unwrap(); let t2 = TimeInterval::new(10, 20).unwrap(); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPointCollection::from_slices( - &MultiPoint::many(vec![ - vec![(0.0, 0.0), (2.0, 0.0)], - vec![(1.0, 0.0), (3.0, 0.0)], - vec![(0.0, 0.0), (2.0, 0.0)], - vec![(1.0, 0.0), (3.0, 0.0)], - ]) - .unwrap(), - &[t1, t1, t2, t2], - &[( - "ndvi", - FeatureData::Float(vec![ - (6. + 60.) / 2., - (5. + 50.) / 2., - (1. + 10.) / 2., - (2. + 20.) / 2. + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPointCollection::from_slices( + &MultiPoint::many(vec![ + vec![(0.0, 0.0), (2.0, 0.0)], + vec![(1.0, 0.0), (3.0, 0.0)], + vec![(0.0, 0.0), (2.0, 0.0)], + vec![(1.0, 0.0), (3.0, 0.0)], ]) - )], + .unwrap(), + &[t1, t1, t2, t2], + &[( + "ndvi", + FeatureData::Float(vec![ + (6. + 60.) / 2., + (5. + 50.) / 2., + (1. + 10.) / 2., + (2. + 20.) / 2. 
+ ]) + )], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -1229,13 +1240,15 @@ mod tests { .unwrap(); let polygons = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(), + ], vec![TimeInterval::default(); 1], Default::default(), CacheHint::default(), @@ -1302,35 +1315,37 @@ mod tests { let t1 = TimeInterval::new(0, 10).unwrap(); let t2 = TimeInterval::new(10, 20).unwrap(); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPolygonCollection::from_slices( - &[ - MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap(), - MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap() - ], - &[t1, t2], - &[( - "ndvi", - FeatureData::Float(vec![ - (3. + 1. + 40. + 30. + 400.) / 5., - (4. + 6. + 30. + 40. + 300.) / 5. - ]) - )], + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPolygonCollection::from_slices( + &[ + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(), + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap() + ], + &[t1, t2], + &[( + "ndvi", + FeatureData::Float(vec![ + (3. + 1. + 40. + 30. + 400.) / 5., + (4. + 6. + 30. + 40. + 300.) / 5. 
+ ]) + )], + ) + .unwrap() ) - .unwrap() - )); + ); } #[tokio::test] @@ -1542,13 +1557,15 @@ mod tests { .unwrap(); let polygons = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(), + ], vec![TimeInterval::default(); 1], Default::default(), CacheHint::default(), @@ -1615,43 +1632,45 @@ mod tests { let t1 = TimeInterval::new(0, 10).unwrap(); let t2 = TimeInterval::new(10, 20).unwrap(); - assert!(result.chunks_equal_ignoring_cache_hint( - &MultiPolygonCollection::from_slices( - &[ - MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap(), - MultiPolygon::new(vec![vec![vec![ - (0.5, -0.5).into(), - (4., -1.).into(), - (0.5, -2.5).into(), - (0.5, -0.5).into(), - ]]]) - .unwrap() - ], - &[t1, t2], - &[ - ( - "foo", - FeatureData::Float(vec![ - (3. + 1. + 40. + 30. + 400.) / 5., - (4. + 6. + 30. + 40. + 300.) / 5. - ]) - ), - ( - "foo_1", - FeatureData::Float(vec![ - (251. + 249. + 140. + 130. + 410.) / 5., - (44. + 66. + 300. + 400. + 301.) / 5. - ]) - ) - ], + assert!( + result.chunks_equal_ignoring_cache_hint( + &MultiPolygonCollection::from_slices( + &[ + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap(), + MultiPolygon::new(vec![vec![vec![ + (0.5, -0.5).into(), + (4., -1.).into(), + (0.5, -2.5).into(), + (0.5, -0.5).into(), + ]]]) + .unwrap() + ], + &[t1, t2], + &[ + ( + "foo", + FeatureData::Float(vec![ + (3. + 1. + 40. + 30. + 400.) / 5., + (4. + 6. + 30. + 40. + 300.) / 5. + ]) + ), + ( + "foo_1", + FeatureData::Float(vec![ + (251. + 249. + 140. + 130. + 410.) / 5., + (44. + 66. + 300. + 400. + 301.) / 5. 
+ ]) + ) + ], + ) + .unwrap() ) - .unwrap() - )); + ); } } diff --git a/operators/src/processing/rasterization/mod.rs b/operators/src/processing/rasterization/mod.rs index c5acefe217..f2e70804cf 100644 --- a/operators/src/processing/rasterization/mod.rs +++ b/operators/src/processing/rasterization/mod.rs @@ -15,7 +15,7 @@ use crate::util; use async_trait::async_trait; use futures::stream::BoxStream; -use futures::{stream, StreamExt}; +use futures::{StreamExt, stream}; use geoengine_datatypes::collections::GeometryCollection; use geoengine_datatypes::primitives::{ @@ -608,7 +608,7 @@ mod tests { use crate::mock::{MockPointSource, MockPointSourceParams}; use crate::processing::rasterization::GridSizeMode::{Fixed, Relative}; use crate::processing::rasterization::{ - gaussian, DensityParams, GridOrDensity, GridParams, Rasterization, + DensityParams, GridOrDensity, GridParams, Rasterization, gaussian, }; use futures::StreamExt; use geoengine_datatypes::primitives::{ diff --git a/operators/src/processing/reprojection.rs b/operators/src/processing/reprojection.rs index 8c9492d14b..3cb8b588cb 100644 --- a/operators/src/processing/reprojection.rs +++ b/operators/src/processing/reprojection.rs @@ -3,8 +3,8 @@ use std::marker::PhantomData; use super::map_query::MapQueryProcessor; use crate::{ adapters::{ - fold_by_coordinate_lookup_future, FillerTileCacheExpirationStrategy, FillerTimeBounds, - RasterSubQueryAdapter, SparseTilesFillAdapter, TileReprojectionSubQuery, + FillerTileCacheExpirationStrategy, FillerTimeBounds, RasterSubQueryAdapter, + SparseTilesFillAdapter, TileReprojectionSubQuery, fold_by_coordinate_lookup_future, }, engine::{ CanonicOperatorName, ExecutionContext, InitializedRasterOperator, InitializedSources, @@ -18,12 +18,12 @@ use crate::{ }; use async_trait::async_trait; use futures::stream::BoxStream; -use futures::{stream, StreamExt}; +use futures::{StreamExt, stream}; use geoengine_datatypes::{ collections::FeatureCollection, operations::reproject::{ - 
reproject_and_unify_bbox, reproject_query, suggest_pixel_size_from_diag_cross_projected, CoordinateProjection, CoordinateProjector, Reproject, ReprojectClipped, + reproject_and_unify_bbox, reproject_query, suggest_pixel_size_from_diag_cross_projected, }, primitives::{ BandSelection, BoundingBox2D, ColumnSelection, Geometry, RasterQueryRectangle, @@ -337,11 +337,11 @@ where impl QueryProcessor for VectorReprojectionProcessor where Q: QueryProcessor< - Output = FeatureCollection, - SpatialBounds = BoundingBox2D, - Selection = ColumnSelection, - ResultDescription = VectorResultDescriptor, - >, + Output = FeatureCollection, + SpatialBounds = BoundingBox2D, + Selection = ColumnSelection, + ResultDescription = VectorResultDescriptor, + >, FeatureCollection: Reproject>, G: Geometry + ArrowTyped, { @@ -591,11 +591,11 @@ where impl RasterReprojectionProcessor where Q: QueryProcessor< - Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, { pub fn new( @@ -622,11 +622,11 @@ where impl QueryProcessor for RasterReprojectionProcessor where Q: QueryProcessor< - Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, { type Output = RasterTile2D

; @@ -735,12 +735,12 @@ mod tests { raster::{Grid, GridShape, GridShape2D, GridSize, RasterDataType, RasterTile2D}, spatial_reference::SpatialReferenceAuthority, util::{ + Identifier, test::TestDefault, well_known_data::{ - COLOGNE_EPSG_4326, COLOGNE_EPSG_900_913, HAMBURG_EPSG_4326, HAMBURG_EPSG_900_913, - MARBURG_EPSG_4326, MARBURG_EPSG_900_913, + COLOGNE_EPSG_900_913, COLOGNE_EPSG_4326, HAMBURG_EPSG_900_913, HAMBURG_EPSG_4326, + MARBURG_EPSG_900_913, MARBURG_EPSG_4326, }, - Identifier, }, }; use std::collections::HashMap; @@ -825,12 +825,14 @@ mod tests { #[tokio::test] async fn multi_lines() -> Result<()> { let lines = MultiLineStringCollection::from_data( - vec![MultiLineString::new(vec![vec![ - MARBURG_EPSG_4326, - COLOGNE_EPSG_4326, - HAMBURG_EPSG_4326, - ]]) - .unwrap()], + vec![ + MultiLineString::new(vec![vec![ + MARBURG_EPSG_4326, + COLOGNE_EPSG_4326, + HAMBURG_EPSG_4326, + ]]) + .unwrap(), + ], vec![TimeInterval::new_unchecked(0, 1); 1], Default::default(), CacheHint::default(), @@ -905,13 +907,15 @@ mod tests { #[tokio::test] async fn multi_polygons() -> Result<()> { let polygons = MultiPolygonCollection::from_data( - vec![MultiPolygon::new(vec![vec![vec![ - MARBURG_EPSG_4326, - COLOGNE_EPSG_4326, - HAMBURG_EPSG_4326, - MARBURG_EPSG_4326, - ]]]) - .unwrap()], + vec![ + MultiPolygon::new(vec![vec![vec![ + MARBURG_EPSG_4326, + COLOGNE_EPSG_4326, + HAMBURG_EPSG_4326, + MARBURG_EPSG_4326, + ]]]) + .unwrap(), + ], vec![TimeInterval::new_unchecked(0, 1); 1], Default::default(), CacheHint::default(), @@ -1163,7 +1167,13 @@ mod tests { include_bytes!( "../../../test_data/raster/modis_ndvi/projected_3857/MOD13A2_M_NDVI_2014-04-01_tile-20.rst" ) as &[u8], - res[8].clone().into_materialized_tile().grid_array.inner_grid.data.as_slice() + res[8] + .clone() + .into_materialized_tile() + .grid_array + .inner_grid + .data + .as_slice() ); Ok(()) @@ -1532,18 +1542,20 @@ mod tests { let query_ctx = MockQueryContext::test_default(); let point_source = 
MockFeatureCollectionSource::with_collections_and_sref( - vec![MultiPointCollection::from_data( - MultiPoint::many(vec![ - vec![(166_021.443_080_538_42, 0.0)], - vec![(534_994.655_061_136_1, 9_329_005.182_447_437)], - vec![(499_999.999_999_999_5, 4_649_776.224_819_178)], - ]) + vec![ + MultiPointCollection::from_data( + MultiPoint::many(vec![ + vec![(166_021.443_080_538_42, 0.0)], + vec![(534_994.655_061_136_1, 9_329_005.182_447_437)], + vec![(499_999.999_999_999_5, 4_649_776.224_819_178)], + ]) + .unwrap(), + vec![TimeInterval::default(); 3], + HashMap::default(), + CacheHint::default(), + ) .unwrap(), - vec![TimeInterval::default(); 3], - HashMap::default(), - CacheHint::default(), - ) - .unwrap()], + ], SpatialReference::new(SpatialReferenceAuthority::Epsg, 32636), //utm36n ) .boxed(); @@ -1613,16 +1625,18 @@ mod tests { let query_ctx = MockQueryContext::test_default(); let point_source = MockFeatureCollectionSource::with_collections_and_sref( - vec![MultiPointCollection::from_data( - MultiPoint::many(vec![ - vec![(758_565., 4_928_353.)], // (12.25, 44,46) - ]) + vec![ + MultiPointCollection::from_data( + MultiPoint::many(vec![ + vec![(758_565., 4_928_353.)], // (12.25, 44,46) + ]) + .unwrap(), + vec![TimeInterval::default(); 1], + HashMap::default(), + CacheHint::default(), + ) .unwrap(), - vec![TimeInterval::default(); 1], - HashMap::default(), - CacheHint::default(), - ) - .unwrap()], + ], SpatialReference::new(SpatialReferenceAuthority::Epsg, 32636), //utm36n ) .boxed(); diff --git a/operators/src/processing/temporal_raster_aggregation/aggregators.rs b/operators/src/processing/temporal_raster_aggregation/aggregators.rs index 0dd9e3dab4..1173c790bf 100644 --- a/operators/src/processing/temporal_raster_aggregation/aggregators.rs +++ b/operators/src/processing/temporal_raster_aggregation/aggregators.rs @@ -1,4 +1,4 @@ -use crate::util::{statistics::SafePSquareQuantileEstimator, Result}; +use crate::util::{Result, statistics::SafePSquareQuantileEstimator}; use 
geoengine_datatypes::raster::{GridOrEmpty2D, MapIndexedElements, Pixel}; use std::marker::PhantomData; @@ -137,11 +137,7 @@ pub struct Min; impl BinaryOperation

for Min { fn op(state: P, value: P) -> P { - if state < value { - state - } else { - value - } + if state < value { state } else { value } } } @@ -153,11 +149,7 @@ pub struct Max; impl BinaryOperation

for Max { fn op(state: P, value: P) -> P { - if state > value { - state - } else { - value - } + if state > value { state } else { value } } } diff --git a/operators/src/processing/temporal_raster_aggregation/first_last_subquery.rs b/operators/src/processing/temporal_raster_aggregation/first_last_subquery.rs index c2d94cc0e0..a5622f4902 100644 --- a/operators/src/processing/temporal_raster_aggregation/first_last_subquery.rs +++ b/operators/src/processing/temporal_raster_aggregation/first_last_subquery.rs @@ -3,7 +3,7 @@ use crate::{ util::Result, }; use async_trait::async_trait; -use futures::{future::BoxFuture, Future, FutureExt, TryFuture, TryFutureExt}; +use futures::{Future, FutureExt, TryFuture, TryFutureExt, future::BoxFuture}; use geoengine_datatypes::{ primitives::{ CacheHint, QueryRectangle, RasterQueryRectangle, SpatialPartitioned, TimeInstance, @@ -171,7 +171,7 @@ fn build_temporal_accu( query_rect: &RasterQueryRectangle, tile_info: TileInformation, pool: Arc, -) -> impl Future>> { +) -> impl Future>> + use { let time_interval = query_rect.time_interval; crate::util::spawn_blocking(move || TemporalRasterAggregationTileAccu { accu_tile: RasterTile2D::new_with_tile_info( @@ -247,7 +247,7 @@ fn build_temporal_no_data_accu( query_rect: &RasterQueryRectangle, tile_info: TileInformation, pool: Arc, -) -> impl Future>> { +) -> impl Future>> + use { let time_interval = query_rect.time_interval; crate::util::spawn_blocking(move || { let output_raster = EmptyGrid2D::new(tile_info.tile_size_in_pixels).into(); diff --git a/operators/src/processing/temporal_raster_aggregation/temporal_aggregation_operator.rs b/operators/src/processing/temporal_raster_aggregation/temporal_aggregation_operator.rs index b0e12da377..6a351e731c 100644 --- a/operators/src/processing/temporal_raster_aggregation/temporal_aggregation_operator.rs +++ b/operators/src/processing/temporal_raster_aggregation/temporal_aggregation_operator.rs @@ -6,7 +6,7 @@ use super::aggregators::{ 
TemporalRasterPixelAggregator, }; use super::first_last_subquery::{ - first_tile_fold_future, last_tile_fold_future, TemporalRasterAggregationSubQueryNoDataOnly, + TemporalRasterAggregationSubQueryNoDataOnly, first_tile_fold_future, last_tile_fold_future, }; use super::subquery::GlobalStateTemporalRasterAggregationSubQuery; use crate::adapters::stack_individual_aligned_raster_bands; @@ -470,11 +470,11 @@ where impl QueryProcessor for TemporalRasterAggregationProcessor where Q: QueryProcessor< - Output = RasterTile2D

, - SpatialBounds = SpatialPartition2D, - Selection = BandSelection, - ResultDescription = RasterResultDescriptor, - >, + Output = RasterTile2D

, + SpatialBounds = SpatialPartition2D, + Selection = BandSelection, + ResultDescription = RasterResultDescriptor, + >, P: Pixel, { type Output = RasterTile2D

; @@ -517,8 +517,8 @@ mod tests { }, mock::{MockRasterSource, MockRasterSourceParams}, processing::{ - raster_stacker::{RasterStacker, RasterStackerParams}, Expression, ExpressionParams, + raster_stacker::{RasterStacker, RasterStackerParams}, }, }; @@ -1184,26 +1184,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -1285,26 +1287,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + 
assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -1372,26 +1376,28 @@ mod tests { assert_eq!(result.len(), 2); - assert!(result[0].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![13, 42, 15, 16, 17, 18]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, false, true, true, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[0].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![13, 42, 15, 16, 17, 18]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, false, true, true, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( &RasterTile2D::new_with_tile_info( @@ -1487,26 +1493,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - 
&RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 42, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 2, 3, 42, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -1588,26 +1596,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 
2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -1689,26 +1699,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -1774,60 +1786,62 @@ mod tests { .collect::>() .await; - assert!(result.tiles_equal_ignoring_cache_hint(&[ - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - 
global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ) - ])); + assert!( + result.tiles_equal_ignoring_cache_hint(&[ + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + 
TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ) + ]) + ); } #[tokio::test] @@ -1909,26 +1923,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap(), - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap(), + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -2010,26 +2026,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, 
true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 2, 3, 0, 5, 6]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -2106,72 +2124,74 @@ mod tests { .collect::>() .await; - assert!(result.tiles_equal_ignoring_cache_hint(&[ - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new( - [3, 2].into(), - vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] - ) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new( - [3, 2].into(), - vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] - ) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new( - [3, 2].into(), - vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] - ) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - 
TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new( - [3, 2].into(), - vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] + assert!( + result.tiles_equal_ignoring_cache_hint(&[ + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new( + [3, 2].into(), + vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] + ) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new( + [3, 2].into(), + vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] + ) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new( + [3, 2].into(), + vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] + ) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new( + [3, 2].into(), + vec![13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20, 13 * 20] + ) + .unwrap() + .into(), + CacheHint::default() ) - .unwrap() - .into(), - CacheHint::default() - ) - ]),); + ]), + ); } #[tokio::test] @@ -2237,60 +2257,62 @@ mod tests { .collect::>() .await; - 
assert!(result.tiles_equal_ignoring_cache_hint(&[ - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) - .unwrap() - .into(), - CacheHint::default() - ) - ])); + assert!( + result.tiles_equal_ignoring_cache_hint(&[ + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + 
global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![2, 2, 2, 2, 2, 2]) + .unwrap() + .into(), + CacheHint::default() + ) + ]) + ); } #[tokio::test] @@ -2372,26 +2394,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 1, 1, 0, 1, 1]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 1, 1, 0, 1, 1]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) 
) - )); + ); } #[tokio::test] @@ -2473,26 +2497,28 @@ mod tests { ) )); - assert!(result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( - &RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 30), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - GridOrEmpty::from( - MaskedGrid2D::new( - Grid2D::new([3, 2].into(), vec![1, 1, 1, 0, 1, 1]).unwrap(), - Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) - .unwrap() - ) - .unwrap() - ), - CacheHint::default() + assert!( + result[1].as_ref().unwrap().tiles_equal_ignoring_cache_hint( + &RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 30), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + GridOrEmpty::from( + MaskedGrid2D::new( + Grid2D::new([3, 2].into(), vec![1, 1, 1, 0, 1, 1]).unwrap(), + Grid2D::new([3, 2].into(), vec![true, true, true, false, true, true]) + .unwrap() + ) + .unwrap() + ), + CacheHint::default() + ) ) - )); + ); } #[tokio::test] @@ -2862,112 +2888,114 @@ mod tests { .collect::>() .await; - assert!(result.tiles_equal_ignoring_cache_hint(&[ - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 1, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - 
RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(0, 20), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 1, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 0].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 1, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 0, - Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ), - RasterTile2D::new_with_tile_info( - TimeInterval::new_unchecked(20, 40), - TileInformation { - global_tile_position: [-1, 1].into(), - tile_size_in_pixels: [3, 2].into(), - global_geo_transform: TestDefault::test_default(), - }, - 1, - Grid2D::new([3, 2].into(), vec![13, 
13, 13, 13, 13, 13]) - .unwrap() - .into(), - CacheHint::default() - ) - ])); + assert!( + result.tiles_equal_ignoring_cache_hint(&[ + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 1, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(0, 20), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 1, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 0].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 0].into(), + 
tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 1, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 0, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ), + RasterTile2D::new_with_tile_info( + TimeInterval::new_unchecked(20, 40), + TileInformation { + global_tile_position: [-1, 1].into(), + tile_size_in_pixels: [3, 2].into(), + global_geo_transform: TestDefault::test_default(), + }, + 1, + Grid2D::new([3, 2].into(), vec![13, 13, 13, 13, 13, 13]) + .unwrap() + .into(), + CacheHint::default() + ) + ]) + ); } #[tokio::test] diff --git a/operators/src/processing/time_projection/mod.rs b/operators/src/processing/time_projection/mod.rs index dd2a3509f0..b9cfb0f230 100644 --- a/operators/src/processing/time_projection/mod.rs +++ b/operators/src/processing/time_projection/mod.rs @@ -16,10 +16,10 @@ use geoengine_datatypes::primitives::{ColumnSelection, Geometry, TimeInterval}; use geoengine_datatypes::primitives::{TimeInstance, TimeStep, VectorQueryRectangle}; use geoengine_datatypes::util::arrow::ArrowTyped; use log::debug; -use rayon::iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; use rayon::ThreadPool; +use rayon::iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; use serde::{Deserialize, Serialize}; -use snafu::{ensure, ResultExt, Snafu}; +use snafu::{ResultExt, Snafu, ensure}; /// Projection of time information in queries and data /// diff --git a/operators/src/processing/time_shift.rs b/operators/src/processing/time_shift.rs index 4671a6ba50..8f295b2a4d 100644 --- a/operators/src/processing/time_shift.rs +++ 
b/operators/src/processing/time_shift.rs @@ -8,8 +8,8 @@ use crate::engine::{ }; use crate::util::Result; use async_trait::async_trait; -use futures::stream::BoxStream; use futures::StreamExt; +use futures::stream::BoxStream; use geoengine_datatypes::collections::{ FeatureCollection, FeatureCollectionInfos, FeatureCollectionModifications, }; @@ -735,11 +735,13 @@ mod tests { let expected = MultiPointCollection::from_data( MultiPoint::many(vec![(0., 0.)]).unwrap(), - vec![TimeInterval::new( - DateTime::new_utc(2009, 1, 1, 0, 0, 0), - DateTime::new_utc_with_millis(2013, 8, 1, 23, 59, 59, 999), - ) - .unwrap()], + vec![ + TimeInterval::new( + DateTime::new_utc(2009, 1, 1, 0, 0, 0), + DateTime::new_utc_with_millis(2013, 8, 1, 23, 59, 59, 999), + ) + .unwrap(), + ], Default::default(), CacheHint::default(), ) diff --git a/operators/src/processing/vector_join/equi_data_join.rs b/operators/src/processing/vector_join/equi_data_join.rs index c239ff6cf1..79b95740f6 100644 --- a/operators/src/processing/vector_join/equi_data_join.rs +++ b/operators/src/processing/vector_join/equi_data_join.rs @@ -2,8 +2,8 @@ use std::collections::HashMap; use std::sync::Arc; use float_cmp::approx_eq; -use futures::stream::{self, BoxStream}; use futures::StreamExt; +use futures::stream::{self, BoxStream}; use geoengine_datatypes::collections::{ BuilderProvider, DataCollection, FeatureCollection, FeatureCollectionBuilder, diff --git a/operators/src/source/csv.rs b/operators/src/source/csv.rs index b189e8e103..41fae936f5 100644 --- a/operators/src/source/csv.rs +++ b/operators/src/source/csv.rs @@ -10,7 +10,7 @@ use futures::{Stream, StreamExt}; use geoengine_datatypes::dataset::NamedData; use geoengine_datatypes::primitives::{ColumnSelection, VectorQueryRectangle}; use serde::{Deserialize, Serialize}; -use snafu::{ensure, OptionExt, ResultExt}; +use snafu::{OptionExt, ResultExt, ensure}; use geoengine_datatypes::collections::{ BuilderProvider, GeoFeatureCollectionRowBuilder, 
MultiPointCollection, VectorDataType, @@ -27,7 +27,7 @@ use crate::engine::{ }; use crate::engine::{QueryProcessor, WorkflowOperatorPath}; use crate::error; -use crate::util::{safe_lock_mutex, Result}; +use crate::util::{Result, safe_lock_mutex}; use async_trait::async_trait; use std::sync::atomic::Ordering; diff --git a/operators/src/source/gdal_source/loading_info.rs b/operators/src/source/gdal_source/loading_info.rs index c941b88c6b..c03d0c053c 100644 --- a/operators/src/source/gdal_source/loading_info.rs +++ b/operators/src/source/gdal_source/loading_info.rs @@ -417,8 +417,7 @@ impl Iterator for DynamicGdalLoadingInfoPartIterator { time: time_interval, params: Some(loading_info_part_params), cache_ttl: self.cache_ttl, - }) - .map_err(Into::into); + }); Some(loading_info_part) } else { diff --git a/operators/src/source/gdal_source/mod.rs b/operators/src/source/gdal_source/mod.rs index 59385a5ff9..18b67eb681 100644 --- a/operators/src/source/gdal_source/mod.rs +++ b/operators/src/source/gdal_source/mod.rs @@ -4,10 +4,10 @@ use crate::adapters::{ use crate::engine::{ CanonicOperatorName, MetaData, OperatorData, OperatorName, QueryProcessor, WorkflowOperatorPath, }; +use crate::util::TemporaryGdalThreadLocalConfigOptions; use crate::util::gdal::gdal_open_dataset_ex; use crate::util::input::float_option_with_nan; use crate::util::retry::retry; -use crate::util::TemporaryGdalThreadLocalConfigOptions; use crate::{ engine::{ InitializedRasterOperator, RasterOperator, RasterQueryProcessor, RasterResultDescriptor, @@ -18,12 +18,12 @@ use crate::{ }; use async_trait::async_trait; pub use error::GdalSourceError; -use float_cmp::{approx_eq, ApproxEq}; +use float_cmp::{ApproxEq, approx_eq}; +use futures::{Future, TryStreamExt}; use futures::{ - stream::{self, BoxStream, StreamExt}, Stream, + stream::{self, BoxStream, StreamExt}, }; -use futures::{Future, TryStreamExt}; use gdal::errors::GdalError; use gdal::raster::{GdalType, RasterBand as GdalRasterBand}; use gdal::{Dataset 
as GdalDataset, DatasetOptions, GdalOpenFlags, Metadata as GdalMetadata}; @@ -55,7 +55,7 @@ use log::debug; use num::FromPrimitive; use postgres_types::{FromSql, ToSql}; use serde::{Deserialize, Serialize}; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use std::collections::HashMap; use std::convert::TryFrom; use std::ffi::CString; @@ -596,20 +596,18 @@ impl GdalRasterLoader { /// A stream of futures producing `RasterTile2D` for a single slice in time /// fn temporal_slice_tile_future_stream( - query: &RasterQueryRectangle, + spatial_bounds: SpatialPartition2D, info: GdalLoadingInfoTemporalSlice, tiling_strategy: TilingStrategy, - ) -> impl Stream>>> { - stream::iter(tiling_strategy.tile_information_iterator(query.spatial_bounds)).map( - move |tile| { - GdalRasterLoader::load_tile_async( - info.params.clone(), - tile, - info.time, - info.cache_ttl.into(), - ) - }, - ) + ) -> impl Stream>>> + use { + stream::iter(tiling_strategy.tile_information_iterator(spatial_bounds)).map(move |tile| { + GdalRasterLoader::load_tile_async( + info.params.clone(), + tile, + info.time, + info.cache_ttl.into(), + ) + }) } fn loading_info_to_tile_stream< @@ -617,13 +615,18 @@ impl GdalRasterLoader { S: Stream>, >( loading_info_stream: S, - query: RasterQueryRectangle, + query: &RasterQueryRectangle, tiling_strategy: TilingStrategy, - ) -> impl Stream>> { + ) -> impl Stream>> + use { + let spatial_bounds = query.spatial_bounds; loading_info_stream .map_ok(move |info| { - GdalRasterLoader::temporal_slice_tile_future_stream(&query, info, tiling_strategy) - .map(Result::Ok) + GdalRasterLoader::temporal_slice_tile_future_stream( + spatial_bounds, + info, + tiling_strategy, + ) + .map(Result::Ok) }) .try_flatten() .try_buffered(16) // TODO: make this configurable @@ -746,15 +749,21 @@ where ) { (Some(start), Some(end)) => FillerTimeBounds::new(start, end), (None, None) => { - log::warn!("The provider did not provide a time range that covers the query. 
Falling back to query time range. "); + log::warn!( + "The provider did not provide a time range that covers the query. Falling back to query time range. " + ); FillerTimeBounds::new(query.time_interval.start(), query.time_interval.end()) } (Some(start), None) => { - log::warn!("The provider did only provide a time range start that covers the query. Falling back to query time end. "); + log::warn!( + "The provider did only provide a time range start that covers the query. Falling back to query time end. " + ); FillerTimeBounds::new(start, query.time_interval.end()) } (None, Some(end)) => { - log::warn!("The provider did only provide a time range end that covers the query. Falling back to query time start. "); + log::warn!( + "The provider did only provide a time range end that covers the query. Falling back to query time start. " + ); FillerTimeBounds::new(query.time_interval.start(), end) } }; @@ -766,11 +775,8 @@ where let source_stream = stream::iter(skipping_loading_info); - let source_stream = GdalRasterLoader::loading_info_to_tile_stream( - source_stream, - query.clone(), - tiling_strategy, - ); + let source_stream = + GdalRasterLoader::loading_info_to_tile_stream(source_stream, &query, tiling_strategy); // use SparseTilesFillAdapter to fill all the gaps let filled_stream = SparseTilesFillAdapter::new( @@ -872,12 +878,12 @@ impl InitializedRasterOperator for InitializedGdalSourceOperator { RasterDataType::U64 => { return Err(GdalSourceError::UnsupportedRasterType { raster_type: RasterDataType::U64, - })? + })?; } RasterDataType::I8 => { return Err(GdalSourceError::UnsupportedRasterType { raster_type: RasterDataType::I8, - })? + })?; } RasterDataType::I16 => TypedRasterQueryProcessor::I16( GdalSourceProcessor { @@ -900,7 +906,7 @@ impl InitializedRasterOperator for InitializedGdalSourceOperator { RasterDataType::I64 => { return Err(GdalSourceError::UnsupportedRasterType { raster_type: RasterDataType::I64, - })? 
+ })?; } RasterDataType::F32 => TypedRasterQueryProcessor::F32( GdalSourceProcessor { @@ -1249,8 +1255,8 @@ mod tests { use super::*; use crate::engine::{MockExecutionContext, MockQueryContext}; use crate::test_data; - use crate::util::gdal::add_ndvi_dataset; use crate::util::Result; + use crate::util::gdal::add_ndvi_dataset; use geoengine_datatypes::hashmap; use geoengine_datatypes::primitives::{AxisAlignedRectangle, SpatialPartition2D, TimeInstance}; use geoengine_datatypes::raster::{ @@ -1260,7 +1266,7 @@ mod tests { use geoengine_datatypes::util::gdal::hide_gdal_errors; use geoengine_datatypes::{primitives::SpatialResolution, raster::GridShape2D}; use httptest::matchers::request; - use httptest::{responders, Expectation, Server}; + use httptest::{Expectation, Server, responders}; async fn query_gdal_source( exe_ctx: &MockExecutionContext, diff --git a/operators/src/source/ogr_source/dataset_iterator.rs b/operators/src/source/ogr_source/dataset_iterator.rs index 6ac2c6c665..443cb3a0c2 100644 --- a/operators/src/source/ogr_source/dataset_iterator.rs +++ b/operators/src/source/ogr_source/dataset_iterator.rs @@ -3,8 +3,8 @@ use super::{AttributeFilter, CsvHeader, FeaturesProvider, FormatSpecifics, OgrSourceDataset}; use crate::error::{self}; -use crate::util::gdal::gdal_open_dataset_ex; use crate::util::Result; +use crate::util::gdal::gdal_open_dataset_ex; use gdal::vector::sql::Dialect; use gdal::vector::{Feature, LayerAccess}; use gdal::{Dataset, DatasetOptions, GdalOpenFlags}; diff --git a/operators/src/source/ogr_source/mod.rs b/operators/src/source/ogr_source/mod.rs index 542ff382b6..6df84b63a4 100644 --- a/operators/src/source/ogr_source/mod.rs +++ b/operators/src/source/ogr_source/mod.rs @@ -5,8 +5,8 @@ use crate::engine::{ CanonicOperatorName, OperatorData, OperatorName, QueryProcessor, WorkflowOperatorPath, }; use crate::error::Error; -use crate::util::input::StringOrNumberRange; use crate::util::Result; +use crate::util::input::StringOrNumberRange; use 
crate::{ engine::{ InitializedVectorOperator, MetaData, QueryContext, SourceOperator, @@ -18,8 +18,8 @@ use async_trait::async_trait; use futures::future::BoxFuture; use futures::stream::{BoxStream, FusedStream}; use futures::task::Context; -use futures::{ready, Stream, StreamExt}; use futures::{Future, FutureExt}; +use futures::{Stream, StreamExt, ready}; use gdal::errors::GdalError; use gdal::vector::sql::ResultSet; use gdal::vector::{Feature, FieldValue, Layer, LayerAccess, LayerCaps, OGRwkbGeometryType}; @@ -779,7 +779,6 @@ impl FeaturesProvider<'_> { "{attribute} = {start}", attribute = attribute, start = escape_literal(s.start()), - ) } #[allow(clippy::float_cmp)] @@ -792,7 +791,6 @@ impl FeaturesProvider<'_> { "CAST({attribute} as bigint) = {start}", attribute = attribute, start = n.start(), - ), StringOrNumberRange::String(s) => { format!( @@ -1282,7 +1280,7 @@ where None => { return Err(Error::Gdal { source: GdalError::InvalidFieldIndex { method_name, index }, - }) + }); } }, Err(e) => return Err(e), @@ -1967,8 +1965,8 @@ mod tests { BoundingBox2D, FeatureData, Measurement, SpatialResolution, TimeGranularity, }; use geoengine_datatypes::spatial_reference::{SpatialReference, SpatialReferenceOption}; - use geoengine_datatypes::util::test::TestDefault; use geoengine_datatypes::util::Identifier; + use geoengine_datatypes::util::test::TestDefault; use serde_json::json; #[test] @@ -7068,12 +7066,14 @@ mod tests { #[tokio::test] #[allow(clippy::too_many_lines)] async fn creates_time_filter_string() -> Result<()> { - assert!(FeaturesProvider::create_time_filter_string( - OgrSourceDatasetTimeType::None, // Unsupported time type - TimeInterval::new_instant(0)?, - "PostgreSQL" - ) - .is_none()); + assert!( + FeaturesProvider::create_time_filter_string( + OgrSourceDatasetTimeType::None, // Unsupported time type + TimeInterval::new_instant(0)?, + "PostgreSQL" + ) + .is_none() + ); assert_eq!( FeaturesProvider::create_time_filter_string( @@ -7133,32 +7133,36 @@ mod 
tests { .to_string() ); - assert!(FeaturesProvider::create_time_filter_string( - OgrSourceDatasetTimeType::Start { - start_field: "start".to_string(), - start_format: Default::default(), - duration: OgrSourceDurationSpec::Value(TimeStep { - // Unsupported duration spec - granularity: TimeGranularity::Millis, - step: 10 - }), - }, - TimeInterval::new_instant(0)?, - "PostgreSQL" - ) - .is_none()); + assert!( + FeaturesProvider::create_time_filter_string( + OgrSourceDatasetTimeType::Start { + start_field: "start".to_string(), + start_format: Default::default(), + duration: OgrSourceDurationSpec::Value(TimeStep { + // Unsupported duration spec + granularity: TimeGranularity::Millis, + step: 10 + }), + }, + TimeInterval::new_instant(0)?, + "PostgreSQL" + ) + .is_none() + ); - assert!(FeaturesProvider::create_time_filter_string( - OgrSourceDatasetTimeType::StartDuration { - // Unsupported time type - start_field: "start".to_string(), - start_format: Default::default(), - duration_field: "duration".to_string(), - }, - TimeInterval::new_instant(0)?, - "PostgreSQL" - ) - .is_none()); + assert!( + FeaturesProvider::create_time_filter_string( + OgrSourceDatasetTimeType::StartDuration { + // Unsupported time type + start_field: "start".to_string(), + start_format: Default::default(), + duration_field: "duration".to_string(), + }, + TimeInterval::new_instant(0)?, + "PostgreSQL" + ) + .is_none() + ); assert_eq!( FeaturesProvider::create_time_filter_string( @@ -7206,40 +7210,46 @@ mod tests { r#""start" < '+262142-12-31T23:59:59.999Z'"#.to_string() ); - assert!(FeaturesProvider::create_time_filter_string( - OgrSourceDatasetTimeType::Start { - start_field: "start".to_string(), - start_format: Default::default(), - duration: OgrSourceDurationSpec::Infinite, - }, - TimeInterval::new_unchecked(-210_895_056_000_001, 8_210_266_876_799_999), // Exceeds Postgres range lower bound - "PostgreSQL" - ) - .is_none()); - - assert!(std::panic::catch_unwind(|| { + assert!( 
FeaturesProvider::create_time_filter_string( OgrSourceDatasetTimeType::Start { start_field: "start".to_string(), start_format: Default::default(), duration: OgrSourceDurationSpec::Infinite, }, - TimeInterval::new_unchecked(-210_895_056_000_000, 8_210_266_876_800_000), // Exceeds Postgres range upper bound (limited by TimeInstance upper bound, panics) - "PostgreSQL", + TimeInterval::new_unchecked(-210_895_056_000_001, 8_210_266_876_799_999), // Exceeds Postgres range lower bound + "PostgreSQL" ) - }) - .is_err()); + .is_none() + ); - assert!(FeaturesProvider::create_time_filter_string( - OgrSourceDatasetTimeType::Start { - start_field: "start".to_string(), - start_format: Default::default(), - duration: OgrSourceDurationSpec::Infinite, - }, - TimeInterval::new_instant(0)?, - "Unsupported driver" - ) - .is_none()); + assert!( + std::panic::catch_unwind(|| { + FeaturesProvider::create_time_filter_string( + OgrSourceDatasetTimeType::Start { + start_field: "start".to_string(), + start_format: Default::default(), + duration: OgrSourceDurationSpec::Infinite, + }, + TimeInterval::new_unchecked(-210_895_056_000_000, 8_210_266_876_800_000), // Exceeds Postgres range upper bound (limited by TimeInstance upper bound, panics) + "PostgreSQL", + ) + }) + .is_err() + ); + + assert!( + FeaturesProvider::create_time_filter_string( + OgrSourceDatasetTimeType::Start { + start_field: "start".to_string(), + start_format: Default::default(), + duration: OgrSourceDurationSpec::Infinite, + }, + TimeInterval::new_instant(0)?, + "Unsupported driver" + ) + .is_none() + ); Ok(()) } diff --git a/operators/src/util/async_util.rs b/operators/src/util/async_util.rs index bf97e07f1d..220119e892 100644 --- a/operators/src/util/async_util.rs +++ b/operators/src/util/async_util.rs @@ -1,9 +1,9 @@ -use futures::{future::BoxFuture, Future, FutureExt}; +use futures::{Future, FutureExt, future::BoxFuture}; use log::debug; use rayon::ThreadPool; use std::sync::Arc; use tokio::task::JoinHandle; -use 
tracing::{span, Level}; +use tracing::{Level, span}; use crate::{engine::QueryAbortTrigger, error, util::Result}; diff --git a/operators/src/util/gdal.rs b/operators/src/util/gdal.rs index 8fcdac6f92..7ff177edf6 100644 --- a/operators/src/util/gdal.rs +++ b/operators/src/util/gdal.rs @@ -118,8 +118,8 @@ pub fn add_ndvi_dataset(ctx: &mut MockExecutionContext) -> NamedData { } #[allow(clippy::missing_panics_doc)] -pub fn create_ports_meta_data( -) -> StaticMetaData { +pub fn create_ports_meta_data() +-> StaticMetaData { StaticMetaData { loading_info: OgrSourceDataset { file_name: test_data!("vector/data/ne_10m_ports/ne_10m_ports.shp").into(), diff --git a/operators/src/util/input/float_with_nan_serde.rs b/operators/src/util/input/float_with_nan_serde.rs index 51e0e45ac9..066db5423f 100644 --- a/operators/src/util/input/float_with_nan_serde.rs +++ b/operators/src/util/input/float_with_nan_serde.rs @@ -1,5 +1,5 @@ -use serde::de::{self, Visitor}; use serde::Serializer; +use serde::de::{self, Visitor}; use std::fmt; /// Serialize and deserialize floats with special treatment of NaN @@ -163,12 +163,14 @@ mod tests { // deserialize - assert!(serde_json::from_value::(serde_json::json!({ - "bar": "nan", - })) - .unwrap() - .bar - .is_nan()); + assert!( + serde_json::from_value::(serde_json::json!({ + "bar": "nan", + })) + .unwrap() + .bar + .is_nan() + ); assert_eq!( serde_json::from_value::(serde_json::json!({ @@ -215,13 +217,15 @@ mod tests { // deserialize - assert!(serde_json::from_value::(serde_json::json!({ - "bar": "nan", - })) - .unwrap() - .bar - .unwrap() - .is_nan()); + assert!( + serde_json::from_value::(serde_json::json!({ + "bar": "nan", + })) + .unwrap() + .bar + .unwrap() + .is_nan() + ); assert_eq!( serde_json::from_value::(serde_json::json!({ diff --git a/operators/src/util/input/multi_raster_or_vector.rs b/operators/src/util/input/multi_raster_or_vector.rs index 363b0dc599..49109bd4cb 100644 --- a/operators/src/util/input/multi_raster_or_vector.rs +++ 
b/operators/src/util/input/multi_raster_or_vector.rs @@ -66,12 +66,14 @@ mod tests { #[test] fn it_serializes() { - let operator = MultiRasterOrVectorOperator::Raster(vec![GdalSource { - params: GdalSourceParameters { - data: NamedData::with_namespaced_name("foo", "bar"), - }, - } - .boxed()]); + let operator = MultiRasterOrVectorOperator::Raster(vec![ + GdalSource { + params: GdalSourceParameters { + data: NamedData::with_namespaced_name("foo", "bar"), + }, + } + .boxed(), + ]); assert_eq!( serde_json::to_value(&operator).unwrap(), diff --git a/operators/src/util/input/string_or_number_range.rs b/operators/src/util/input/string_or_number_range.rs index 8d78bfb337..6fae64a79a 100644 --- a/operators/src/util/input/string_or_number_range.rs +++ b/operators/src/util/input/string_or_number_range.rs @@ -1,8 +1,8 @@ use std::{convert::TryFrom, ops::RangeInclusive}; use crate::error; -use crate::util::input::StringOrNumber; use crate::util::Result; +use crate::util::input::StringOrNumber; use geoengine_datatypes::primitives::FeatureDataValue; use num_traits::AsPrimitive; use serde::de::{Error, SeqAccess, Visitor}; @@ -357,13 +357,17 @@ mod tests { ); assert!(RangeInclusive::::try_from(StringOrNumberRange::Int(42..=43)).is_err()); - assert!(RangeInclusive::::try_from(StringOrNumberRange::String( - "foo".to_string()..="bar".to_string() - )) - .is_err()); - assert!(RangeInclusive::::try_from(StringOrNumberRange::String( - "foo".to_string()..="bar".to_string() - )) - .is_err()); + assert!( + RangeInclusive::::try_from(StringOrNumberRange::String( + "foo".to_string()..="bar".to_string() + )) + .is_err() + ); + assert!( + RangeInclusive::::try_from(StringOrNumberRange::String( + "foo".to_string()..="bar".to_string() + )) + .is_err() + ); } } diff --git a/operators/src/util/raster_stream_to_geotiff.rs b/operators/src/util/raster_stream_to_geotiff.rs index fcf6a60d03..d0588990b5 100644 --- a/operators/src/util/raster_stream_to_geotiff.rs +++ 
b/operators/src/util/raster_stream_to_geotiff.rs @@ -177,7 +177,7 @@ where None }; - gdal_config_options + let option_vars = gdal_config_options .as_deref() .map(TemporaryGdalThreadLocalConfigOptions::new); @@ -196,7 +196,9 @@ where if let Some(no_data) = gdal_tiff_metadata.no_data_value { band.set_no_data_value(Some(no_data))?; } else { - band.create_mask_band(false)?; + // only allowed option for internal masks + band.create_mask_band(true)?; + break; } } @@ -211,6 +213,8 @@ where window_end, }; + drop(option_vars); + Ok((initial_tile_time, file_path, dataset, writer)) } @@ -694,7 +698,7 @@ impl GdalDatasetHolder

{ if self .intermediate_dataset .as_ref() - .map_or(true, |x| x.time_interval != time_interval) + .is_none_or(|x| x.time_interval != time_interval) { if let Some(intermediate_dataset) = self.intermediate_dataset.take() { self.dataset_writer.finish_dataset(intermediate_dataset)?; @@ -858,10 +862,9 @@ impl GdalDatasetWriter

{ // No-data masks are described by the rasterio docs as: // "One is the the valid data mask from GDAL, an unsigned byte array with the same number of rows and columns as the dataset in which non-zero elements (typically 255) indicate that the corresponding data elements are valid. Other elements are invalid, or nodata elements." - let mask_grid_gdal_values = - masked_grid - .validity_mask - .map_elements(|is_valid| if is_valid { 255_u8 } else { 0 }); // TODO: investigate if we can transmute the vec of bool to u8. + let mask_grid_gdal_values = masked_grid + .validity_mask + .map_elements(|is_valid| if is_valid { 255_u8 } else { 0 }); // TODO: investigate if we can transmute the vec of bool to u8. let mut mask_buffer = Buffer::new(window_size, mask_grid_gdal_values.data); let mut mask_band = raster_band.open_mask_band()?; @@ -1008,26 +1011,27 @@ fn geotiff_to_cog( #[cfg(test)] mod tests { - use std::marker::PhantomData; - use std::ops::Add; - + use super::*; + use crate::engine::RasterResultDescriptor; + use crate::mock::MockRasterSourceProcessor; + use crate::util::gdal::gdal_open_dataset; + use crate::{ + engine::MockQueryContext, source::GdalSourceProcessor, util::gdal::create_ndvi_meta_data, + }; use geoengine_datatypes::primitives::CacheHint; use geoengine_datatypes::primitives::{DateTime, Duration}; use geoengine_datatypes::raster::{Grid, RasterDataType}; + use geoengine_datatypes::test_data; + use geoengine_datatypes::util::{ + ImageFormat, assert_image_equals, assert_image_equals_with_format, + }; use geoengine_datatypes::{ primitives::{Coordinate2D, SpatialPartition2D, SpatialResolution, TimeInterval}, raster::TilingSpecification, util::test::TestDefault, }; - - use crate::engine::RasterResultDescriptor; - use crate::mock::MockRasterSourceProcessor; - use crate::util::gdal::gdal_open_dataset; - use crate::{ - engine::MockQueryContext, source::GdalSourceProcessor, util::gdal::create_ndvi_meta_data, - }; - - use super::*; + use std::marker::PhantomData; + use 
std::ops::Add; #[tokio::test] async fn geotiff_with_no_data_from_stream() { @@ -1083,10 +1087,9 @@ mod tests { // "../test_data/raster/geotiff_from_stream_compressed.tiff", // ); - assert_eq!( - include_bytes!("../../../test_data/raster/geotiff_from_stream_compressed.tiff") - as &[u8], - bytes.as_slice() + assert_image_equals( + test_data!("raster/geotiff_from_stream_compressed.tiff"), + &bytes, ); } @@ -1139,11 +1142,9 @@ mod tests { .await .unwrap(); - assert_eq!( - include_bytes!( - "../../../test_data/raster/geotiff_with_mask_from_stream_compressed.tiff" - ) as &[u8], - bytes.as_slice() + assert_image_equals( + test_data!("raster/geotiff_with_mask_from_stream_compressed.tiff"), + &bytes, ); } @@ -1201,10 +1202,10 @@ mod tests { // "../test_data/raster/geotiff_big_tiff_from_stream_compressed.tiff", // ); - assert_eq!( - include_bytes!("../../../test_data/raster/geotiff_big_tiff_from_stream_compressed.tiff") - as &[u8], - bytes.as_slice() + assert_image_equals_with_format( + test_data!("raster/geotiff_big_tiff_from_stream_compressed.tiff"), + &bytes, + ImageFormat::Tiff, ); } @@ -1258,15 +1259,14 @@ mod tests { .unwrap(); // geoengine_datatypes::util::test::save_test_bytes( - // &bytes, - // "../test_data/raster/cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff", - //); - - assert_eq!( - include_bytes!( - "../../../test_data/raster/cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff" - ) as &[u8], - bytes.as_slice() + // &bytes, + // "../test_data/raster/cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff", + // ); + + assert_image_equals_with_format( + test_data!("raster/cloud_optimized_geotiff_big_tiff_from_stream_compressed.tiff"), + &bytes, + ImageFormat::Tiff, ); // TODO: check programmatically that intermediate file is gone @@ -1326,11 +1326,9 @@ mod tests { // "../test_data/raster/cloud_optimized_geotiff_from_stream_compressed.tiff", // ); - assert_eq!( - include_bytes!( - 
"../../../test_data/raster/cloud_optimized_geotiff_from_stream_compressed.tiff" - ) as &[u8], - bytes.as_slice() + assert_image_equals( + test_data!("raster/cloud_optimized_geotiff_from_stream_compressed.tiff"), + &bytes, ); // TODO: check programmatically that intermediate file is gone @@ -1401,17 +1399,19 @@ mod tests { // ); // } - assert_eq!( - include_bytes!("../../../test_data/raster/cloud_optimized_geotiff_timestep_0_from_stream_compressed.tiff") as &[u8], - bytes.pop().expect("bytes should have length 3").as_slice() + assert_image_equals( + test_data!("raster/cloud_optimized_geotiff_timestep_0_from_stream_compressed.tiff"), + bytes.pop().expect("bytes should have length 3").as_slice(), ); - assert_eq!( - include_bytes!("../../../test_data/raster/cloud_optimized_geotiff_timestep_1_from_stream_compressed.tiff") as &[u8], - bytes.pop().expect("bytes should have length 3").as_slice() + + assert_image_equals( + test_data!("raster/cloud_optimized_geotiff_timestep_1_from_stream_compressed.tiff"), + bytes.pop().expect("bytes should have length 3").as_slice(), ); - assert_eq!( - include_bytes!("../../../test_data/raster/cloud_optimized_geotiff_timestep_2_from_stream_compressed.tiff") as &[u8], - bytes.pop().expect("bytes should have length 3").as_slice() + + assert_image_equals( + test_data!("raster/cloud_optimized_geotiff_timestep_2_from_stream_compressed.tiff"), + bytes.pop().expect("bytes should have length 3").as_slice(), ); // TODO: check programmatically that intermediate file is gone diff --git a/operators/src/util/raster_stream_to_png.rs b/operators/src/util/raster_stream_to_png.rs index a8a9478b85..1614af4993 100644 --- a/operators/src/util/raster_stream_to_png.rs +++ b/operators/src/util/raster_stream_to_png.rs @@ -2,7 +2,7 @@ use super::abortable_query_execution; use crate::engine::{QueryAbortTrigger, QueryContext, QueryProcessor, RasterQueryProcessor}; use crate::util::Result; use futures::TryStreamExt; -use futures::{future::BoxFuture, StreamExt}; +use 
futures::{StreamExt, future::BoxFuture}; use geoengine_datatypes::error::{BoxedResultExt, ErrorSource}; use geoengine_datatypes::operations::image::{ColorMapper, RgbParams}; use geoengine_datatypes::raster::{FromIndexFn, GridIndexAccess, GridShapeAccess}; @@ -14,7 +14,7 @@ use geoengine_datatypes::{ use num_traits::AsPrimitive; use snafu::Snafu; use std::convert::TryInto; -use tracing::{span, Level}; +use tracing::{Level, span}; /// # Panics /// Panics if not three bands were queried. diff --git a/operators/src/util/rayon.rs b/operators/src/util/rayon.rs index 401952dfa2..b61df25eeb 100644 --- a/operators/src/util/rayon.rs +++ b/operators/src/util/rayon.rs @@ -11,16 +11,18 @@ use rayon::{ThreadPool, ThreadPoolBuilder}; /// Panics if building the global thread pool does not fail. /// fn rayon_destroy_global_thread_pool() { - assert!(rayon::ThreadPoolBuilder::new() - .num_threads(1) - .spawn_handler(|_thread| { - Err(std::io::Error::new( - std::io::ErrorKind::Other, - "Do not spawn rayon global pool on purpose", - )) - }) - .build_global() - .is_err()); + assert!( + rayon::ThreadPoolBuilder::new() + .num_threads(1) + .spawn_handler(|_thread| { + Err(std::io::Error::new( + std::io::ErrorKind::Other, + "Do not spawn rayon global pool on purpose", + )) + }) + .build_global() + .is_err() + ); } /// Create a rayon thread pool with the given number of threads. 
diff --git a/operators/src/util/retry.rs b/operators/src/util/retry.rs index 9ad171bb2e..7bcd4366d6 100644 --- a/operators/src/util/retry.rs +++ b/operators/src/util/retry.rs @@ -44,8 +44,8 @@ where #[cfg(test)] mod tests { - use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; + use std::sync::atomic::{AtomicUsize, Ordering}; use std::task::Poll; use futures::future::{err, ok, poll_fn}; diff --git a/operators/src/util/stream_zip/vec_zip.rs b/operators/src/util/stream_zip/vec_zip.rs index 97fe897218..e037b23ea3 100644 --- a/operators/src/util/stream_zip/vec_zip.rs +++ b/operators/src/util/stream_zip/vec_zip.rs @@ -206,8 +206,8 @@ mod tests { use super::*; use async_stream::stream; - use futures::stream::BoxStream; use futures::StreamExt; + use futures::stream::BoxStream; #[tokio::test] async fn concurrent_stream() { diff --git a/operators/src/util/temporary_gdal_thread_local_config_options.rs b/operators/src/util/temporary_gdal_thread_local_config_options.rs index 18a04f30c3..4074d82ee4 100644 --- a/operators/src/util/temporary_gdal_thread_local_config_options.rs +++ b/operators/src/util/temporary_gdal_thread_local_config_options.rs @@ -11,13 +11,8 @@ impl TemporaryGdalThreadLocalConfigOptions { let mut original_configs = vec![]; for (key, value) in configs { - let old = gdal::config::get_thread_local_config_option(key, "").map(|value| { - if value.is_empty() { - None - } else { - Some(value) - } - })?; + let old = gdal::config::get_thread_local_config_option(key, "") + .map(|value| if value.is_empty() { None } else { Some(value) })?; // TODO: check if overriding existing config (local & global) is ok for the given key gdal::config::set_thread_local_config_option(key, value)?; diff --git a/operators/tests/streams.rs b/operators/tests/streams.rs index 2544cc15c4..080c860dbb 100644 --- a/operators/tests/streams.rs +++ b/operators/tests/streams.rs @@ -1,7 +1,7 @@ +use futures::Stream; use futures::executor::block_on_stream; use futures::stream; use 
futures::task::Poll; -use futures::Stream; pub fn fn_stream() -> impl Stream { let mut counter: usize = 2; diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 4df8ddff2c..15265f1edb 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.84.0" +channel = "1.85.0" components = ["cargo", "rustfmt", "rust-src", "clippy", "llvm-tools"] diff --git a/services/benches/quota_check.rs b/services/benches/quota_check.rs index f757a2bc99..f83b7f9a0d 100644 --- a/services/benches/quota_check.rs +++ b/services/benches/quota_check.rs @@ -12,7 +12,7 @@ use geoengine_operators::{ source::{GdalSource, GdalSourceParameters}, }; use geoengine_services::{ - config::{get_config_element, Quota, QuotaTrackingMode}, + config::{Quota, QuotaTrackingMode, get_config_element}, contexts::{ApplicationContext, SessionContext}, users::{UserAuth, UserDb}, util::tests::{add_ndvi_to_datasets2, send_test_request, with_temp_context}, diff --git a/services/src/api/handlers/datasets.rs b/services/src/api/handlers/datasets.rs index b866080087..c4762c8743 100755 --- a/services/src/api/handlers/datasets.rs +++ b/services/src/api/handlers/datasets.rs @@ -1,21 +1,21 @@ use crate::{ api::model::{ operators::{GdalLoadingInfoTemporalSlice, GdalMetaDataList}, - responses::datasets::{errors::*, DatasetNameResponse}, + responses::datasets::{DatasetNameResponse, errors::*}, services::{ AddDataset, CreateDataset, DataPath, DatasetDefinition, MetaDataDefinition, MetaDataSuggestion, Provenances, UpdateDataset, }, }, - config::{get_config_element, Data}, + config::{Data, get_config_element}, contexts::{ApplicationContext, SessionContext}, datasets::{ + DatasetName, listing::{DatasetListOptions, DatasetProvider}, storage::{AutoCreateDataset, DatasetStore, SuggestMetaData}, upload::{ AdjustFilePath, Upload, UploadDb, UploadId, UploadRootPath, Volume, VolumeName, Volumes, }, - DatasetName, }, error::{self, Error, Result}, permissions::{Permission, PermissionDb, Role}, 
@@ -25,10 +25,10 @@ use crate::{ path_with_base_path, }, }; -use actix_web::{web, FromRequest, HttpResponse, HttpResponseBuilder, Responder}; +use actix_web::{FromRequest, HttpResponse, HttpResponseBuilder, Responder, web}; use gdal::{ - vector::{Layer, LayerAccess, OGRFieldType}, Dataset, DatasetOptions, + vector::{Layer, LayerAccess, OGRFieldType}, }; use geoengine_datatypes::{ collections::VectorDataType, @@ -61,7 +61,6 @@ use utoipa::{ToResponse, ToSchema}; pub(crate) fn init_dataset_routes(cfg: &mut web::ServiceConfig) where C: ApplicationContext, - C::Session: FromRequest, { cfg.service( @@ -799,7 +798,7 @@ fn select_layer_from_dataset<'a>( dataset: &'a Dataset, layer_name: &Option, ) -> Result> { - if let Some(ref layer_name) = layer_name { + if let Some(layer_name) = layer_name { dataset.layer_by_name(layer_name).map_err(|_| { crate::error::Error::DatasetInvalidLayerName { layer_name: layer_name.clone(), @@ -1367,14 +1366,14 @@ async fn create_system_dataset( mod tests { use super::*; use crate::api::model::datatypes::NamedData; - use crate::api::model::responses::datasets::DatasetNameResponse; use crate::api::model::responses::IdResponse; + use crate::api::model::responses::datasets::DatasetNameResponse; use crate::api::model::services::{DatasetDefinition, Provenance}; use crate::contexts::PostgresContext; use crate::contexts::{Session, SessionId}; + use crate::datasets::DatasetIdAndName; use crate::datasets::storage::DatasetStore; use crate::datasets::upload::{UploadId, VolumeName}; - use crate::datasets::DatasetIdAndName; use crate::error::Result; use crate::ge_context; use crate::projects::{PointSymbology, RasterSymbology, Symbology}; @@ -1382,8 +1381,8 @@ mod tests { use crate::users::UserAuth; use crate::util::tests::admin_login; use crate::util::tests::{ - add_file_definition_to_datasets, read_body_json, read_body_string, send_test_request, - MockQueryContext, SetMultipartBody, TestDataUploads, + MockQueryContext, SetMultipartBody, TestDataUploads, 
add_file_definition_to_datasets, + read_body_json, read_body_string, send_test_request, }; use actix_web; use actix_web::http::header; @@ -1404,7 +1403,7 @@ mod tests { OgrSource, OgrSourceDataset, OgrSourceErrorSpec, OgrSourceParameters, }; use geoengine_operators::util::gdal::create_ndvi_meta_data; - use serde_json::{json, Value}; + use serde_json::{Value, json}; use tokio_postgres::NoTls; #[ge_context::test] diff --git a/services/src/api/handlers/ebv.rs b/services/src/api/handlers/ebv.rs index 6bb78af261..0bd2071db1 100644 --- a/services/src/api/handlers/ebv.rs +++ b/services/src/api/handlers/ebv.rs @@ -6,8 +6,8 @@ use super::tasks::TaskResponse; use crate::api::model::datatypes::ResamplingMethod; use crate::contexts::ApplicationContext; use crate::datasets::external::netcdfcf::{ - error, EbvPortalDataProvider, NetCdfCf4DProviderError, OverviewGeneration, EBV_PROVIDER_ID, - NETCDF_CF_PROVIDER_ID, + EBV_PROVIDER_ID, EbvPortalDataProvider, NETCDF_CF_PROVIDER_ID, NetCdfCf4DProviderError, + OverviewGeneration, error, }; use crate::error::Result; use crate::layers::storage::LayerProviderDb; @@ -15,8 +15,8 @@ use crate::tasks::{Task, TaskContext, TaskId, TaskManager, TaskStatus, TaskStatu use crate::util::apidoc::{OpenApiServerInfo, TransformSchemasWithTag}; use crate::{contexts::SessionContext, datasets::external::netcdfcf::NetCdfCfDataProvider}; use actix_web::{ - web::{self, ServiceConfig}, FromRequest, Responder, + web::{self, ServiceConfig}, }; use futures::channel::oneshot; use futures::lock::Mutex; @@ -305,7 +305,9 @@ impl Task for EbvMultiOverviewTask { } TaskStatus::Running(_) => { // must not happen, since we used the callback - debug!("Ran into task status that must not happend: running/aborted after finish"); + debug!( + "Ran into task status that must not happend: running/aborted after finish" + ); } } } else { @@ -674,7 +676,7 @@ mod tests { util::server::{configure_extractors, render_404, render_405}, util::tests::read_body_string, }; - use 
actix_web::{dev::ServiceResponse, http, http::header, middleware, test, web, App}; + use actix_web::{App, dev::ServiceResponse, http, http::header, middleware, test, web}; use actix_web_httpauth::headers::authorization::Bearer; use geoengine_datatypes::test_data; use geoengine_datatypes::util::gdal::hide_gdal_errors; @@ -791,11 +793,12 @@ mod tests { assert!(is_empty(overview_folder.path())); - assert!(!ctx - .db() - .overviews_exist(provider_id, "dataset_m.nc") - .await - .unwrap()); + assert!( + !ctx.db() + .overviews_exist(provider_id, "dataset_m.nc") + .await + .unwrap() + ); } #[ge_context::test] diff --git a/services/src/api/handlers/layers.rs b/services/src/api/handlers/layers.rs index c99491bc30..9523d59947 100644 --- a/services/src/api/handlers/layers.rs +++ b/services/src/api/handlers/layers.rs @@ -2,7 +2,7 @@ use crate::api::model::datatypes::{DataProviderId, LayerId}; use crate::api::model::responses::IdResponse; use crate::config::get_config_element; use crate::contexts::ApplicationContext; -use crate::datasets::{schedule_raster_dataset_from_workflow_task, RasterDatasetFromWorkflow}; +use crate::datasets::{RasterDatasetFromWorkflow, schedule_raster_dataset_from_workflow_task}; use crate::error::Error::NotImplemented; use crate::error::{Error, Result}; use crate::layers::layer::{ @@ -18,7 +18,7 @@ use crate::util::workflows::validate_workflow; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::WorkflowId; use crate::{contexts::SessionContext, layers::layer::LayerCollectionListOptions}; -use actix_web::{web, FromRequest, HttpResponse, Responder}; +use actix_web::{FromRequest, HttpResponse, Responder, web}; use geoengine_datatypes::primitives::{BandSelection, QueryRectangle}; use geoengine_operators::engine::WorkflowOperatorPath; use serde::{Deserialize, Serialize}; @@ -1196,7 +1196,7 @@ mod tests { use crate::users::{UserAuth, UserSession}; use crate::util::tests::admin_login; use crate::util::tests::{ - read_body_string, 
send_test_request, MockQueryContext, TestDataUploads, + MockQueryContext, TestDataUploads, read_body_string, send_test_request, }; use crate::{contexts::Session, workflows::workflow::Workflow}; use actix_web::dev::ServiceResponse; @@ -1219,7 +1219,7 @@ mod tests { use geoengine_operators::processing::{TimeShift, TimeShiftParams}; use geoengine_operators::source::{GdalSource, GdalSourceParameters}; use geoengine_operators::util::raster_stream_to_geotiff::{ - raster_stream_to_geotiff_bytes, GdalGeoTiffDatasetMetadata, GdalGeoTiffOptions, + GdalGeoTiffDatasetMetadata, GdalGeoTiffOptions, raster_stream_to_geotiff_bytes, }; use geoengine_operators::{ engine::VectorOperator, diff --git a/services/src/api/handlers/machine_learning.rs b/services/src/api/handlers/machine_learning.rs index 2b12c54976..33d26ad457 100644 --- a/services/src/api/handlers/machine_learning.rs +++ b/services/src/api/handlers/machine_learning.rs @@ -1,10 +1,10 @@ -use actix_web::{web, FromRequest, HttpResponse, ResponseError}; +use actix_web::{FromRequest, HttpResponse, ResponseError, web}; use crate::{ - api::model::responses::{ml_models::MlModelNameResponse, ErrorResponse}, + api::model::responses::{ErrorResponse, ml_models::MlModelNameResponse}, contexts::{ApplicationContext, SessionContext}, machine_learning::{ - error::MachineLearningError, name::MlModelName, MlModel, MlModelDb, MlModelListOptions, + MlModel, MlModelDb, MlModelListOptions, error::MachineLearningError, name::MlModelName, }, }; @@ -138,7 +138,7 @@ mod tests { ge_context, machine_learning::MlModelMetadata, users::UserAuth, - util::tests::{send_test_request, SetMultipartBody, TestDataUploads}, + util::tests::{SetMultipartBody, TestDataUploads, send_test_request}, }; use actix_http::header; use actix_web::test; diff --git a/services/src/api/handlers/mod.rs b/services/src/api/handlers/mod.rs index d61134aa3e..febe74b2da 100644 --- a/services/src/api/handlers/mod.rs +++ b/services/src/api/handlers/mod.rs @@ -1,7 +1,7 @@ use 
crate::contexts::SessionId; use crate::error::{Error, Result}; -use actix_web::http::header; use actix_web::HttpRequest; +use actix_web::http::header; use actix_web_httpauth::headers::authorization::{Bearer, Scheme}; use std::str::FromStr; diff --git a/services/src/api/handlers/permissions.rs b/services/src/api/handlers/permissions.rs index e85e858d3a..4884ff610f 100644 --- a/services/src/api/handlers/permissions.rs +++ b/services/src/api/handlers/permissions.rs @@ -1,7 +1,7 @@ use crate::api::model::datatypes::LayerId; use crate::contexts::{ApplicationContext, GeoEngineDb, SessionContext}; -use crate::datasets::storage::DatasetDb; use crate::datasets::DatasetName; +use crate::datasets::storage::DatasetDb; use crate::error::{self, Error, Result}; use crate::layers::listing::LayerCollectionId; use crate::machine_learning::MlModelDb; @@ -9,7 +9,7 @@ use crate::permissions::{ Permission, PermissionDb, PermissionListing as DbPermissionListing, ResourceId, Role, RoleId, }; use crate::projects::ProjectId; -use actix_web::{web, FromRequest, HttpResponse}; +use actix_web::{FromRequest, HttpResponse, web}; use geoengine_datatypes::error::BoxedResultExt; use geoengine_datatypes::machine_learning::MlModelName; use serde::{Deserialize, Serialize}; @@ -21,7 +21,6 @@ use uuid::Uuid; pub(crate) fn init_permissions_routes(cfg: &mut web::ServiceConfig) where C: ApplicationContext, - C::Session: FromRequest, { cfg.service( @@ -140,7 +139,7 @@ impl TryFrom<(String, String)> for Resource { return Err(Error::InvalidResourceId { resource_type: value.0, resource_id: value.1, - }) + }); } }) } @@ -300,7 +299,7 @@ mod tests { mock::{MockPointSource, MockPointSourceParams}, source::{GdalSource, GdalSourceParameters, OgrSource, OgrSourceParameters}, }; - use serde_json::{json, Value}; + use serde_json::{Value, json}; use tokio_postgres::NoTls; #[ge_context::test] @@ -356,11 +355,12 @@ mod tests { // check that workflow can only be intitialized after adding permissions - assert!(gdal - 
.clone() - .initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) - .await - .is_err()); + assert!( + gdal.clone() + .initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) + .await + .is_err() + ); admin_ctx .db() @@ -372,16 +372,18 @@ mod tests { .await .unwrap(); - assert!(gdal - .initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) - .await - .is_ok()); + assert!( + gdal.initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) + .await + .is_ok() + ); - assert!(ogr - .clone() - .initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) - .await - .is_err()); + assert!( + ogr.clone() + .initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) + .await + .is_err() + ); admin_ctx .db() @@ -393,10 +395,11 @@ mod tests { .await .unwrap(); - assert!(ogr - .initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) - .await - .is_ok()); + assert!( + ogr.initialize(WorkflowOperatorPath::initialize_root(), &exe_ctx) + .await + .is_ok() + ); } #[ge_context::test] diff --git a/services/src/api/handlers/plots.rs b/services/src/api/handlers/plots.rs index bee74822da..a06f44e4fd 100644 --- a/services/src/api/handlers/plots.rs +++ b/services/src/api/handlers/plots.rs @@ -8,7 +8,7 @@ use crate::util::parsing::parse_spatial_resolution; use crate::util::server::connection_closed; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::WorkflowId; -use actix_web::{web, FromRequest, HttpRequest, Responder}; +use actix_web::{FromRequest, HttpRequest, Responder, web}; use base64::Engine; use geoengine_datatypes::operations::reproject::reproject_query; use geoengine_datatypes::plots::PlotOutputFormat; @@ -218,7 +218,7 @@ mod tests { use crate::workflows::workflow::Workflow; use actix_web; use actix_web::dev::ServiceResponse; - use actix_web::http::{header, Method}; + use actix_web::http::{Method, header}; use actix_web_httpauth::headers::authorization::Bearer; use geoengine_datatypes::primitives::CacheHint; use 
geoengine_datatypes::primitives::DateTime; @@ -234,7 +234,7 @@ mod tests { use geoengine_operators::plot::{ Histogram, HistogramBounds, HistogramBuckets, HistogramParams, Statistics, StatisticsParams, }; - use serde_json::{json, Value}; + use serde_json::{Value, json}; use tokio_postgres::NoTls; fn example_raster_source() -> Box { diff --git a/services/src/api/handlers/projects.rs b/services/src/api/handlers/projects.rs index 8d9289d423..245ecabefb 100644 --- a/services/src/api/handlers/projects.rs +++ b/services/src/api/handlers/projects.rs @@ -7,10 +7,10 @@ use crate::projects::{ UpdateProject, }; use crate::util::extractors::{ValidatedJson, ValidatedQuery}; -use actix_web::{web, FromRequest, HttpResponse, Responder, ResponseError}; +use actix_web::{FromRequest, HttpResponse, Responder, ResponseError, web}; use geoengine_datatypes::util::helpers::ge_report; -use snafu::prelude::*; use snafu::ResultExt; +use snafu::prelude::*; use std::fmt; use strum::IntoStaticStr; @@ -405,13 +405,13 @@ mod tests { ProjectListing, RasterSymbology, STRectangle, Symbology, UpdateProject, }; use crate::users::{UserAuth, UserSession}; + use crate::util::Identifier; use crate::util::tests::{ check_allowed_http_methods, create_project_helper, send_test_request, update_project_helper, }; - use crate::util::Identifier; use crate::workflows::workflow::WorkflowId; use actix_web::dev::ServiceResponse; - use actix_web::{http::header, http::Method, test}; + use actix_web::{http::Method, http::header, test}; use actix_web_httpauth::headers::authorization::Bearer; use geoengine_datatypes::operations::image::{Colorizer, RasterColorizer}; use geoengine_datatypes::primitives::{TimeGranularity, TimeStep}; diff --git a/services/src/api/handlers/spatial_references.rs b/services/src/api/handlers/spatial_references.rs index 227cb16894..82a5617ec5 100755 --- a/services/src/api/handlers/spatial_references.rs +++ b/services/src/api/handlers/spatial_references.rs @@ -3,7 +3,7 @@ use 
crate::api::model::datatypes::{ }; use crate::contexts::ApplicationContext; use crate::{error::Error, error::Result}; -use actix_web::{web, FromRequest, Responder}; +use actix_web::{FromRequest, Responder, web}; use proj_sys::PJ_PROJ_STRING_TYPE_PJ_PROJ_4; use serde::{Deserialize, Serialize}; use std::str::FromStr; @@ -45,20 +45,26 @@ impl ProjJson { pub fn axis_order(&self) -> Option { match &self.coordinate_system.axis { Some(axes) => match *axes.as_slice() { - [ProjJsonAxis { - direction: ProjJsonAxisDirection::North, - .. - }, ProjJsonAxis { - direction: ProjJsonAxisDirection::East, - .. - }] => Some(AxisOrder::NorthEast), - [ProjJsonAxis { - direction: ProjJsonAxisDirection::East, - .. - }, ProjJsonAxis { - direction: ProjJsonAxisDirection::North, - .. - }] => Some(AxisOrder::EastNorth), + [ + ProjJsonAxis { + direction: ProjJsonAxisDirection::North, + .. + }, + ProjJsonAxis { + direction: ProjJsonAxisDirection::East, + .. + }, + ] => Some(AxisOrder::NorthEast), + [ + ProjJsonAxis { + direction: ProjJsonAxisDirection::East, + .. + }, + ProjJsonAxis { + direction: ProjJsonAxisDirection::North, + .. 
+ }, + ] => Some(AxisOrder::EastNorth), _ => None, }, _ => None, diff --git a/services/src/api/handlers/tasks.rs b/services/src/api/handlers/tasks.rs index ffcd4a60f3..c94dfe01ce 100644 --- a/services/src/api/handlers/tasks.rs +++ b/services/src/api/handlers/tasks.rs @@ -3,7 +3,7 @@ use crate::error::Result; use crate::tasks::{TaskListOptions, TaskManager, TaskStatusWithId}; use crate::util::extractors::ValidatedQuery; use crate::{contexts::SessionContext, tasks::TaskId}; -use actix_web::{web, FromRequest, HttpResponse, Responder}; +use actix_web::{FromRequest, HttpResponse, Responder, web}; use serde::{Deserialize, Serialize}; use utoipa::{IntoParams, ToSchema}; @@ -172,7 +172,7 @@ mod tests { use crate::contexts::Session; use crate::ge_context; use crate::tasks::{ - util::test::wait_for_task_to_finish, Task, TaskContext, TaskStatus, TaskStatusInfo, + Task, TaskContext, TaskStatus, TaskStatusInfo, util::test::wait_for_task_to_finish, }; use crate::users::UserAuth; use crate::util::tests::{read_body_json, send_test_request}; diff --git a/services/src/api/handlers/upload.rs b/services/src/api/handlers/upload.rs index 9f042edb43..2bce18129a 100644 --- a/services/src/api/handlers/upload.rs +++ b/services/src/api/handlers/upload.rs @@ -5,7 +5,7 @@ use crate::error::Result; use crate::error::{self, Error}; use crate::util::path_with_base_path; use actix_multipart::Multipart; -use actix_web::{web, FromRequest, Responder}; +use actix_web::{FromRequest, Responder, web}; use futures::StreamExt; use gdal::vector::LayerAccess; use geoengine_datatypes::util::Identifier; @@ -203,7 +203,7 @@ mod tests { use crate::contexts::Session; use crate::ge_context; use crate::users::UserAuth; - use crate::util::tests::{send_test_request, SetMultipartBody, TestDataUploads}; + use crate::util::tests::{SetMultipartBody, TestDataUploads, send_test_request}; use actix_web::{http::header, test}; use actix_web_httpauth::headers::authorization::Bearer; use tokio_postgres::NoTls; diff --git 
a/services/src/api/handlers/users.rs b/services/src/api/handlers/users.rs index 7ea30baa5e..dacb0918c2 100644 --- a/services/src/api/handlers/users.rs +++ b/services/src/api/handlers/users.rs @@ -19,12 +19,12 @@ use crate::users::UserSession; use crate::users::{AuthCodeRequestURL, AuthCodeResponse, RoleDb, UserCredentials}; use crate::util::extractors::ValidatedJson; use actix_web::FromRequest; -use actix_web::{web, HttpResponse, Responder}; +use actix_web::{HttpResponse, Responder, web}; use geoengine_datatypes::error::BoxedResultExt; use serde::Deserialize; use serde::Serialize; -use snafu::ensure; use snafu::ResultExt; +use snafu::ensure; use utoipa::IntoParams; use utoipa::ToSchema; use uuid::Uuid; @@ -892,8 +892,8 @@ mod tests { use crate::permissions::Role; use crate::users::{AuthCodeRequestURL, OidcManager, UserAuth, UserId}; use crate::util::tests::mock_oidc::{ - mock_refresh_server, mock_token_response, mock_valid_provider_discovery, - MockRefreshServerConfig, MockTokenConfig, SINGLE_STATE, + MockRefreshServerConfig, MockTokenConfig, SINGLE_STATE, mock_refresh_server, + mock_token_response, mock_valid_provider_discovery, }; use crate::util::tests::{ admin_login, create_project_helper2, create_session_helper, register_ndvi_workflow_helper, @@ -901,7 +901,7 @@ mod tests { use crate::util::tests::{check_allowed_http_methods, read_body_string, send_test_request}; use actix_http::header::CONTENT_TYPE; use actix_web::dev::ServiceResponse; - use actix_web::{http::header, http::Method, test}; + use actix_web::{http::Method, http::header, test}; use actix_web_httpauth::headers::authorization::Bearer; use core::time::Duration; use geoengine_datatypes::operations::image::{Colorizer, RgbaColor}; diff --git a/services/src/api/handlers/wcs.rs b/services/src/api/handlers/wcs.rs index 02e90c33a5..ef62dc74c1 100644 --- a/services/src/api/handlers/wcs.rs +++ b/services/src/api/handlers/wcs.rs @@ -1,16 +1,16 @@ -use 
crate::api::handlers::spatial_references::{spatial_reference_specification, AxisOrder}; +use crate::api::handlers::spatial_references::{AxisOrder, spatial_reference_specification}; use crate::api::model::datatypes::TimeInterval; -use crate::api::ogc::util::{ogc_endpoint_url, OgcProtocol, OgcRequestGuard}; +use crate::api::ogc::util::{OgcProtocol, OgcRequestGuard, ogc_endpoint_url}; use crate::api::ogc::wcs::request::{DescribeCoverage, GetCapabilities, GetCoverage, WcsVersion}; use crate::config; use crate::config::get_config_element; use crate::contexts::{ApplicationContext, SessionContext}; use crate::error::Result; use crate::error::{self, Error}; -use crate::util::server::{connection_closed, not_implemented_handler, CacheControlHeader}; +use crate::util::server::{CacheControlHeader, connection_closed, not_implemented_handler}; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::WorkflowId; -use actix_web::{web, FromRequest, HttpRequest, HttpResponse}; +use actix_web::{FromRequest, HttpRequest, HttpResponse, web}; use geoengine_datatypes::primitives::{ AxisAlignedRectangle, BandSelection, RasterQueryRectangle, SpatialPartition2D, }; @@ -22,7 +22,7 @@ use geoengine_operators::engine::{ResultDescriptor, SingleRasterOrVectorSource}; use geoengine_operators::processing::{Reprojection, ReprojectionParams}; use geoengine_operators::util::input::RasterOrVectorOperator; use geoengine_operators::util::raster_stream_to_geotiff::{ - raster_stream_to_multiband_geotiff_bytes, GdalGeoTiffDatasetMetadata, GdalGeoTiffOptions, + GdalGeoTiffDatasetMetadata, GdalGeoTiffOptions, raster_stream_to_multiband_geotiff_bytes, }; use log::info; use snafu::ensure; @@ -541,6 +541,9 @@ mod tests { use actix_web::test; use actix_web_httpauth::headers::authorization::Bearer; use geoengine_datatypes::raster::{GridShape2D, TilingSpecification}; + use geoengine_datatypes::test_data; + use geoengine_datatypes::util::ImageFormat; + use 
geoengine_datatypes::util::assert_image_equals_with_format; use tokio_postgres::NoTls; #[ge_context::test] @@ -749,10 +752,10 @@ mod tests { let res = send_test_request(req, app_ctx).await; assert_eq!(res.status(), 200, "{:?}", res.response()); - assert_eq!( - include_bytes!("../../../../test_data/raster/geotiff_from_stream_compressed.tiff") - as &[u8], - test::read_body(res).await.as_ref() + assert_image_equals_with_format( + test_data!("raster/geotiff_from_stream_compressed.tiff"), + test::read_body(res).await.as_ref(), + ImageFormat::Tiff, ); } diff --git a/services/src/api/handlers/wfs.rs b/services/src/api/handlers/wfs.rs index 45a14355c3..1a5b4ad399 100644 --- a/services/src/api/handlers/wfs.rs +++ b/services/src/api/handlers/wfs.rs @@ -1,15 +1,15 @@ use crate::api::model::datatypes::TimeInterval; -use crate::api::ogc::util::{ogc_endpoint_url, OgcProtocol, OgcRequestGuard}; +use crate::api::ogc::util::{OgcProtocol, OgcRequestGuard, ogc_endpoint_url}; use crate::api::ogc::wfs::request::{GetCapabilities, GetFeature}; use crate::config; use crate::config::get_config_element; use crate::contexts::{ApplicationContext, SessionContext}; use crate::error; use crate::error::Result; -use crate::util::server::{connection_closed, not_implemented_handler, CacheControlHeader}; +use crate::util::server::{CacheControlHeader, connection_closed, not_implemented_handler}; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::{Workflow, WorkflowId}; -use actix_web::{web, FromRequest, HttpRequest, HttpResponse}; +use actix_web::{FromRequest, HttpRequest, HttpResponse, web}; use futures::future::BoxFuture; use futures_util::TryStreamExt; use geoengine_datatypes::collections::ToGeoJson; diff --git a/services/src/api/handlers/wms.rs b/services/src/api/handlers/wms.rs index 2737fe1a02..51cad2e26d 100644 --- a/services/src/api/handlers/wms.rs +++ b/services/src/api/handlers/wms.rs @@ -2,7 +2,7 @@ use crate::api::model::datatypes::{ RasterColorizer, 
SpatialReference, SpatialReferenceOption, TimeInterval, }; use crate::api::model::responses::ErrorResponse; -use crate::api::ogc::util::{ogc_endpoint_url, OgcProtocol, OgcRequestGuard}; +use crate::api::ogc::util::{OgcProtocol, OgcRequestGuard, ogc_endpoint_url}; use crate::api::ogc::wms::request::{ GetCapabilities, GetLegendGraphic, GetMap, GetMapExceptionFormat, }; @@ -11,10 +11,10 @@ use crate::config::get_config_element; use crate::contexts::{ApplicationContext, SessionContext}; use crate::error::Result; use crate::error::{self, Error}; -use crate::util::server::{connection_closed, not_implemented_handler, CacheControlHeader}; +use crate::util::server::{CacheControlHeader, connection_closed, not_implemented_handler}; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::WorkflowId; -use actix_web::{web, FromRequest, HttpRequest, HttpResponse}; +use actix_web::{FromRequest, HttpRequest, HttpResponse, web}; use geoengine_datatypes::primitives::SpatialResolution; use geoengine_datatypes::primitives::{ AxisAlignedRectangle, RasterQueryRectangle, SpatialPartition2D, @@ -484,16 +484,17 @@ mod tests { use crate::api::model::responses::ErrorResponse; use crate::contexts::PostgresContext; use crate::contexts::Session; + use crate::datasets::DatasetName; use crate::datasets::listing::DatasetProvider; use crate::datasets::storage::DatasetStore; - use crate::datasets::DatasetName; use crate::ge_context; use crate::users::UserAuth; - use crate::util::tests::{admin_login, register_ndvi_workflow_helper}; use crate::util::tests::{ - check_allowed_http_methods, read_body_string, register_ndvi_workflow_helper_with_cache_ttl, - register_ne2_multiband_workflow, send_test_request, MockQueryContext, + MockQueryContext, check_allowed_http_methods, read_body_string, + register_ndvi_workflow_helper_with_cache_ttl, register_ne2_multiband_workflow, + send_test_request, }; + use crate::util::tests::{admin_login, register_ndvi_workflow_helper}; use 
actix_http::header::{self, CONTENT_TYPE}; use actix_web::dev::ServiceResponse; use actix_web::http::Method; diff --git a/services/src/api/handlers/workflows.rs b/services/src/api/handlers/workflows.rs index 0ca892e3da..54b9a3b524 100755 --- a/services/src/api/handlers/workflows.rs +++ b/services/src/api/handlers/workflows.rs @@ -5,7 +5,7 @@ use crate::api::ogc::util::{parse_bbox, parse_time}; use crate::config::get_config_element; use crate::contexts::{ApplicationContext, SessionContext}; use crate::datasets::listing::{DatasetProvider, Provenance, ProvenanceOutput}; -use crate::datasets::{schedule_raster_dataset_from_workflow_task, RasterDatasetFromWorkflow}; +use crate::datasets::{RasterDatasetFromWorkflow, schedule_raster_dataset_from_workflow_task}; use crate::error::Result; use crate::layers::storage::LayerProviderDb; use crate::util::parsing::{ @@ -15,7 +15,7 @@ use crate::util::workflows::validate_workflow; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::{Workflow, WorkflowId}; use crate::workflows::{RasterWebsocketStreamHandler, VectorWebsocketStreamHandler}; -use actix_web::{web, FromRequest, HttpRequest, HttpResponse, Responder}; +use actix_web::{FromRequest, HttpRequest, HttpResponse, Responder, web}; use futures::future::join_all; use geoengine_datatypes::error::{BoxedResultExt, ErrorSource}; use geoengine_datatypes::primitives::{ @@ -33,7 +33,7 @@ use std::io::{Cursor, Write}; use std::sync::Arc; use utoipa::{IntoParams, ToSchema}; use uuid::Uuid; -use zip::{write::SimpleFileOptions, ZipWriter}; +use zip::{ZipWriter, write::SimpleFileOptions}; pub(crate) fn init_workflow_routes(cfg: &mut web::ServiceConfig) where @@ -667,12 +667,13 @@ mod tests { use crate::users::UserAuth; use crate::util::tests::admin_login; use crate::util::tests::{ - add_ndvi_to_datasets, check_allowed_http_methods, check_allowed_http_methods2, - read_body_string, register_ndvi_workflow_helper, send_test_request, TestDataUploads, + TestDataUploads, 
add_ndvi_to_datasets, check_allowed_http_methods, + check_allowed_http_methods2, read_body_string, register_ndvi_workflow_helper, + send_test_request, }; use crate::workflows::registry::WorkflowRegistry; use actix_web::dev::ServiceResponse; - use actix_web::{http::header, http::Method, test}; + use actix_web::{http::Method, http::header, test}; use actix_web_httpauth::headers::authorization::Bearer; use geoengine_datatypes::collections::MultiPointCollection; use geoengine_datatypes::primitives::CacheHint; @@ -682,6 +683,9 @@ mod tests { }; use geoengine_datatypes::raster::{GridShape, RasterDataType, TilingSpecification}; use geoengine_datatypes::spatial_reference::SpatialReference; + use geoengine_datatypes::test_data; + use geoengine_datatypes::util::ImageFormat; + use geoengine_datatypes::util::assert_image_equals_with_format; use geoengine_operators::engine::{ ExecutionContext, MultipleRasterOrSingleVectorSource, PlotOperator, RasterBandDescriptor, RasterBandDescriptors, TypedOperator, @@ -695,15 +699,15 @@ mod tests { use geoengine_operators::source::{GdalSource, GdalSourceParameters}; use geoengine_operators::util::input::MultiRasterOrVectorOperator::Raster; use geoengine_operators::util::raster_stream_to_geotiff::{ - single_timestep_raster_stream_to_geotiff_bytes, GdalGeoTiffDatasetMetadata, - GdalGeoTiffOptions, + GdalGeoTiffDatasetMetadata, GdalGeoTiffOptions, + single_timestep_raster_stream_to_geotiff_bytes, }; use serde_json::json; use std::io::Read; use std::sync::Arc; use tokio_postgres::NoTls; - use zip::read::ZipFile; use zip::ZipArchive; + use zip::read::ZipFile; async fn register_test_helper( app_ctx: PostgresContext, @@ -1480,10 +1484,10 @@ mod tests { .await .unwrap(); - assert_eq!( - include_bytes!("../../../../test_data/raster/geotiff_from_stream_compressed.tiff") - as &[u8], - result.as_slice() + assert_image_equals_with_format( + test_data!("raster/geotiff_from_stream_compressed.tiff"), + result.as_slice(), + ImageFormat::Tiff, ); } } diff 
--git a/services/src/api/model/datatypes.rs b/services/src/api/model/datatypes.rs index da11d280fa..0e262f6636 100644 --- a/services/src/api/model/datatypes.rs +++ b/services/src/api/model/datatypes.rs @@ -6,7 +6,7 @@ use geoengine_datatypes::primitives::{ }; use ordered_float::NotNan; use postgres_types::{FromSql, ToSql}; -use serde::{de::Visitor, Deserialize, Deserializer, Serialize, Serializer}; +use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Visitor}; use snafu::ResultExt; use std::{ collections::{BTreeMap, HashMap}, @@ -329,7 +329,7 @@ impl FromStr for SpatialReferenceAuthority { _ => { return Err(error::Error::InvalidSpatialReferenceString { spatial_reference_string: s.into(), - }) + }); } }) } @@ -2053,7 +2053,7 @@ impl From for (String, String) { impl From for geoengine_datatypes::util::StringPair { fn from(value: StringPair) -> Self { - Self::new(value.0 .0, value.0 .1) + Self::new(value.0.0, value.0.1) } } diff --git a/services/src/api/model/operators.rs b/services/src/api/model/operators.rs index 3196cf8598..7a0cab4237 100644 --- a/services/src/api/model/operators.rs +++ b/services/src/api/model/operators.rs @@ -1361,17 +1361,19 @@ mod tests { .unwrap() ); - assert!(serde_json::from_value::(json!([{ - "name": "foo", - "measurement": { - "type": "unitless" - } - },{ - "name": "foo", - "measurement": { - "type": "unitless" - } - }])) - .is_err()); + assert!( + serde_json::from_value::(json!([{ + "name": "foo", + "measurement": { + "type": "unitless" + } + },{ + "name": "foo", + "measurement": { + "type": "unitless" + } + }])) + .is_err() + ); } } diff --git a/services/src/api/model/responses/mod.rs b/services/src/api/model/responses/mod.rs index de558f88c7..70a98166ed 100644 --- a/services/src/api/model/responses/mod.rs +++ b/services/src/api/model/responses/mod.rs @@ -2,7 +2,7 @@ pub mod datasets; pub mod ml_models; use actix_http::StatusCode; -use actix_web::{dev::ServiceResponse, HttpResponse}; +use actix_web::{HttpResponse, 
dev::ServiceResponse}; use convert_case::{Converter, Pattern}; use serde::{Deserialize, Serialize}; use std::fmt; diff --git a/services/src/api/model/services.rs b/services/src/api/model/services.rs index 3a54ccf188..70827a9e07 100644 --- a/services/src/api/model/services.rs +++ b/services/src/api/model/services.rs @@ -2,9 +2,9 @@ use crate::api::model::operators::{ GdalMetaDataList, GdalMetaDataRegular, GdalMetaDataStatic, GdalMetadataNetCdfCf, MockMetaData, OgrMetaData, }; +use crate::datasets::DatasetName; use crate::datasets::storage::validate_tags; use crate::datasets::upload::{UploadId, VolumeName}; -use crate::datasets::DatasetName; use crate::projects::Symbology; use serde::{Deserialize, Serialize}; use utoipa::ToSchema; diff --git a/services/src/api/ogc/util.rs b/services/src/api/ogc/util.rs index a9616042b7..f71ec04e45 100644 --- a/services/src/api/ogc/util.rs +++ b/services/src/api/ogc/util.rs @@ -7,12 +7,12 @@ use serde::de::Error; use serde::{Deserialize, Serialize}; use snafu::ensure; use std::str::FromStr; -use utoipa::openapi::{ObjectBuilder, SchemaType}; use utoipa::ToSchema; +use utoipa::openapi::{ObjectBuilder, SchemaType}; use super::wcs::request::WcsBoundingbox; use super::wfs::request::WfsResolution; -use crate::api::handlers::spatial_references::{spatial_reference_specification, AxisOrder}; +use crate::api::handlers::spatial_references::{AxisOrder, spatial_reference_specification}; use crate::api::model::datatypes::TimeInterval; use crate::error::{self, Result}; use crate::workflows::workflow::WorkflowId; @@ -183,7 +183,7 @@ where _ => { return Err(D::Error::custom(format!( "cannot parse bbox from string: {s}" - ))) + ))); } }; @@ -321,8 +321,8 @@ impl Guard for OgcRequestGuard<'_> { #[cfg(test)] mod tests { use crate::api::model::datatypes::SpatialReferenceAuthority; - use serde::de::value::StringDeserializer; use serde::de::IntoDeserializer; + use serde::de::value::StringDeserializer; use super::*; diff --git 
a/services/src/api/ogc/wcs/request.rs b/services/src/api/ogc/wcs/request.rs index 74f6d0c485..80e935aa39 100644 --- a/services/src/api/ogc/wcs/request.rs +++ b/services/src/api/ogc/wcs/request.rs @@ -232,7 +232,7 @@ where #[cfg(test)] mod tests { - use serde::de::{value::StringDeserializer, IntoDeserializer}; + use serde::de::{IntoDeserializer, value::StringDeserializer}; use crate::api::model::datatypes::SpatialReferenceAuthority; diff --git a/services/src/api/ogc/wfs/request.rs b/services/src/api/ogc/wfs/request.rs index 86d134374e..c34c728706 100644 --- a/services/src/api/ogc/wfs/request.rs +++ b/services/src/api/ogc/wfs/request.rs @@ -1,6 +1,6 @@ use crate::api::model::datatypes::TimeInterval; use crate::api::ogc::util::{ - parse_ogc_bbox, parse_time_option, parse_wfs_resolution_option, OgcBoundingBox, + OgcBoundingBox, parse_ogc_bbox, parse_time_option, parse_wfs_resolution_option, }; use crate::util::from_str_option; use geoengine_datatypes::primitives::SpatialResolution; diff --git a/services/src/api/ogc/wms/request.rs b/services/src/api/ogc/wms/request.rs index 83516a79c0..91bcced779 100644 --- a/services/src/api/ogc/wms/request.rs +++ b/services/src/api/ogc/wms/request.rs @@ -1,5 +1,5 @@ use crate::api::model::datatypes::{SpatialReference, TimeInterval}; -use crate::api::ogc::util::{parse_ogc_bbox, parse_time_option, OgcBoundingBox}; +use crate::api::ogc::util::{OgcBoundingBox, parse_ogc_bbox, parse_time_option}; use crate::util::{bool_option_case_insensitive, from_str}; use serde::{Deserialize, Serialize}; use utoipa::{IntoParams, ToSchema}; diff --git a/services/src/bin/geoengine-cli.rs b/services/src/bin/geoengine-cli.rs index fdcda6d205..86b9125b8b 100644 --- a/services/src/bin/geoengine-cli.rs +++ b/services/src/bin/geoengine-cli.rs @@ -1,6 +1,6 @@ use clap::{Parser, Subcommand}; use geoengine_services::cli::{ - check_heartbeat, check_successful_startup, CheckSuccessfulStartup, Heartbeat, + CheckSuccessfulStartup, Heartbeat, check_heartbeat, 
check_successful_startup, }; /// CLI for Geo Engine Utilities diff --git a/services/src/bin/geoengine-server.rs b/services/src/bin/geoengine-server.rs index 9c036d0823..e5efd94363 100644 --- a/services/src/bin/geoengine-server.rs +++ b/services/src/bin/geoengine-server.rs @@ -5,14 +5,14 @@ use geoengine_services::config; use geoengine_services::config::get_config_element; use geoengine_services::error::Result; use tracing::Subscriber; +use tracing_subscriber::EnvFilter; +use tracing_subscriber::Layer; use tracing_subscriber::field::RecordFields; -use tracing_subscriber::fmt::format::{DefaultFields, Writer}; use tracing_subscriber::fmt::FormatFields; +use tracing_subscriber::fmt::format::{DefaultFields, Writer}; use tracing_subscriber::layer::Filter; use tracing_subscriber::prelude::*; use tracing_subscriber::registry::LookupSpan; -use tracing_subscriber::EnvFilter; -use tracing_subscriber::Layer; /// Starts the server. /// @@ -71,7 +71,7 @@ fn open_telemetry_layer( ) -> Result< tracing_opentelemetry::OpenTelemetryLayer< S, - impl opentelemetry::trace::Tracer + tracing_opentelemetry::PreSampledTracer, + impl opentelemetry::trace::Tracer + tracing_opentelemetry::PreSampledTracer + use, >, > where @@ -130,7 +130,7 @@ fn file_layer_with_filter + 'static>( filename_prefix: &str, log_directory: Option<&str>, filter: F, -) -> (impl Layer, FileLogWriterHandle) +) -> (impl Layer + use, FileLogWriterHandle) where S: Subscriber, for<'a> S: LookupSpan<'a>, @@ -196,7 +196,7 @@ fn reroute_gdal_logging() { fn configure_error_report_formatting(logging_config: &config::Logging) { if logging_config.raw_error_messages { // there is no way to configure snafu::Report other than through env variables - std::env::set_var("SNAFU_RAW_ERROR_MESSAGES", "1"); + unsafe { std::env::set_var("SNAFU_RAW_ERROR_MESSAGES", "1") }; } } diff --git a/services/src/cli/mod.rs b/services/src/cli/mod.rs index 2ca5d1ead0..3d7760b75c 100644 --- a/services/src/cli/mod.rs +++ b/services/src/cli/mod.rs @@ -1,5 
+1,5 @@ mod check_successful_startup; mod heartbeat; -pub use check_successful_startup::{check_successful_startup, CheckSuccessfulStartup}; -pub use heartbeat::{check_heartbeat, Heartbeat}; +pub use check_successful_startup::{CheckSuccessfulStartup, check_successful_startup}; +pub use heartbeat::{Heartbeat, check_heartbeat}; diff --git a/services/src/contexts/db_types.rs b/services/src/contexts/db_types.rs index 7d5c3da44c..f21e538865 100644 --- a/services/src/contexts/db_types.rs +++ b/services/src/contexts/db_types.rs @@ -2,6 +2,8 @@ use crate::{ datasets::{ dataset_listing_provider::DatasetLayerListingProviderDefinition, external::{ + CopernicusDataspaceDataProviderDefinition, GdalRetries, + SentinelS2L2ACogsProviderDefinition, StacApiRetries, aruna::ArunaDataProviderDefinition, edr::{EdrDataProviderDefinition, EdrVectorSpec}, gbif::GbifDataProviderDefinition, @@ -9,8 +11,6 @@ use crate::{ gfbio_collections::GfbioCollectionsDataProviderDefinition, netcdfcf::{EbvPortalDataProviderDefinition, NetCdfCfDataProviderDefinition}, pangaea::PangaeaDataProviderDefinition, - CopernicusDataspaceDataProviderDefinition, GdalRetries, - SentinelS2L2ACogsProviderDefinition, StacApiRetries, }, listing::Provenance, storage::MetaDataDefinition, diff --git a/services/src/contexts/migrations/database_migration.rs b/services/src/contexts/migrations/database_migration.rs index cbca3db32e..9c0e79df6a 100644 --- a/services/src/contexts/migrations/database_migration.rs +++ b/services/src/contexts/migrations/database_migration.rs @@ -1,12 +1,12 @@ use crate::error::{Result, UnexpectedDatabaseVersionDuringMigration}; use async_trait::async_trait; -use bb8_postgres::{bb8::PooledConnection, PostgresConnectionManager}; +use bb8_postgres::{PostgresConnectionManager, bb8::PooledConnection}; use log::info; use snafu::ensure; use tokio_postgres::{ + Socket, Transaction, error::SqlState, tls::{MakeTlsConnect, TlsConnect}, - Socket, Transaction, }; pub type DatabaseVersion = String; @@ -195,8 +195,8 
@@ mod tests { config::get_config_element, contexts::PostgresDb, contexts::{ - migrations::{all_migrations, CurrentSchemaMigration, Migration0015LogQuota}, SessionId, + migrations::{CurrentSchemaMigration, Migration0015LogQuota, all_migrations}, }, permissions::RoleId, projects::{ProjectDb, ProjectListOptions}, @@ -204,7 +204,7 @@ mod tests { util::postgres::DatabaseConnectionConfig, workflows::{registry::WorkflowRegistry, workflow::WorkflowId}, }; - use bb8_postgres::{bb8::Pool, PostgresConnectionManager}; + use bb8_postgres::{PostgresConnectionManager, bb8::Pool}; use geoengine_datatypes::{primitives::DateTime, test_data}; use tokio_postgres::NoTls; diff --git a/services/src/contexts/migrations/migration_0016_merge_providers.rs b/services/src/contexts/migrations/migration_0016_merge_providers.rs index 6fbccafc50..873a2da813 100644 --- a/services/src/contexts/migrations/migration_0016_merge_providers.rs +++ b/services/src/contexts/migrations/migration_0016_merge_providers.rs @@ -1,6 +1,6 @@ use super::{ - database_migration::{DatabaseVersion, Migration}, Migration0015LogQuota, + database_migration::{DatabaseVersion, Migration}, }; use crate::error::Result; use async_trait::async_trait; @@ -31,7 +31,7 @@ mod tests { use crate::contexts::migrations::all_migrations; use crate::util::postgres::DatabaseConnectionConfig; use crate::{config::get_config_element, contexts::migrate_database}; - use bb8_postgres::{bb8::Pool, PostgresConnectionManager}; + use bb8_postgres::{PostgresConnectionManager, bb8::Pool}; use tokio_postgres::NoTls; #[tokio::test(flavor = "multi_thread", worker_threads = 1)] diff --git a/services/src/contexts/migrations/mod.rs b/services/src/contexts/migrations/mod.rs index 5887553983..6bad12a8b8 100644 --- a/services/src/contexts/migrations/mod.rs +++ b/services/src/contexts/migrations/mod.rs @@ -3,7 +3,7 @@ pub use crate::contexts::migrations::{ migration_0016_merge_providers::Migration0016MergeProviders, }; pub use database_migration::{ - 
initialize_database, migrate_database, DatabaseVersion, Migration, MigrationResult, + DatabaseVersion, Migration, MigrationResult, initialize_database, migrate_database, }; mod current_schema; @@ -15,7 +15,7 @@ mod migration_0016_merge_providers; mod schema_info; #[cfg(test)] -pub(crate) use schema_info::{assert_migration_schema_eq, AssertSchemaEqPopulationConfig}; +pub(crate) use schema_info::{AssertSchemaEqPopulationConfig, assert_migration_schema_eq}; /// All migrations that are available. The migrations are applied in the order they are defined here, starting from the current version of the database. /// diff --git a/services/src/contexts/migrations/schema_info.rs b/services/src/contexts/migrations/schema_info.rs index a3883b3d4f..7ef5fb3dfe 100644 --- a/services/src/contexts/migrations/schema_info.rs +++ b/services/src/contexts/migrations/schema_info.rs @@ -1,6 +1,6 @@ use bb8_postgres::{ - bb8::{Pool, PooledConnection}, PostgresConnectionManager, + bb8::{Pool, PooledConnection}, }; use futures::future::BoxFuture; use tokio_postgres::{NoTls, Transaction}; diff --git a/services/src/contexts/mod.rs b/services/src/contexts/mod.rs index bb3c1b2d2d..113c68ec9b 100644 --- a/services/src/contexts/mod.rs +++ b/services/src/contexts/mod.rs @@ -1,5 +1,5 @@ use crate::api::model::services::Volume; -use crate::config::{get_config_element, Cache, QuotaTrackingMode}; +use crate::config::{Cache, QuotaTrackingMode, get_config_element}; use crate::datasets::external::netcdfcf::NetCdfCfProviderDb; use crate::datasets::storage::DatasetDb; use crate::error::Result; @@ -33,8 +33,8 @@ use tokio::sync::RwLock; use uuid::Uuid; pub use migrations::{ - initialize_database, migrate_database, CurrentSchemaMigration, DatabaseVersion, Migration, - MigrationResult, + CurrentSchemaMigration, DatabaseVersion, Migration, MigrationResult, initialize_database, + migrate_database, }; pub use postgres::PostgresDb; pub use postgres::{PostgresContext, PostgresSessionContext}; @@ -357,10 +357,10 @@ 
where ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { diff --git a/services/src/contexts/postgres.rs b/services/src/contexts/postgres.rs index 0bf204ab97..426d1d36bc 100644 --- a/services/src/contexts/postgres.rs +++ b/services/src/contexts/postgres.rs @@ -1,14 +1,14 @@ use self::migrations::all_migrations; use crate::api::model::services::Volume; -use crate::config::{get_config_element, Cache, Oidc, Quota}; +use crate::config::{Cache, Oidc, Quota, get_config_element}; use crate::contexts::{ - initialize_database, migrations, ApplicationContext, CurrentSchemaMigration, MigrationResult, - QueryContextImpl, SessionId, + ApplicationContext, CurrentSchemaMigration, MigrationResult, QueryContextImpl, SessionId, + initialize_database, migrations, }; use crate::contexts::{ExecutionContextImpl, QuotaCheckerImpl}; use crate::contexts::{GeoEngineDb, SessionContext}; -use crate::datasets::upload::Volumes; use crate::datasets::DatasetName; +use crate::datasets::upload::Volumes; use crate::error::{self, Error, Result}; use crate::layers::add_from_directory::{ add_datasets_from_directory, add_layer_collections_from_directory, add_layers_from_directory, @@ -16,17 +16,17 @@ use crate::layers::add_from_directory::{ }; use crate::machine_learning::error::MachineLearningError; use crate::machine_learning::name::MlModelName; -use crate::quota::{initialize_quota_tracking, QuotaTrackingFactory}; +use crate::quota::{QuotaTrackingFactory, initialize_quota_tracking}; use crate::tasks::SimpleTaskManagerContext; use crate::tasks::{TypedTaskManagerBackend, UserTaskManager}; use crate::users::OidcManager; use crate::users::{UserAuth, UserSession}; use async_trait::async_trait; use bb8_postgres::{ + PostgresConnectionManager, bb8::Pool, bb8::PooledConnection, - 
tokio_postgres::{tls::MakeTlsConnect, tls::TlsConnect, Config, Socket}, - PostgresConnectionManager, + tokio_postgres::{Config, Socket, tls::MakeTlsConnect, tls::TlsConnect}, }; use geoengine_datatypes::raster::TilingSpecification; use geoengine_datatypes::util::test::TestDefault; @@ -238,7 +238,7 @@ where .await?; } DatabaseStatus::InitializedKeepDatabase if postgres_config.clear_database_on_start => { - return Err(Error::ClearDatabaseOnStartupNotAllowed) + return Err(Error::ClearDatabaseOnStartupNotAllowed); } DatabaseStatus::InitializedClearDatabase | DatabaseStatus::InitializedKeepDatabase @@ -481,8 +481,8 @@ mod tests { LayerCollectionId, LayerCollectionProvider, SearchParameters, SearchType, }; use crate::layers::storage::{ - LayerDb, LayerProviderDb, LayerProviderListing, LayerProviderListingOptions, - INTERNAL_PROVIDER_ID, + INTERNAL_PROVIDER_ID, LayerDb, LayerProviderDb, LayerProviderListing, + LayerProviderListingOptions, }; use crate::machine_learning::{MlModel, MlModelDb, MlModelIdAndName, MlModelMetadata}; use crate::permissions::{Permission, PermissionDb, Role, RoleDescription, RoleId}; @@ -493,8 +493,8 @@ mod tests { }; use crate::users::{OidcTokens, SessionTokenStore}; use crate::users::{RoleDb, UserClaims, UserCredentials, UserDb, UserId, UserRegistration}; - use crate::util::tests::mock_oidc::{mock_refresh_server, MockRefreshServerConfig}; - use crate::util::tests::{admin_login, register_ndvi_workflow_helper, MockQuotaTracking}; + use crate::util::tests::mock_oidc::{MockRefreshServerConfig, mock_refresh_server}; + use crate::util::tests::{MockQuotaTracking, admin_login, register_ndvi_workflow_helper}; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::Workflow; use bb8_postgres::tokio_postgres::NoTls; @@ -659,10 +659,11 @@ mod tests { ) { let db = app_ctx.session_context(session.clone()).db(); - assert!(db - .has_permission(project_id, Permission::Owner) - .await - .unwrap()); + assert!( + 
db.has_permission(project_id, Permission::Owner) + .await + .unwrap() + ); let user2 = app_ctx .register_user(UserRegistration { @@ -682,19 +683,21 @@ mod tests { .unwrap(); let db2 = app_ctx.session_context(session2.clone()).db(); - assert!(!db2 - .has_permission(project_id, Permission::Owner) - .await - .unwrap()); + assert!( + !db2.has_permission(project_id, Permission::Owner) + .await + .unwrap() + ); db.add_permission(user2.into(), project_id, Permission::Read) .await .unwrap(); - assert!(db2 - .has_permission(project_id, Permission::Read) - .await - .unwrap()); + assert!( + db2.has_permission(project_id, Permission::Read) + .await + .unwrap() + ); } #[allow(clippy::too_many_lines)] @@ -3245,22 +3248,9 @@ mod tests { let root_collection_id = layer_db.get_root_layer_collection_id().await.unwrap(); if capabilities.search_types.fulltext { - assert!(layer_db - .search( - &root_collection_id, - SearchParameters { - search_type: SearchType::Fulltext, - search_string: String::new(), - limit: 10, - offset: 0, - }, - ) - .await - .is_ok()); - - if capabilities.autocomplete { - assert!(layer_db - .autocomplete_search( + assert!( + layer_db + .search( &root_collection_id, SearchParameters { search_type: SearchType::Fulltext, @@ -3270,39 +3260,45 @@ mod tests { }, ) .await - .is_ok()); + .is_ok() + ); + + if capabilities.autocomplete { + assert!( + layer_db + .autocomplete_search( + &root_collection_id, + SearchParameters { + search_type: SearchType::Fulltext, + search_string: String::new(), + limit: 10, + offset: 0, + }, + ) + .await + .is_ok() + ); } else { - assert!(layer_db - .autocomplete_search( - &root_collection_id, - SearchParameters { - search_type: SearchType::Fulltext, - search_string: String::new(), - limit: 10, - offset: 0, - }, - ) - .await - .is_err()); + assert!( + layer_db + .autocomplete_search( + &root_collection_id, + SearchParameters { + search_type: SearchType::Fulltext, + search_string: String::new(), + limit: 10, + offset: 0, + }, + ) + .await + 
.is_err() + ); } } if capabilities.search_types.prefix { - assert!(layer_db - .search( - &root_collection_id, - SearchParameters { - search_type: SearchType::Prefix, - search_string: String::new(), - limit: 10, - offset: 0, - }, - ) - .await - .is_ok()); - - if capabilities.autocomplete { - assert!(layer_db - .autocomplete_search( + assert!( + layer_db + .search( &root_collection_id, SearchParameters { search_type: SearchType::Prefix, @@ -3312,20 +3308,39 @@ mod tests { }, ) .await - .is_ok()); + .is_ok() + ); + + if capabilities.autocomplete { + assert!( + layer_db + .autocomplete_search( + &root_collection_id, + SearchParameters { + search_type: SearchType::Prefix, + search_string: String::new(), + limit: 10, + offset: 0, + }, + ) + .await + .is_ok() + ); } else { - assert!(layer_db - .autocomplete_search( - &root_collection_id, - SearchParameters { - search_type: SearchType::Prefix, - search_string: String::new(), - limit: 10, - offset: 0, - }, - ) - .await - .is_err()); + assert!( + layer_db + .autocomplete_search( + &root_collection_id, + SearchParameters { + search_type: SearchType::Prefix, + search_string: String::new(), + limit: 10, + offset: 0, + }, + ) + .await + .is_err() + ); } } } diff --git a/services/src/datasets/create_from_workflow.rs b/services/src/datasets/create_from_workflow.rs index 0c107d4eea..847e42a309 100644 --- a/services/src/datasets/create_from_workflow.rs +++ b/services/src/datasets/create_from_workflow.rs @@ -1,9 +1,9 @@ use crate::api::model::datatypes::RasterQueryRectangle; use crate::contexts::SessionContext; +use crate::datasets::AddDataset; use crate::datasets::listing::DatasetProvider; use crate::datasets::storage::{DatasetDefinition, DatasetStore, MetaDataDefinition}; use crate::datasets::upload::{UploadId, UploadRootPath}; -use crate::datasets::AddDataset; use crate::error; use crate::tasks::{Task, TaskId, TaskManager, TaskStatusInfo}; use crate::workflows::workflow::{Workflow, WorkflowId}; @@ -19,11 +19,11 @@ use 
geoengine_operators::source::{ GdalLoadingInfoTemporalSlice, GdalMetaDataList, GdalMetaDataStatic, }; use geoengine_operators::util::raster_stream_to_geotiff::{ - raster_stream_to_geotiff, GdalCompressionNumThreads, GdalGeoTiffDatasetMetadata, - GdalGeoTiffOptions, + GdalCompressionNumThreads, GdalGeoTiffDatasetMetadata, GdalGeoTiffOptions, + raster_stream_to_geotiff, }; use serde::{Deserialize, Serialize}; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use std::path::PathBuf; use std::sync::Arc; use tokio::fs; diff --git a/services/src/datasets/dataset_listing_provider.rs b/services/src/datasets/dataset_listing_provider.rs index 5bc26cee3e..3820c2267e 100644 --- a/services/src/datasets/dataset_listing_provider.rs +++ b/services/src/datasets/dataset_listing_provider.rs @@ -371,10 +371,10 @@ where ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -454,7 +454,7 @@ mod tests { use crate::{ contexts::SessionContext, contexts::{PostgresContext, PostgresDb, PostgresSessionContext}, - datasets::{storage::DatasetStore, AddDataset}, + datasets::{AddDataset, storage::DatasetStore}, ge_context, layers::storage::LayerProviderDb, }; @@ -578,18 +578,20 @@ mod tests { vec!["GdalDataset"] ); - assert!(provider - .search( - &layer_collection_id_root, - SearchParameters { - search_string: "Gdal".to_string(), - search_type: SearchType::Prefix, - offset: 0, - limit: 10, - }, - ) - .await - .is_err()); + assert!( + provider + .search( + &layer_collection_id_root, + SearchParameters { + search_string: "Gdal".to_string(), + search_type: SearchType::Prefix, + offset: 0, + limit: 10, + }, + ) + .await + .is_err() + ); } #[ge_context::test(user = "admin")] @@ -678,18 +680,20 @@ mod tests { vec!["GdalDataset"] ); - assert!(provider - .autocomplete_search( - 
&layer_collection_id_root, - SearchParameters { - search_string: "Gdal".to_string(), - search_type: SearchType::Prefix, - offset: 0, - limit: 10, - }, - ) - .await - .is_err()); + assert!( + provider + .autocomplete_search( + &layer_collection_id_root, + SearchParameters { + search_string: "Gdal".to_string(), + search_type: SearchType::Prefix, + offset: 0, + limit: 10, + }, + ) + .await + .is_err() + ); } async fn add_two_datasets(db: &PostgresDb) { diff --git a/services/src/datasets/external/aruna/mock_grpc_server.rs b/services/src/datasets/external/aruna/mock_grpc_server.rs index 2923973e05..ab650be28b 100644 --- a/services/src/datasets/external/aruna/mock_grpc_server.rs +++ b/services/src/datasets/external/aruna/mock_grpc_server.rs @@ -8,10 +8,10 @@ use std::hash::Hash; use std::marker::PhantomData; use std::net::{SocketAddr, TcpListener, TcpStream}; use std::sync::Arc; +use tonic::Code; use tonic::codec::ProstCodec; -use tonic::codegen::{http, Body, BoxFuture}; +use tonic::codegen::{Body, BoxFuture, http}; use tonic::transport::server::Router; -use tonic::Code; pub type InfallibleHttpResponseFuture = tonic::codegen::BoxFuture, Infallible>; @@ -94,11 +94,11 @@ where } impl< - Key: Clone + Eq + Hash + Send + 'static, - RequestMessage: Message + Default + Clone + 'static, - ResponseMessage: Message + Clone + 'static, - F: FnMut(RequestMessage) -> Key + Clone + Send + 'static, - > MapResponseService + Key: Clone + Eq + Hash + Send + 'static, + RequestMessage: Message + Default + Clone + 'static, + ResponseMessage: Message + Clone + 'static, + F: FnMut(RequestMessage) -> Key + Clone + Send + 'static, +> MapResponseService { pub fn new(item: HashMap, key_func: F) -> Self { MapResponseService { diff --git a/services/src/datasets/external/aruna/mod.rs b/services/src/datasets/external/aruna/mod.rs index 8197ad11f2..db8a048cd2 100644 --- a/services/src/datasets/external/aruna/mod.rs +++ b/services/src/datasets/external/aruna/mod.rs @@ -668,10 +668,10 @@ impl ) -> 
geoengine_operators::util::Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, > { Err(geoengine_operators::error::Error::NotYetImplemented) @@ -1062,8 +1062,8 @@ mod tests { use std::str::FromStr; use std::task::Poll; - use aruna_rust_api::api::storage::models::v2::relation::Relation as ArunaRelationDirection; use aruna_rust_api::api::storage::models::v2::Relation as ArunaRelationStruct; + use aruna_rust_api::api::storage::models::v2::relation::Relation as ArunaRelationDirection; use aruna_rust_api::api::storage::models::v2::{ DataClass, Dataset, InternalRelation, InternalRelationVariant, KeyValue, KeyValueVariant, Object, Project, RelationDirection, ResourceVariant, Status, @@ -1075,14 +1075,14 @@ mod tests { }; use futures::StreamExt; use httptest::responders::status_code; - use httptest::{responders, Expectation, Server}; - use serde_json::{json, Value}; + use httptest::{Expectation, Server, responders}; + use serde_json::{Value, json}; use tokio::fs::File; use tokio::io::AsyncReadExt; + use tonic::Code; use tonic::codegen::http::Request; - use tonic::codegen::{http, Body, Service}; + use tonic::codegen::{Body, Service, http}; use tonic::transport::server::Router; - use tonic::Code; use geoengine_datatypes::collections::{FeatureCollectionInfos, MultiPointCollection}; use geoengine_datatypes::dataset::{DataId, DataProviderId, ExternalDataId, LayerId}; diff --git a/services/src/datasets/external/copernicus_dataspace/ids.rs b/services/src/datasets/external/copernicus_dataspace/ids.rs index d8777263fc..5ac53c5e2b 100644 --- a/services/src/datasets/external/copernicus_dataspace/ids.rs +++ b/services/src/datasets/external/copernicus_dataspace/ids.rs @@ -422,7 +422,7 @@ impl From for NamedData { match id.0 { CopernicusDataspaceLayerId::Sentinel2(sentinel2) => NamedData { namespace: None, - provider: Some(id.1 
.0.to_string()), + provider: Some(id.1.0.to_string()), name: format!("datasets/{sentinel2}"), }, } diff --git a/services/src/datasets/external/copernicus_dataspace/provider.rs b/services/src/datasets/external/copernicus_dataspace/provider.rs index 52129a382e..84e06ec109 100644 --- a/services/src/datasets/external/copernicus_dataspace/provider.rs +++ b/services/src/datasets/external/copernicus_dataspace/provider.rs @@ -442,10 +442,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { diff --git a/services/src/datasets/external/copernicus_dataspace/sentinel2.rs b/services/src/datasets/external/copernicus_dataspace/sentinel2.rs index 17ff2e3793..c0120018ca 100644 --- a/services/src/datasets/external/copernicus_dataspace/sentinel2.rs +++ b/services/src/datasets/external/copernicus_dataspace/sentinel2.rs @@ -18,8 +18,8 @@ use geoengine_operators::{ GdalLoadingInfoTemporalSliceIterator, }, util::{ - gdal::{gdal_open_dataset_ex, gdal_parameters_from_dataset}, TemporaryGdalThreadLocalConfigOptions, + gdal::{gdal_open_dataset_ex, gdal_parameters_from_dataset}, }, }; @@ -293,10 +293,9 @@ mod tests { }; use geoengine_operators::source::{FileNotFoundHandling, GdalDatasetGeoTransform}; use httptest::{ - all_of, + Expectation, Server, all_of, matchers::{contains, request, url_decoded}, responders::status_code, - Expectation, Server, }; use crate::datasets::external::copernicus_dataspace::ids::{L2ABand, UtmZoneDirection}; @@ -440,12 +439,12 @@ mod tests { .unwrap(); add_partial_responses( - &mock_server, - "/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B02_10m.jp2", - &[(0, 16383)], - 132_214_802, - b02_jp2_head_body, - ); + 
&mock_server, + "/eodata/Sentinel-2/MSI/L2A_N0500/2020/07/03/S2A_MSIL2A_20200703T103031_N0500_R108_T32UMB_20230321T201840.SAFE/GRANULE/L2A_T32UMB_A026274_20200703T103027/IMG_DATA/R10m/T32UMB_20200703T103031_B02_10m.jp2", + &[(0, 16383)], + 132_214_802, + b02_jp2_head_body, + ); // B08 let b08_jp2_head_body = tokio::fs::read(test_data!( diff --git a/services/src/datasets/external/copernicus_dataspace/stac.rs b/services/src/datasets/external/copernicus_dataspace/stac.rs index 58ccfc1e57..39ea96901f 100644 --- a/services/src/datasets/external/copernicus_dataspace/stac.rs +++ b/services/src/datasets/external/copernicus_dataspace/stac.rs @@ -151,7 +151,9 @@ pub async fn load_stac_items( // there may be more items available, so go to next page, if possible if page >= MAX_NUM_PAGES { - log::warn!("Copernicus Data Provider reached maximum number of pages of the STAC API and there may be more items available. This may lead to incomplete results. Try shorter queries."); + log::warn!( + "Copernicus Data Provider reached maximum number of pages of the STAC API and there may be more items available. This may lead to incomplete results. Try shorter queries." 
+ ); break; } diff --git a/services/src/datasets/external/edr.rs b/services/src/datasets/external/edr.rs index e9d8fec79f..7d8ee757b0 100644 --- a/services/src/datasets/external/edr.rs +++ b/services/src/datasets/external/edr.rs @@ -34,8 +34,8 @@ use geoengine_operators::source::{ OgrSourceDataset, OgrSourceDatasetTimeType, OgrSourceDurationSpec, OgrSourceErrorSpec, OgrSourceParameters, OgrSourceTimeFormat, }; -use geoengine_operators::util::gdal::gdal_open_dataset; use geoengine_operators::util::TemporaryGdalThreadLocalConfigOptions; +use geoengine_operators::util::gdal::gdal_open_dataset; use reqwest::Client; use serde::{Deserialize, Serialize}; use snafu::prelude::*; @@ -865,7 +865,7 @@ impl TryFrom for LayerCollectionId { parameter, } => format!("collections!{collection}!{parameter}"), EdrCollectionId::ParameterAndHeight { .. } => { - return Err(Error::InvalidLayerCollectionId) + return Err(Error::InvalidLayerCollectionId); } }; @@ -1034,10 +1034,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -1222,7 +1222,7 @@ mod tests { util::gdal::hide_gdal_errors, }; use geoengine_operators::{engine::ResultDescriptor, source::GdalDatasetGeoTransform}; - use httptest::{matchers::*, responders::status_code, Expectation, Server}; + use httptest::{Expectation, Server, matchers::*, responders::status_code}; use std::{ops::Range, path::PathBuf}; use tokio_postgres::NoTls; diff --git a/services/src/datasets/external/gbif.rs b/services/src/datasets/external/gbif.rs index c84a0f51a2..3712aa63b2 100644 --- a/services/src/datasets/external/gbif.rs +++ b/services/src/datasets/external/gbif.rs @@ -13,8 +13,8 @@ use crate::layers::listing::{ use crate::util::postgres::DatabaseConnectionConfig; use crate::workflows::workflow::Workflow; use async_trait::async_trait; 
-use bb8_postgres::bb8::Pool; use bb8_postgres::PostgresConnectionManager; +use bb8_postgres::bb8::Pool; use chrono::NaiveDateTime; use geoengine_datatypes::collections::VectorDataType; use geoengine_datatypes::dataset::{DataId, DataProviderId, LayerId}; @@ -38,7 +38,7 @@ use postgres_types::{FromSql, ToSql}; use serde::{Deserialize, Serialize}; use snafu::ensure; use std::fmt::Write; -use tokio::time::{timeout, Duration}; +use tokio::time::{Duration, timeout}; use tokio_postgres::NoTls; pub const GBIF_PROVIDER_ID: DataProviderId = @@ -933,10 +933,10 @@ impl ) -> geoengine_operators::util::Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, > { Err(geoengine_operators::error::Error::NotYetImplemented) @@ -2215,91 +2215,91 @@ mod tests { .sort(); let expected = OgrSourceDataset { - file_name: PathBuf::from(ogr_pg_string), - layer_name: format!("{0}.occurrences", db_config.schema), - data_type: Some(VectorDataType::MultiPoint), - time: OgrSourceDatasetTimeType::Start { - start_field: "eventdate".to_string(), - start_format: OgrSourceTimeFormat::Auto, - duration: OgrSourceDurationSpec::Zero, - }, - default_geometry: None, - columns: Some(OgrSourceColumnSpec { - format_specifics: None, - x: String::new(), - y: None, - int: vec![ - "gbifid".to_string(), - "individualcount".to_string(), - "day".to_string(), - "month".to_string(), - "year".to_string(), - "taxonkey".to_string(), - ], - float: vec![ - "decimallatitude".to_string(), - "decimallongitude".to_string(), - "coordinateuncertaintyinmeters".to_string(), - "elevation".to_string(), - ], - text: vec![ - "basisofrecord".to_string(), - "catalognumber".to_string(), - "class".to_string(), - "collectioncode".to_string(), - "coordinateprecision".to_string(), - "countrycode".to_string(), - "datasetkey".to_string(), - "dateidentified".to_string(), - 
"depthaccuracy".to_string(), - "elevationaccuracy".to_string(), - "establishmentmeans".to_string(), - "family".to_string(), - "genus".to_string(), - "identifiedby".to_string(), - "infraspecificepithet".to_string(), - "institutioncode".to_string(), - "issue".to_string(), - "kingdom".to_string(), - "lastinterpreted".to_string(), - "license".to_string(), - "locality".to_string(), - "mediatype".to_string(), - "occurrenceid".to_string(), - "occurrencestatus".to_string(), - "order".to_string(), - "phylum".to_string(), - "publishingorgkey".to_string(), - "recordedby".to_string(), - "recordnumber".to_string(), - "rightsholder".to_string(), - "scientificname".to_string(), - "species".to_string(), - "specieskey".to_string(), - "stateprovince".to_string(), - "taxonrank".to_string(), - "typestatus".to_string(), - "verbatimscientificname".to_string(), - "verbatimscientificnameauthorship".to_string(), - ], - bool: vec![], - datetime: vec![], - rename: None, - }), - force_ogr_time_filter: true, - force_ogr_spatial_filter: false, - on_error: OgrSourceErrorSpec::Ignore, - sql_query: Some(format!( - "SELECT {} geom, eventdate FROM {}.occurrences WHERE species = 'Rhipidia willistoniana'", - GbifDataProvider::all_columns() - .iter() - .map(|column| format!(r#""{column}","#)) - .join(""), - db_config.schema - )), - attribute_query: None, - cache_ttl: CacheTtlSeconds::default(), - }; + file_name: PathBuf::from(ogr_pg_string), + layer_name: format!("{0}.occurrences", db_config.schema), + data_type: Some(VectorDataType::MultiPoint), + time: OgrSourceDatasetTimeType::Start { + start_field: "eventdate".to_string(), + start_format: OgrSourceTimeFormat::Auto, + duration: OgrSourceDurationSpec::Zero, + }, + default_geometry: None, + columns: Some(OgrSourceColumnSpec { + format_specifics: None, + x: String::new(), + y: None, + int: vec![ + "gbifid".to_string(), + "individualcount".to_string(), + "day".to_string(), + "month".to_string(), + "year".to_string(), + "taxonkey".to_string(), + ], + 
float: vec![ + "decimallatitude".to_string(), + "decimallongitude".to_string(), + "coordinateuncertaintyinmeters".to_string(), + "elevation".to_string(), + ], + text: vec![ + "basisofrecord".to_string(), + "catalognumber".to_string(), + "class".to_string(), + "collectioncode".to_string(), + "coordinateprecision".to_string(), + "countrycode".to_string(), + "datasetkey".to_string(), + "dateidentified".to_string(), + "depthaccuracy".to_string(), + "elevationaccuracy".to_string(), + "establishmentmeans".to_string(), + "family".to_string(), + "genus".to_string(), + "identifiedby".to_string(), + "infraspecificepithet".to_string(), + "institutioncode".to_string(), + "issue".to_string(), + "kingdom".to_string(), + "lastinterpreted".to_string(), + "license".to_string(), + "locality".to_string(), + "mediatype".to_string(), + "occurrenceid".to_string(), + "occurrencestatus".to_string(), + "order".to_string(), + "phylum".to_string(), + "publishingorgkey".to_string(), + "recordedby".to_string(), + "recordnumber".to_string(), + "rightsholder".to_string(), + "scientificname".to_string(), + "species".to_string(), + "specieskey".to_string(), + "stateprovince".to_string(), + "taxonrank".to_string(), + "typestatus".to_string(), + "verbatimscientificname".to_string(), + "verbatimscientificnameauthorship".to_string(), + ], + bool: vec![], + datetime: vec![], + rename: None, + }), + force_ogr_time_filter: true, + force_ogr_spatial_filter: false, + on_error: OgrSourceErrorSpec::Ignore, + sql_query: Some(format!( + "SELECT {} geom, eventdate FROM {}.occurrences WHERE species = 'Rhipidia willistoniana'", + GbifDataProvider::all_columns() + .iter() + .map(|column| format!(r#""{column}","#)) + .join(""), + db_config.schema + )), + attribute_query: None, + cache_ttl: CacheTtlSeconds::default(), + }; if loading_info != expected { return Err(format!("{result_descriptor:?} != {expected:?}")); diff --git a/services/src/datasets/external/gfbio_abcd.rs b/services/src/datasets/external/gfbio_abcd.rs 
index 1d2f8f4ba8..5af9332b53 100644 --- a/services/src/datasets/external/gfbio_abcd.rs +++ b/services/src/datasets/external/gfbio_abcd.rs @@ -14,9 +14,9 @@ use crate::layers::listing::{ use crate::util::postgres::DatabaseConnectionConfig; use crate::workflows::workflow::Workflow; use async_trait::async_trait; +use bb8_postgres::PostgresConnectionManager; use bb8_postgres::bb8::{Pool, PooledConnection}; use bb8_postgres::tokio_postgres::NoTls; -use bb8_postgres::PostgresConnectionManager; use geoengine_datatypes::collections::VectorDataType; use geoengine_datatypes::dataset::{DataId, DataProviderId, LayerId}; use geoengine_datatypes::primitives::CacheTtlSeconds; @@ -628,10 +628,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { diff --git a/services/src/datasets/external/gfbio_collections.rs b/services/src/datasets/external/gfbio_collections.rs index f9109fef9e..1628fad882 100644 --- a/services/src/datasets/external/gfbio_collections.rs +++ b/services/src/datasets/external/gfbio_collections.rs @@ -15,9 +15,9 @@ use crate::layers::listing::{ use crate::util::postgres::DatabaseConnectionConfig; use crate::workflows::workflow::Workflow; use async_trait::async_trait; +use bb8_postgres::PostgresConnectionManager; use bb8_postgres::bb8::Pool; use bb8_postgres::tokio_postgres::NoTls; -use bb8_postgres::PostgresConnectionManager; use futures::future::join_all; use geoengine_datatypes::collections::VectorDataType; use geoengine_datatypes::dataset::{DataId, DataProviderId, LayerId}; @@ -38,7 +38,7 @@ use geoengine_operators::{ mock::MockDatasetDataSourceLoadingInfo, source::{GdalLoadingInfo, OgrSourceDataset}, }; -use reqwest::{header, Client}; +use reqwest::{Client, header}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::fmt; @@ 
-744,10 +744,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -823,10 +823,9 @@ mod tests { test_data, }; use httptest::{ - all_of, + Expectation, Server, all_of, matchers::{contains, lowercase, request}, responders::status_code, - Expectation, Server, }; use rand::RngCore; use std::{fs::File, io::Read, path::PathBuf}; diff --git a/services/src/datasets/external/netcdfcf/database.rs b/services/src/datasets/external/netcdfcf/database.rs index 224947725f..ec2ed91442 100644 --- a/services/src/datasets/external/netcdfcf/database.rs +++ b/services/src/datasets/external/netcdfcf/database.rs @@ -1,19 +1,18 @@ use super::{ - determine_data_range_and_colorizer, error, + NetCdfCf4DDatasetId, NetCdfCf4DProviderError, NetCdfEntity, NetCdfGroup, NetCdfOverview, + Result, determine_data_range_and_colorizer, error, loading::{ - create_layer, create_layer_collection_from_parts, LayerCollectionIdFn, LayerCollectionParts, + LayerCollectionIdFn, LayerCollectionParts, create_layer, create_layer_collection_from_parts, }, metadata::{Creator, DataRange, NetCdfGroupMetadata, NetCdfOverviewMetadata}, overviews::LoadingInfoMetadata, - NetCdfCf4DDatasetId, NetCdfCf4DProviderError, NetCdfEntity, NetCdfGroup, NetCdfOverview, - Result, }; use crate::layers::{ layer::{Layer, LayerCollection, LayerCollectionListOptions, ProviderLayerId}, listing::LayerCollectionId, }; use async_trait::async_trait; -use bb8_postgres::{bb8::PooledConnection, PostgresConnectionManager}; +use bb8_postgres::{PostgresConnectionManager, bb8::PooledConnection}; use geoengine_datatypes::{ dataset::{DataProviderId, LayerId}, error::BoxedResultExt, @@ -23,8 +22,8 @@ use geoengine_operators::source::GdalMetaDataList; use snafu::ResultExt; use std::sync::Arc; use tokio_postgres::{ - tls::{MakeTlsConnect, 
TlsConnect}, Socket, Transaction, + tls::{MakeTlsConnect, TlsConnect}, }; #[async_trait] @@ -633,7 +632,7 @@ async fn entities( file_name: &str, offset: u32, limit: u32, -) -> Result> { +) -> Result + use<>> { let entities = transaction .query( " diff --git a/services/src/datasets/external/netcdfcf/ebvportal_api.rs b/services/src/datasets/external/netcdfcf/ebvportal_api.rs index 3bc631d38f..489ae7c0a5 100644 --- a/services/src/datasets/external/netcdfcf/ebvportal_api.rs +++ b/services/src/datasets/external/netcdfcf/ebvportal_api.rs @@ -2,7 +2,7 @@ //! //! Connects to . -use crate::datasets::external::netcdfcf::{error, NetCdfOverview}; +use crate::datasets::external::netcdfcf::{NetCdfOverview, error}; use crate::error::Result; use error::NetCdfCf4DProviderError; use geoengine_datatypes::dataset::DataProviderId; diff --git a/services/src/datasets/external/netcdfcf/ebvportal_provider.rs b/services/src/datasets/external/netcdfcf/ebvportal_provider.rs index 1074a3f7d8..71bf84db1b 100644 --- a/services/src/datasets/external/netcdfcf/ebvportal_provider.rs +++ b/services/src/datasets/external/netcdfcf/ebvportal_provider.rs @@ -1,7 +1,7 @@ use super::{ + NetCdfCfDataProvider, NetCdfCfDataProviderDefinition, NetCdfLayerCollectionId, ebvportal_api::EbvPortalApi, loading::LayerCollectionIdFn, netcdf_entity_to_layer_id, - netcdf_group_to_layer_collection_id, NetCdfCfDataProvider, NetCdfCfDataProviderDefinition, - NetCdfLayerCollectionId, + netcdf_group_to_layer_collection_id, }; use crate::{ contexts::GeoEngineDb, @@ -209,7 +209,7 @@ impl TryFrom for LayerCollectionId { groups, } => format!("classes/{}/{}/{}/{}", class, ebv, dataset, groups.join("/")), EbvCollectionId::Entity { .. 
} => { - return Err(crate::error::Error::InvalidLayerCollectionId) + return Err(crate::error::Error::InvalidLayerCollectionId); } }; @@ -515,7 +515,7 @@ impl LayerCollectionProvider for EbvPortalDataProvider { layer_collection.entry_label = layer_collection .items .first() - .map_or(true, |item| matches!(item, CollectionItem::Layer(_))) + .is_none_or(|item| matches!(item, CollectionItem::Layer(_))) .then_some("Entity".to_string()) .or_else(|| Some("Metric".to_string())); @@ -668,10 +668,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -705,7 +705,7 @@ mod tests { layers::layer::{LayerListing, ProviderLayerId}, }; use geoengine_datatypes::test_data; - use httptest::{matchers::request, responders::status_code, Expectation}; + use httptest::{Expectation, matchers::request, responders::status_code}; use std::str::FromStr; use tokio_postgres::NoTls; diff --git a/services/src/datasets/external/netcdfcf/loading.rs b/services/src/datasets/external/netcdfcf/loading.rs index 3fad712a74..9c10e33e7d 100644 --- a/services/src/datasets/external/netcdfcf/loading.rs +++ b/services/src/datasets/external/netcdfcf/loading.rs @@ -1,7 +1,6 @@ use super::{ - error, + NetCdfEntity, Result, error, metadata::{Creator, DataRange, NetCdfGroupMetadata, NetCdfOverviewMetadata}, - NetCdfEntity, Result, }; use crate::{ layers::{ diff --git a/services/src/datasets/external/netcdfcf/metadata.rs b/services/src/datasets/external/netcdfcf/metadata.rs index 5a72449320..ba256443b1 100644 --- a/services/src/datasets/external/netcdfcf/metadata.rs +++ b/services/src/datasets/external/netcdfcf/metadata.rs @@ -4,7 +4,7 @@ use bytes::BytesMut; use geoengine_datatypes::operations::image::Colorizer; use geoengine_datatypes::raster::RasterDataType; use 
geoengine_datatypes::spatial_reference::SpatialReference; -use postgres_types::{to_sql_checked, FromSql, IsNull, ToSql}; +use postgres_types::{FromSql, IsNull, ToSql, to_sql_checked}; use serde::{Deserialize, Serialize}; #[derive(Debug, Clone, PartialEq)] diff --git a/services/src/datasets/external/netcdfcf/mod.rs b/services/src/datasets/external/netcdfcf/mod.rs index 0d37b26bdf..9fe5fc485c 100644 --- a/services/src/datasets/external/netcdfcf/mod.rs +++ b/services/src/datasets/external/netcdfcf/mod.rs @@ -1,12 +1,12 @@ use self::database::NetCdfDatabaseListingConfig; use self::loading::{ - create_layer, create_layer_collection_from_parts, LayerCollectionIdFn, LayerCollectionParts, + LayerCollectionIdFn, LayerCollectionParts, create_layer, create_layer_collection_from_parts, }; use self::metadata::{Creator, DataRange, NetCdfGroupMetadata, NetCdfOverviewMetadata}; use self::overviews::create_overviews; -use self::overviews::{remove_overviews, OverviewCreationOptions}; +use self::overviews::{OverviewCreationOptions, remove_overviews}; use crate::contexts::GeoEngineDb; -use crate::datasets::external::netcdfcf::loading::{create_loading_info, ParamModification}; +use crate::datasets::external::netcdfcf::loading::{ParamModification, create_loading_info}; use crate::datasets::listing::ProvenanceOutput; use crate::error::Error; use crate::layers::external::DataProvider; @@ -55,7 +55,7 @@ use walkdir::{DirEntry, WalkDir}; pub use self::database::NetCdfCfProviderDb; pub use self::ebvportal_provider::{ - EbvPortalDataProvider, EbvPortalDataProviderDefinition, EBV_PROVIDER_ID, + EBV_PROVIDER_ID, EbvPortalDataProvider, EbvPortalDataProviderDefinition, }; pub use self::error::NetCdfCf4DProviderError; pub use self::overviews::OverviewGeneration; @@ -588,7 +588,7 @@ impl NetCdfCfDataProvider { if !path.is_file() { continue; } - if path.extension().map_or(true, |extension| extension != "nc") { + if path.extension().is_none_or(|extension| extension != "nc") { continue; } @@ -1005,7 
+1005,7 @@ impl TryFrom for LayerCollectionId { netcdf_group_to_layer_collection_id(&path, &groups) } NetCdfLayerCollectionId::Entity { .. } => { - return Err(crate::error::Error::InvalidLayerCollectionId) + return Err(crate::error::Error::InvalidLayerCollectionId); } }) } @@ -1495,10 +1495,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -2695,7 +2695,7 @@ mod tests { assert_eq!( provider.load_layer(&layer_id).await.unwrap().metadata["dataRange"], - "[2.0,25093.0]" + "[1.0,98.0]" ); // manipulate a field in the metadata @@ -2737,7 +2737,7 @@ mod tests { assert_eq!( provider.load_layer(&layer_id).await.unwrap().metadata["dataRange"], - "[2.0,25093.0]" + "[1.0,98.0]" ); // forcefully remove file to test error handling diff --git a/services/src/datasets/external/netcdfcf/overviews.rs b/services/src/datasets/external/netcdfcf/overviews.rs index 2e4c50abe4..7ede46eee9 100644 --- a/services/src/datasets/external/netcdfcf/overviews.rs +++ b/services/src/datasets/external/netcdfcf/overviews.rs @@ -1,21 +1,21 @@ use super::{ - build_netcdf_tree, database::InProgressFlag, error, gdal_netcdf_open, metadata::DataRange, - NetCdfCf4DProviderError, NetCdfCfProviderDb, NetCdfOverview, TimeCoverage, + NetCdfCf4DProviderError, NetCdfCfProviderDb, NetCdfOverview, TimeCoverage, build_netcdf_tree, + database::InProgressFlag, error, gdal_netcdf_open, metadata::DataRange, }; use crate::{ config::get_config_element, - datasets::external::netcdfcf::loading::{create_loading_info, ParamModification}, + datasets::external::netcdfcf::loading::{ParamModification, create_loading_info}, tasks::{TaskContext, TaskStatusInfo}, util::path_with_base_path, }; use gdal::{ + Dataset, cpl::CslStringList, errors::GdalError, programs::raster::{ - multi_dim_translate, 
MultiDimTranslateDestination, MultiDimTranslateOptions, + MultiDimTranslateDestination, MultiDimTranslateOptions, multi_dim_translate, }, raster::{Group, RasterCreationOptions}, - Dataset, }; use geoengine_datatypes::{ dataset::DataProviderId, error::BoxedResultExt, primitives::TimeInstance, @@ -764,8 +764,8 @@ pub async fn remove_overviews mod tests { use super::*; use crate::contexts::{PostgresDb, PostgresSessionContext}; - use crate::datasets::external::netcdfcf::database::NetCdfCfProviderDb; use crate::datasets::external::netcdfcf::NETCDF_CF_PROVIDER_ID; + use crate::datasets::external::netcdfcf::database::NetCdfCfProviderDb; use crate::{contexts::SessionContext, ge_context, tasks::util::NopTaskContext}; use gdal::{DatasetOptions, GdalOpenFlags}; use geoengine_datatypes::{ @@ -917,15 +917,21 @@ mod tests { for metric in ["metric_1", "metric_2"] { for entity in 0..3 { - assert!(dataset_folder - .join(format!("{metric}/{entity}/2000-01-01T00:00:00.000Z.tiff")) - .exists()); - assert!(dataset_folder - .join(format!("{metric}/{entity}/2001-01-01T00:00:00.000Z.tiff")) - .exists()); - assert!(dataset_folder - .join(format!("{metric}/{entity}/2002-01-01T00:00:00.000Z.tiff")) - .exists()); + assert!( + dataset_folder + .join(format!("{metric}/{entity}/2000-01-01T00:00:00.000Z.tiff")) + .exists() + ); + assert!( + dataset_folder + .join(format!("{metric}/{entity}/2001-01-01T00:00:00.000Z.tiff")) + .exists() + ); + assert!( + dataset_folder + .join(format!("{metric}/{entity}/2002-01-01T00:00:00.000Z.tiff")) + .exists() + ); } } } @@ -960,15 +966,21 @@ mod tests { for metric in ["metric_1", "metric_2"] { for entity in 0..3 { - assert!(dataset_folder - .join(format!("{metric}/{entity}/1900-01-01T00:00:00.000Z.tiff")) - .exists()); - assert!(dataset_folder - .join(format!("{metric}/{entity}/2015-01-01T00:00:00.000Z.tiff")) - .exists()); - assert!(dataset_folder - .join(format!("{metric}/{entity}/2055-01-01T00:00:00.000Z.tiff")) - .exists()); + assert!( + dataset_folder + 
.join(format!("{metric}/{entity}/1900-01-01T00:00:00.000Z.tiff")) + .exists() + ); + assert!( + dataset_folder + .join(format!("{metric}/{entity}/2015-01-01T00:00:00.000Z.tiff")) + .exists() + ); + assert!( + dataset_folder + .join(format!("{metric}/{entity}/2055-01-01T00:00:00.000Z.tiff")) + .exists() + ); } } @@ -1094,13 +1106,14 @@ mod tests { let db = Arc::new(ctx.db()); - assert!(!db - .overviews_exist( + assert!( + !db.overviews_exist( NETCDF_CF_PROVIDER_ID, dataset_path.to_string_lossy().as_ref() ) .await - .unwrap()); + .unwrap() + ); create_overviews( NopTaskContext, @@ -1131,12 +1144,13 @@ mod tests { assert!(is_empty(overview_folder.path())); - assert!(!db - .overviews_exist( + assert!( + !db.overviews_exist( NETCDF_CF_PROVIDER_ID, dataset_path.to_string_lossy().as_ref() ) .await - .unwrap()); + .unwrap() + ); } } diff --git a/services/src/datasets/external/pangaea/meta.rs b/services/src/datasets/external/pangaea/meta.rs index 9851ad8b3c..4fceb2134b 100644 --- a/services/src/datasets/external/pangaea/meta.rs +++ b/services/src/datasets/external/pangaea/meta.rs @@ -172,7 +172,7 @@ impl PangaeaMetaData { ErrorKind::UnexpectedEof, "End of Pangea TSV reached unexpectedly.", ), - }) + }); } }, TSVParseState::DataStart(idx) => { diff --git a/services/src/datasets/external/pangaea/mod.rs b/services/src/datasets/external/pangaea/mod.rs index fa52f21e12..27e2dc6242 100644 --- a/services/src/datasets/external/pangaea/mod.rs +++ b/services/src/datasets/external/pangaea/mod.rs @@ -247,10 +247,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -262,7 +262,7 @@ impl mod tests { use crate::contexts::{GeoEngineDb, SessionContext}; use crate::contexts::{PostgresContext, PostgresSessionContext}; - use 
crate::datasets::external::pangaea::{PangaeaDataProviderDefinition, PANGAEA_PROVIDER_ID}; + use crate::datasets::external::pangaea::{PANGAEA_PROVIDER_ID, PangaeaDataProviderDefinition}; use crate::error::Error; use crate::ge_context; use crate::layers::external::{DataProvider, DataProviderDefinition}; @@ -284,10 +284,9 @@ mod tests { }; use geoengine_operators::source::{OgrSource, OgrSourceDataset, OgrSourceParameters}; use httptest::{ - all_of, + Expectation, Server, all_of, matchers::{contains, request, url_decoded}, responders::status_code, - Expectation, Server, }; use std::ops::RangeInclusive; use std::path::PathBuf; diff --git a/services/src/datasets/external/sentinel_s2_l2a_cogs.rs b/services/src/datasets/external/sentinel_s2_l2a_cogs.rs index 3ed6f012b9..540c9b27ea 100644 --- a/services/src/datasets/external/sentinel_s2_l2a_cogs.rs +++ b/services/src/datasets/external/sentinel_s2_l2a_cogs.rs @@ -41,7 +41,7 @@ use log::debug; use postgres_types::{FromSql, ToSql}; use reqwest::Client; use serde::{Deserialize, Serialize}; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use std::collections::HashMap; use std::convert::TryInto; use std::fmt::Debug; @@ -861,10 +861,10 @@ impl ) -> Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, geoengine_operators::error::Error, > { @@ -903,7 +903,7 @@ mod tests { use geoengine_datatypes::{ dataset::{DatasetId, ExternalDataId}, primitives::{BandSelection, SpatialPartition2D, SpatialResolution}, - util::{gdal::hide_gdal_errors, test::TestDefault, Identifier}, + util::{Identifier, gdal::hide_gdal_errors, test::TestDefault}, }; use geoengine_operators::{ engine::{ @@ -913,10 +913,9 @@ mod tests { source::{FileNotFoundHandling, GdalMetaDataStatic, GdalSource, GdalSourceParameters}, }; use httptest::{ - all_of, + Expectation, Server, all_of, 
matchers::{contains, request, url_decoded}, responders::{self}, - Expectation, Server, }; use std::{fs::File, io::BufReader, str::FromStr}; use tokio_postgres::NoTls; @@ -1119,7 +1118,7 @@ mod tests { )))), request::query(url_decoded(contains(( "datetime", - // default case adds one minute to the start/end of the query to catch elements before/after + // default case adds one minute to the start/end of the query to catch elements before/after "2021-09-23T08:09:44+00:00/2021-09-23T08:11:44+00:00" )))), ]) diff --git a/services/src/datasets/listing.rs b/services/src/datasets/listing.rs index 43ffe880d6..f3e535b103 100644 --- a/services/src/datasets/listing.rs +++ b/services/src/datasets/listing.rs @@ -1,7 +1,7 @@ -use super::storage::MetaDataDefinition; use super::DatasetName; -use crate::config::{get_config_element, DatasetService}; -use crate::datasets::storage::{validate_tags, Dataset}; +use super::storage::MetaDataDefinition; +use crate::config::{DatasetService, get_config_element}; +use crate::datasets::storage::{Dataset, validate_tags}; use crate::error::Result; use crate::projects::Symbology; use async_trait::async_trait; diff --git a/services/src/datasets/mod.rs b/services/src/datasets/mod.rs index dd0a035757..2da973a784 100644 --- a/services/src/datasets/mod.rs +++ b/services/src/datasets/mod.rs @@ -8,8 +8,8 @@ pub mod storage; pub mod upload; pub(crate) use create_from_workflow::{ - schedule_raster_dataset_from_workflow_task, RasterDatasetFromWorkflow, - RasterDatasetFromWorkflowResult, + RasterDatasetFromWorkflow, RasterDatasetFromWorkflowResult, + schedule_raster_dataset_from_workflow_task, }; pub use name::{DatasetIdAndName, DatasetName, DatasetNameError}; pub use storage::AddDataset; diff --git a/services/src/datasets/name.rs b/services/src/datasets/name.rs index 0315a4033c..52a2f9d562 100644 --- a/services/src/datasets/name.rs +++ b/services/src/datasets/name.rs @@ -1,6 +1,6 @@ use geoengine_datatypes::dataset::{DatasetId, NamedData}; use 
postgres_types::{FromSql, ToSql}; -use serde::{de::Visitor, Deserialize, Serialize}; +use serde::{Deserialize, Serialize, de::Visitor}; use snafu::Snafu; use std::str::FromStr; use strum::IntoStaticStr; diff --git a/services/src/datasets/postgres.rs b/services/src/datasets/postgres.rs index c3102a0a23..17b1b97a7f 100644 --- a/services/src/datasets/postgres.rs +++ b/services/src/datasets/postgres.rs @@ -13,10 +13,10 @@ use crate::permissions::{Permission, RoleId}; use crate::projects::Symbology; use crate::util::postgres::PostgresErrorExt; use async_trait::async_trait; +use bb8_postgres::PostgresConnectionManager; use bb8_postgres::bb8::PooledConnection; -use bb8_postgres::tokio_postgres::tls::{MakeTlsConnect, TlsConnect}; use bb8_postgres::tokio_postgres::Socket; -use bb8_postgres::PostgresConnectionManager; +use bb8_postgres::tokio_postgres::tls::{MakeTlsConnect, TlsConnect}; use geoengine_datatypes::dataset::{DataId, DatasetId}; use geoengine_datatypes::error::BoxedResultExt; use geoengine_datatypes::primitives::RasterQueryRectangle; @@ -415,10 +415,10 @@ where ) -> geoengine_operators::util::Result< Box< dyn MetaData< - MockDatasetDataSourceLoadingInfo, - VectorResultDescriptor, - VectorQueryRectangle, - >, + MockDatasetDataSourceLoadingInfo, + VectorResultDescriptor, + VectorQueryRectangle, + >, >, > { Err(geoengine_operators::error::Error::NotYetImplemented) @@ -936,16 +936,23 @@ mod tests { // check that other user B cannot access datasets of user A - assert!(db_b - .dataset_autocomplete_search(None, "Ogr".to_owned(), 10, 0) - .await - .unwrap() - .is_empty()); - assert!(db_b - .dataset_autocomplete_search(Some(vec!["upload".to_string()]), "Ogr".to_owned(), 10, 0) + assert!( + db_b.dataset_autocomplete_search(None, "Ogr".to_owned(), 10, 0) + .await + .unwrap() + .is_empty() + ); + assert!( + db_b.dataset_autocomplete_search( + Some(vec!["upload".to_string()]), + "Ogr".to_owned(), + 10, + 0 + ) .await .unwrap() - .is_empty()); + .is_empty() + ); } 
#[ge_context::test] diff --git a/services/src/error.rs b/services/src/error.rs index 72adfb9fb1..3f38cb6c04 100644 --- a/services/src/error.rs +++ b/services/src/error.rs @@ -5,8 +5,8 @@ use crate::api::model::responses::ErrorResponse; use crate::datasets::external::aruna::error::ArunaProviderError; use crate::datasets::external::netcdfcf::NetCdfCf4DProviderError; use crate::{layers::listing::LayerCollectionId, workflows::workflow::WorkflowId}; -use actix_web::http::StatusCode; use actix_web::HttpResponse; +use actix_web::http::StatusCode; use geoengine_datatypes::dataset::LayerId; use geoengine_datatypes::error::ErrorSource; use geoengine_datatypes::util::helpers::ge_report; diff --git a/services/src/layers/layer.rs b/services/src/layers/layer.rs index a12d2b1f55..af49e995f9 100644 --- a/services/src/layers/layer.rs +++ b/services/src/layers/layer.rs @@ -1,5 +1,5 @@ use super::listing::LayerCollectionId; -use crate::config::{get_config_element, LayerService}; +use crate::config::{LayerService, get_config_element}; use crate::{projects::Symbology, workflows::workflow::Workflow}; use geoengine_datatypes::dataset::{DataProviderId, LayerId}; use serde::{Deserialize, Serialize}; diff --git a/services/src/layers/postgres_layer_db.rs b/services/src/layers/postgres_layer_db.rs index 37eb814d1d..85b2cc1dc5 100644 --- a/services/src/layers/postgres_layer_db.rs +++ b/services/src/layers/postgres_layer_db.rs @@ -9,8 +9,8 @@ use super::listing::{ SearchType, SearchTypes, }; use super::storage::{ - LayerDb, LayerProviderDb, LayerProviderListing, LayerProviderListingOptions, - INTERNAL_PROVIDER_ID, + INTERNAL_PROVIDER_ID, LayerDb, LayerProviderDb, LayerProviderListing, + LayerProviderListingOptions, }; use crate::contexts::PostgresDb; use crate::layers::external::DataProviderDefinition; @@ -19,22 +19,22 @@ use crate::workflows::registry::TxWorkflowRegistry; use crate::{ error::{self, Result}, layers::{ + LayerDbError, layer::{AddLayer, AddLayerCollection}, 
listing::LayerCollectionId, storage::INTERNAL_LAYER_DB_ROOT_COLLECTION_ID, - LayerDbError, }, }; +use bb8_postgres::PostgresConnectionManager; use bb8_postgres::bb8::PooledConnection; use bb8_postgres::tokio_postgres::{ - tls::{MakeTlsConnect, TlsConnect}, Socket, + tls::{MakeTlsConnect, TlsConnect}, }; -use bb8_postgres::PostgresConnectionManager; use geoengine_datatypes::dataset::{DataProviderId, LayerId}; use geoengine_datatypes::error::BoxedResultExt; use geoengine_datatypes::util::HashMapTextTextDbType; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use std::str::FromStr; use tokio_postgres::Transaction; use tonic::async_trait; diff --git a/services/src/machine_learning/mod.rs b/services/src/machine_learning/mod.rs index fcfe30a508..814d3459b5 100644 --- a/services/src/machine_learning/mod.rs +++ b/services/src/machine_learning/mod.rs @@ -1,12 +1,12 @@ use crate::{ api::model::datatypes::RasterDataType, - config::{get_config_element, MachineLearning}, + config::{MachineLearning, get_config_element}, datasets::upload::{UploadId, UploadRootPath}, identifier, util::path_with_base_path, }; use async_trait::async_trait; -use error::{error::CouldNotFindMlModelFileMachineLearningError, MachineLearningError}; +use error::{MachineLearningError, error::CouldNotFindMlModelFileMachineLearningError}; use name::MlModelName; use postgres_types::{FromSql, ToSql}; use serde::{Deserialize, Serialize}; diff --git a/services/src/machine_learning/name.rs b/services/src/machine_learning/name.rs index a06fe5e10a..ad5c99c231 100644 --- a/services/src/machine_learning/name.rs +++ b/services/src/machine_learning/name.rs @@ -1,5 +1,5 @@ use postgres_types::{FromSql, ToSql}; -use serde::{de::Visitor, Deserialize, Serialize}; +use serde::{Deserialize, Serialize, de::Visitor}; use utoipa::ToSchema; const NAME_DELIMITER: char = ':'; diff --git a/services/src/machine_learning/postgres.rs b/services/src/machine_learning/postgres.rs index 53ea177a1f..d3c551c6b4 100644 --- 
a/services/src/machine_learning/postgres.rs +++ b/services/src/machine_learning/postgres.rs @@ -1,12 +1,12 @@ use crate::{ contexts::PostgresDb, machine_learning::{ + MlModel, MlModelDb, MlModelId, MlModelIdAndName, MlModelListOptions, MlModelMetadata, error::{ - error::{Bb8MachineLearningError, PostgresMachineLearningError}, MachineLearningError, + error::{Bb8MachineLearningError, PostgresMachineLearningError}, }, name::MlModelName, - MlModel, MlModelDb, MlModelId, MlModelIdAndName, MlModelListOptions, MlModelMetadata, }, permissions::Permission, util::postgres::PostgresErrorExt, @@ -15,8 +15,8 @@ use async_trait::async_trait; use geoengine_datatypes::util::Identifier; use snafu::ResultExt; use tokio_postgres::{ - tls::{MakeTlsConnect, TlsConnect}, Socket, + tls::{MakeTlsConnect, TlsConnect}, }; #[async_trait] diff --git a/services/src/permissions/postgres_permissiondb.rs b/services/src/permissions/postgres_permissiondb.rs index 1848b5402c..fd659acba4 100644 --- a/services/src/permissions/postgres_permissiondb.rs +++ b/services/src/permissions/postgres_permissiondb.rs @@ -9,10 +9,10 @@ use crate::permissions::{ MustBeAdminPermissionDbError, PermissionDeniedPermissionDbError, Role, }; use async_trait::async_trait; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use tokio_postgres::{ - tls::{MakeTlsConnect, TlsConnect}, Socket, + tls::{MakeTlsConnect, TlsConnect}, }; use uuid::Uuid; diff --git a/services/src/projects/postgres_projectdb.rs b/services/src/projects/postgres_projectdb.rs index 3549b1ee90..95366fe6e0 100644 --- a/services/src/projects/postgres_projectdb.rs +++ b/services/src/projects/postgres_projectdb.rs @@ -4,12 +4,12 @@ use crate::contexts::PostgresDb; use crate::error::Result; use crate::permissions::Permission; use crate::permissions::TxPermissionDb; -use crate::projects::error::{ - AccessFailedProjectDbError, Bb8ProjectDbError, ProjectNotFoundProjectDbError, -}; use crate::projects::LoadVersion; use crate::projects::Plot; use 
crate::projects::ProjectLayer; +use crate::projects::error::{ + AccessFailedProjectDbError, Bb8ProjectDbError, ProjectNotFoundProjectDbError, +}; use crate::projects::{ CreateProject, Project, ProjectDb, ProjectId, ProjectListOptions, ProjectListing, ProjectVersion, ProjectVersionId, UpdateProject, @@ -20,10 +20,10 @@ use crate::workflows::workflow::WorkflowId; use async_trait::async_trait; use bb8_postgres::tokio_postgres::Transaction; use bb8_postgres::{ - tokio_postgres::tls::MakeTlsConnect, tokio_postgres::tls::TlsConnect, tokio_postgres::Socket, + tokio_postgres::Socket, tokio_postgres::tls::MakeTlsConnect, tokio_postgres::tls::TlsConnect, }; use geoengine_datatypes::error::BoxedResultExt; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use tokio_postgres::Row; #[async_trait] diff --git a/services/src/projects/project.rs b/services/src/projects/project.rs index 02c7b2c7e9..8d5ef64ff6 100644 --- a/services/src/projects/project.rs +++ b/services/src/projects/project.rs @@ -1,5 +1,5 @@ -use crate::config::get_config_element; use crate::config::ProjectService; +use crate::config::get_config_element; use crate::error::Result; use crate::identifier; use crate::projects::error::ProjectDbError; @@ -626,28 +626,30 @@ mod tests { #[test] fn strectangle_serialization() { - assert!(serde_json::from_str::( - &json!({ - "spatialReference": "EPSG:4326", - "boundingBox": { - "lowerLeftCoordinate": { - "x": -180, - "y": -90 - }, - "upperRightCoordinate": { - "x": 180, - "y": 90 + assert!( + serde_json::from_str::( + &json!({ + "spatialReference": "EPSG:4326", + "boundingBox": { + "lowerLeftCoordinate": { + "x": -180, + "y": -90 + }, + "upperRightCoordinate": { + "x": 180, + "y": 90 + } + }, + "timeInterval": { + "start": 0, + "end": 0 + } } - }, - "timeInterval": { - "start": 0, - "end": 0 - } - } + ) + .to_string(), ) - .to_string(), - ) - .is_ok()); + .is_ok() + ); } #[test] diff --git a/services/src/projects/projectdb.rs 
b/services/src/projects/projectdb.rs index 7463832977..7a4501e973 100644 --- a/services/src/projects/projectdb.rs +++ b/services/src/projects/projectdb.rs @@ -4,7 +4,7 @@ use crate::projects::project::{ use async_trait::async_trait; -use super::{error::ProjectDbError, LoadVersion, ProjectVersion}; +use super::{LoadVersion, ProjectVersion, error::ProjectDbError}; /// Storage of user projects #[async_trait] diff --git a/services/src/quota/mod.rs b/services/src/quota/mod.rs index 19477d9b38..dbcf805742 100644 --- a/services/src/quota/mod.rs +++ b/services/src/quota/mod.rs @@ -5,7 +5,7 @@ use geoengine_operators::meta::quota::{ComputationUnit, QuotaMessage, QuotaTrack use serde::{Deserialize, Serialize}; use snafu::Snafu; use std::{collections::HashMap, time::Duration}; -use tokio::sync::mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}; +use tokio::sync::mpsc::{UnboundedReceiver, UnboundedSender, unbounded_channel}; use utoipa::ToSchema; use uuid::Uuid; @@ -222,7 +222,7 @@ mod tests { contexts::{ApplicationContext, SessionContext}, ge_context, users::{UserAuth, UserCredentials, UserRegistration}, - util::tests::{admin_login, MockQuotaTracking}, + util::tests::{MockQuotaTracking, admin_login}, }; use tokio_postgres::NoTls; diff --git a/services/src/server.rs b/services/src/server.rs index ffd25fe695..3495980fff 100644 --- a/services/src/server.rs +++ b/services/src/server.rs @@ -8,11 +8,11 @@ use crate::users::UserSession; use crate::util::middleware::OutputRequestId; use crate::util::postgres::DatabaseConnectionConfig; use crate::util::server::{ - calculate_max_blocking_threads_per_worker, configure_extractors, connection_init, - log_server_info, render_404, render_405, serve_openapi_json, CustomRootSpanBuilder, + CustomRootSpanBuilder, calculate_max_blocking_threads_per_worker, configure_extractors, + connection_init, log_server_info, render_404, render_405, serve_openapi_json, }; use actix_files::Files; -use actix_web::{http, middleware, web, App, 
FromRequest, HttpServer}; +use actix_web::{App, FromRequest, HttpServer, http, middleware, web}; use bb8_postgres::tokio_postgres::NoTls; use geoengine_datatypes::raster::TilingSpecification; use geoengine_operators::engine::ChunkByteSize; diff --git a/services/src/stac/mod.rs b/services/src/stac/mod.rs index d7aeb4b6ee..5bc181cd69 100644 --- a/services/src/stac/mod.rs +++ b/services/src/stac/mod.rs @@ -2,7 +2,7 @@ use std::{collections::HashMap, convert::TryFrom}; use geo::Rect; use geoengine_datatypes::primitives::DateTime; -use serde::{de::value::MapDeserializer, de::Error, Deserialize, Deserializer}; +use serde::{Deserialize, Deserializer, de::Error, de::value::MapDeserializer}; use serde_with::with_prefix; use snafu::Snafu; diff --git a/services/src/tasks/in_memory.rs b/services/src/tasks/in_memory.rs index 400278ee12..de01b76c80 100644 --- a/services/src/tasks/in_memory.rs +++ b/services/src/tasks/in_memory.rs @@ -3,11 +3,11 @@ use super::{ TaskListOptions, TaskManager, TaskStatus, TaskStatusInfo, TaskStatusWithId, }; use crate::{contexts::Db, error::Result}; -use futures::channel::oneshot; use futures::StreamExt; +use futures::channel::oneshot; use geoengine_datatypes::{ error::ErrorSource, - util::{helpers::ge_report, Identifier}, + util::{Identifier, helpers::ge_report}, }; use log::warn; use std::{ diff --git a/services/src/users/oidc.rs b/services/src/users/oidc.rs index 4f4b759a6d..a76f6f743f 100644 --- a/services/src/users/oidc.rs +++ b/services/src/users/oidc.rs @@ -635,6 +635,7 @@ impl OidcRequestClient { #[cfg(test)] mod tests { use super::*; + use crate::users::OidcError::IllegalRequestToken; use crate::users::oidc::OidcError::{ IllegalProvider, LoginFailed, ProviderDiscovery, ResponseFieldError, TokenExchangeError, }; @@ -642,10 +643,9 @@ mod tests { AuthCodeResponse, DefaultClient, DefaultJsonWebKeySet, DefaultProviderMetadata, OidcRequestDb, }; - use crate::users::OidcError::IllegalRequestToken; use crate::util::tests::mock_oidc::{ - mock_jwks, 
mock_provider_metadata, mock_token_response, MockTokenConfig, SINGLE_NONCE, - SINGLE_STATE, + MockTokenConfig, SINGLE_NONCE, SINGLE_STATE, mock_jwks, mock_provider_metadata, + mock_token_response, }; use httptest::matchers::request; use httptest::responders::status_code; diff --git a/services/src/users/postgres_userdb.rs b/services/src/users/postgres_userdb.rs index 4d0f9175a9..acd9fa2582 100644 --- a/services/src/users/postgres_userdb.rs +++ b/services/src/users/postgres_userdb.rs @@ -14,19 +14,19 @@ use crate::users::{ SessionTokenStore, StoredOidcTokens, User, UserCredentials, UserDb, UserId, UserInfo, UserRegistration, UserSession, }; -use crate::util::postgres::PostgresErrorExt; use crate::util::Identifier; +use crate::util::postgres::PostgresErrorExt; use crate::{contexts::PostgresContext, error}; use async_trait::async_trait; use geoengine_operators::meta::quota::ComputationUnit; use crate::util::encryption::MaybeEncryptedBytes; use bb8_postgres::{ - tokio_postgres::tls::MakeTlsConnect, tokio_postgres::tls::TlsConnect, tokio_postgres::Socket, + tokio_postgres::Socket, tokio_postgres::tls::MakeTlsConnect, tokio_postgres::tls::TlsConnect, }; use oauth2::AccessToken; use pwhash::bcrypt; -use snafu::{ensure, ResultExt}; +use snafu::{ResultExt, ensure}; use tokio_postgres::Transaction; use uuid::Uuid; diff --git a/services/src/users/session.rs b/services/src/users/session.rs index 34e03ede4c..69d953feb2 100644 --- a/services/src/users/session.rs +++ b/services/src/users/session.rs @@ -7,11 +7,11 @@ use crate::projects::{ProjectId, STRectangle}; use crate::users::UserId; use crate::util::Identifier; use actix_http::Payload; -use actix_web::{web, FromRequest, HttpRequest}; +use actix_web::{FromRequest, HttpRequest, web}; use bb8_postgres::tokio_postgres::NoTls; use futures::future::err; -use futures_util::future::LocalBoxFuture; use futures_util::FutureExt; +use futures_util::future::LocalBoxFuture; use geoengine_datatypes::primitives::DateTime; use 
serde::{Deserialize, Serialize}; use utoipa::ToSchema; @@ -96,7 +96,7 @@ impl FromRequest for UserSession { "Application context should be present because it is set during server initialization.", ); let pg_ctx = pg_ctx.get_ref().clone(); - async move { pg_ctx.session_by_id(token).await.map_err(Into::into) }.boxed_local() + async move { pg_ctx.session_by_id(token).await }.boxed_local() } } diff --git a/services/src/util/apidoc.rs b/services/src/util/apidoc.rs index a646ca7a99..a6f22eb386 100644 --- a/services/src/util/apidoc.rs +++ b/services/src/util/apidoc.rs @@ -1,8 +1,8 @@ use std::collections::BTreeMap; use utoipa::{ - openapi::{Discriminator, OneOfBuilder, Ref, RefOr, Schema}, Modify, + openapi::{Discriminator, OneOfBuilder, Ref, RefOr, Schema}, }; use super::openapi_visitors::get_schema_use_counts; @@ -17,9 +17,9 @@ impl Modify for OpenApiServerInfo { let mut api_url = web_config.api_url().expect("external address").to_string(); api_url.pop(); //remove trailing slash because codegen requires it - openapi.servers = Some(vec![utoipa::openapi::ServerBuilder::new() - .url(api_url) - .build()]); + openapi.servers = Some(vec![ + utoipa::openapi::ServerBuilder::new().url(api_url).build(), + ]); } } @@ -183,8 +183,10 @@ impl Modify for TransformSchemasWithTag { if matches!(schema_uses.get(&variant_schema_name), Some(count) if count != &1usize) { - panic!("The type {variant_schema_name} is used in the enum {schema_name} as payload, but also in other places. \ - You have to use a newly created struct, anonymous struct variant or set #[schema(title = \"XXX\")] on the variant."); + panic!( + "The type {variant_schema_name} is used in the enum {schema_name} as payload, but also in other places. \ + You have to use a newly created struct, anonymous struct variant or set #[schema(title = \"XXX\")] on the variant." 
+ ); } if let Some(flattened) = Self::flatten_allof(item, old_schemas) { @@ -216,8 +218,8 @@ mod tests { use serde::Serialize; use serde_json::json; use utoipa::{ - openapi::{path::*, *}, Modify, ToSchema, + openapi::{path::*, *}, }; #[test] diff --git a/services/src/util/encryption.rs b/services/src/util/encryption.rs index a0b39ac632..cb094fb16a 100644 --- a/services/src/util/encryption.rs +++ b/services/src/util/encryption.rs @@ -5,7 +5,7 @@ use aes_gcm::aead::{Aead, OsRng}; use aes_gcm::{AeadCore, Aes256Gcm, Key, KeyInit, Nonce}; use bytes::BytesMut; use pbkdf2::pbkdf2_hmac_array; -use postgres_types::{accepts, FromSql, IsNull, ToSql, Type}; +use postgres_types::{FromSql, IsNull, ToSql, Type, accepts}; use sha2::Sha256; use snafu::Snafu; use std::error::Error; diff --git a/services/src/util/extractors.rs b/services/src/util/extractors.rs index 6ca8d7cc41..cfe56fd4db 100644 --- a/services/src/util/extractors.rs +++ b/services/src/util/extractors.rs @@ -1,9 +1,9 @@ use std::fmt; use actix_http::Payload; -use actix_web::{web, FromRequest, HttpRequest}; -use futures::future::LocalBoxFuture; +use actix_web::{FromRequest, HttpRequest, web}; use futures::FutureExt; +use futures::future::LocalBoxFuture; use serde::de::DeserializeOwned; use validator::Validate; diff --git a/services/src/util/middleware.rs b/services/src/util/middleware.rs index e3043f5f76..9f7a4c4a9b 100644 --- a/services/src/util/middleware.rs +++ b/services/src/util/middleware.rs @@ -1,12 +1,12 @@ -use std::future::{ready, Ready}; +use std::future::{Ready, ready}; use actix_web::{ - dev::{forward_ready, Service, ServiceRequest, ServiceResponse, Transform}, - http::header::{HeaderName, HeaderValue}, Error, HttpMessage, + dev::{Service, ServiceRequest, ServiceResponse, Transform, forward_ready}, + http::header::{HeaderName, HeaderValue}, }; use futures_util::future::LocalBoxFuture; -use tracing::{event_enabled, Level}; +use tracing::{Level, event_enabled}; use tracing_actix_web::RequestId; const 
REQUEST_ID_HEADER: &str = "x-request-id"; diff --git a/services/src/util/openapi_examples.rs b/services/src/util/openapi_examples.rs index 0f6ab72a7a..c0dafd8bb2 100644 --- a/services/src/util/openapi_examples.rs +++ b/services/src/util/openapi_examples.rs @@ -3,7 +3,7 @@ use crate::contexts::PostgresContext; use crate::contexts::SessionId; use crate::users::UserAuth; use actix_web::dev::ServiceResponse; -use actix_web::http::{header, Method}; +use actix_web::http::{Method, header}; use actix_web::test::TestRequest; use actix_web_httpauth::headers::authorization::Bearer; use std::collections::HashMap; @@ -238,7 +238,9 @@ pub async fn can_run_examples( RefOr::Ref(_reference) => { // This never happened during testing. // It is undocumented how the references would look like. - panic!("checking pro examples with references is not yet implemented") + panic!( + "checking pro examples with references is not yet implemented" + ) } RefOr::T(concrete) => { if let Some(body) = concrete.value { @@ -278,16 +280,16 @@ mod tests { use crate::api::model::services::Volume; use crate::ge_context; use crate::util::server::{configure_extractors, render_404, render_405}; - use actix_web::{http, middleware, post, web, App, HttpResponse, Responder}; + use actix_web::{App, HttpResponse, Responder, http, middleware, post, web}; use serde::Deserialize; use serde_json::json; + use utoipa::ToSchema; use utoipa::openapi::path::{OperationBuilder, ParameterBuilder, PathItemBuilder}; use utoipa::openapi::request_body::RequestBodyBuilder; use utoipa::openapi::{ ComponentsBuilder, ContentBuilder, Object, ObjectBuilder, OpenApiBuilder, PathItemType, PathsBuilder, }; - use utoipa::ToSchema; #[derive(Deserialize)] struct DummyQueryParams { diff --git a/services/src/util/openapi_visitor.rs b/services/src/util/openapi_visitor.rs index 1405b4220a..010c5b1f4d 100644 --- a/services/src/util/openapi_visitor.rs +++ b/services/src/util/openapi_visitor.rs @@ -1,5 +1,5 @@ use utoipa::openapi::{ - 
schema::AdditionalProperties, Components, OpenApi, Ref, RefOr, Response, Schema, + Components, OpenApi, Ref, RefOr, Response, Schema, schema::AdditionalProperties, }; pub trait OpenapiVisitor { diff --git a/services/src/util/openapi_visitors.rs b/services/src/util/openapi_visitors.rs index dee408b6fe..0270dfec3c 100644 --- a/services/src/util/openapi_visitors.rs +++ b/services/src/util/openapi_visitors.rs @@ -2,7 +2,7 @@ use std::collections::{HashMap, HashSet}; use utoipa::openapi::{OpenApi, RefOr, Schema}; -use super::openapi_visitor::{visit_api, OpenapiVisitor}; +use super::openapi_visitor::{OpenapiVisitor, visit_api}; struct CanResolveVisitor { pub unknown_ref: Option, @@ -67,11 +67,11 @@ pub fn get_schema_use_counts(api: &OpenApi) -> HashMap { mod tests { use geoengine_datatypes::hashmap; use utoipa::openapi::{ - path::{OperationBuilder, ParameterBuilder, PathItemBuilder}, - request_body::RequestBodyBuilder, AllOfBuilder, ArrayBuilder, Components, ComponentsBuilder, ContentBuilder, Object, ObjectBuilder, OneOfBuilder, OpenApiBuilder, PathItemType, PathsBuilder, Ref, ResponseBuilder, + path::{OperationBuilder, ParameterBuilder, PathItemBuilder}, + request_body::RequestBodyBuilder, }; use crate::util::openapi_visitor::visit_schema; diff --git a/services/src/util/operators.rs b/services/src/util/operators.rs index f47af0f898..0f995de0f5 100644 --- a/services/src/util/operators.rs +++ b/services/src/util/operators.rs @@ -36,7 +36,7 @@ pub fn source_operator_from_dataset( s => { return Err(crate::error::Error::UnknownOperator { operator: s.to_owned(), - }) + }); } }) } diff --git a/services/src/util/parsing.rs b/services/src/util/parsing.rs index e229e68ba5..872e912dbd 100644 --- a/services/src/util/parsing.rs +++ b/services/src/util/parsing.rs @@ -1,8 +1,8 @@ use crate::api::model::datatypes::BandSelection; use geoengine_datatypes::primitives::{Coordinate2D, SpatialPartition2D, SpatialResolution}; +use serde::Deserialize; use serde::de; use serde::de::Error; -use 
serde::Deserialize; use std::fmt; use std::marker::PhantomData; use std::str::FromStr; diff --git a/services/src/util/postgres.rs b/services/src/util/postgres.rs index d4c0c79444..4ead85effc 100644 --- a/services/src/util/postgres.rs +++ b/services/src/util/postgres.rs @@ -1,4 +1,4 @@ -use bb8_postgres::{bb8::PooledConnection, PostgresConnectionManager}; +use bb8_postgres::{PostgresConnectionManager, bb8::PooledConnection}; use serde::{Deserialize, Serialize}; use tokio_postgres::Config; diff --git a/services/src/util/server.rs b/services/src/util/server.rs index 18f920f2ca..5e0d7486fb 100644 --- a/services/src/util/server.rs +++ b/services/src/util/server.rs @@ -9,7 +9,7 @@ use actix_http::{Extensions, HttpMessage, StatusCode}; use actix_web::dev::{ServiceFactory, ServiceRequest, ServiceResponse}; use actix_web::error::{InternalError, JsonPayloadError, QueryPayloadError}; -use actix_web::{http, middleware, web, HttpRequest, HttpResponse}; +use actix_web::{HttpRequest, HttpResponse, http, middleware, web}; use futures::future::BoxFuture; use geoengine_datatypes::primitives::CacheHint; use log::debug; @@ -17,11 +17,11 @@ use log::debug; use std::any::Any; use std::num::NonZeroUsize; use std::time::Duration; -use tracing::log::info; use tracing::Span; +use tracing::log::info; use tracing_actix_web::{RequestId, RootSpanBuilder}; use url::Url; -use utoipa::{openapi::OpenApi, ToSchema}; +use utoipa::{ToSchema, openapi::OpenApi}; /// Custom root span for web requests that paste a request id to all logs. 
pub struct CustomRootSpanBuilder; @@ -371,7 +371,7 @@ pub fn connection_closed(req: &HttpRequest, timeout: Option) -> BoxFut let mut data = vec![]; let start = Instant::now(); - while timeout.map_or(true, |t| start.elapsed() >= t) { + while timeout.is_none_or(|t| start.elapsed() >= t) { let r = nix::sys::socket::recv(fd, data.as_mut_slice(), MsgFlags::MSG_PEEK); match r { diff --git a/services/src/util/tests.rs b/services/src/util/tests.rs index 068661ea30..541ffc784e 100644 --- a/services/src/util/tests.rs +++ b/services/src/util/tests.rs @@ -2,17 +2,17 @@ use super::postgres::DatabaseConnectionConfig; use crate::api::model::responses::ErrorResponse; -use crate::config::{get_config_element, Postgres}; +use crate::config::{Postgres, get_config_element}; use crate::contexts::ApplicationContext; use crate::contexts::GeoEngineDb; use crate::contexts::PostgresContext; +use crate::datasets::AddDataset; +use crate::datasets::DatasetIdAndName; +use crate::datasets::DatasetName; use crate::datasets::listing::Provenance; use crate::datasets::storage::DatasetStore; use crate::datasets::upload::UploadId; use crate::datasets::upload::UploadRootPath; -use crate::datasets::AddDataset; -use crate::datasets::DatasetIdAndName; -use crate::datasets::DatasetName; use crate::permissions::Permission; use crate::permissions::PermissionDb; use crate::permissions::Role; @@ -21,9 +21,9 @@ use crate::projects::{ Symbology, UpdateProject, }; use crate::users::OidcManager; +use crate::util::Identifier; use crate::util::middleware::OutputRequestId; use crate::util::server::{configure_extractors, render_404, render_405}; -use crate::util::Identifier; use crate::workflows::registry::WorkflowRegistry; use crate::workflows::workflow::{Workflow, WorkflowId}; use crate::{ @@ -38,10 +38,10 @@ use crate::{ }; use actix_web::dev::ServiceResponse; use actix_web::{ - http, http::header, http::Method, middleware, test, web, App, HttpResponse, Responder, + App, HttpResponse, Responder, http, http::Method, 
http::header, middleware, test, web, }; -use bb8_postgres::bb8::ManageConnection; use bb8_postgres::PostgresConnectionManager; +use bb8_postgres::bb8::ManageConnection; use flexi_logger::Logger; use futures_util::Future; use geoengine_datatypes::dataset::DatasetId; @@ -432,29 +432,35 @@ pub async fn add_file_definition_to_datasets( // rewrite metadata to use the correct file path def.meta_data = match def.meta_data { MetaDataDefinition::GdalStatic(mut meta_data) => { - meta_data.params.file_path = test_data!(meta_data - .params - .file_path - .strip_prefix("test_data/") - .unwrap()) + meta_data.params.file_path = test_data!( + meta_data + .params + .file_path + .strip_prefix("test_data/") + .unwrap() + ) .into(); MetaDataDefinition::GdalStatic(meta_data) } MetaDataDefinition::GdalMetaDataRegular(mut meta_data) => { - meta_data.params.file_path = test_data!(meta_data - .params - .file_path - .strip_prefix("test_data/") - .unwrap()) + meta_data.params.file_path = test_data!( + meta_data + .params + .file_path + .strip_prefix("test_data/") + .unwrap() + ) .into(); MetaDataDefinition::GdalMetaDataRegular(meta_data) } MetaDataDefinition::OgrMetaData(mut meta_data) => { - meta_data.loading_info.file_name = test_data!(meta_data - .loading_info - .file_name - .strip_prefix("test_data/") - .unwrap()) + meta_data.loading_info.file_name = test_data!( + meta_data + .loading_info + .file_name + .strip_prefix("test_data/") + .unwrap() + ) .into(); MetaDataDefinition::OgrMetaData(meta_data) } @@ -1047,7 +1053,7 @@ pub(crate) mod mock_oidc { use chrono::{Duration, Utc}; use httptest::matchers::{matches, request}; use httptest::responders::status_code; - use httptest::{all_of, Expectation, Server}; + use httptest::{Expectation, Server, all_of}; use oauth2::basic::BasicTokenType; use oauth2::{ AccessToken, AuthUrl, EmptyExtraTokenFields, RefreshToken, Scope, StandardTokenResponse, diff --git a/services/src/workflows/postgres_workflow_registry.rs 
b/services/src/workflows/postgres_workflow_registry.rs index 6386c161b0..7cd46e6c00 100644 --- a/services/src/workflows/postgres_workflow_registry.rs +++ b/services/src/workflows/postgres_workflow_registry.rs @@ -5,7 +5,7 @@ use crate::workflows::workflow::{Workflow, WorkflowId}; use crate::{error, workflows::registry::WorkflowRegistry}; use async_trait::async_trait; use bb8_postgres::{ - tokio_postgres::tls::MakeTlsConnect, tokio_postgres::tls::TlsConnect, tokio_postgres::Socket, + tokio_postgres::Socket, tokio_postgres::tls::MakeTlsConnect, tokio_postgres::tls::TlsConnect, }; use snafu::ResultExt; diff --git a/services/src/workflows/raster_stream.rs b/services/src/workflows/raster_stream.rs index 2a848d569b..d86ecef376 100644 --- a/services/src/workflows/raster_stream.rs +++ b/services/src/workflows/raster_stream.rs @@ -1,10 +1,10 @@ use crate::{contexts::SessionContext, error::Result}; use actix::{ - fut::wrap_future, Actor, ActorContext, ActorFutureExt, AsyncContext, SpawnHandle, StreamHandler, + Actor, ActorContext, ActorFutureExt, AsyncContext, SpawnHandle, StreamHandler, fut::wrap_future, }; use actix_http::ws::{CloseCode, CloseReason}; use actix_web_actors::ws; -use futures::{stream::BoxStream, FutureExt, StreamExt, TryFutureExt, TryStreamExt}; +use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt, stream::BoxStream}; use geoengine_datatypes::{ primitives::RasterQueryRectangle, raster::raster_tile_2d_to_arrow_ipc_file, }; diff --git a/services/src/workflows/vector_stream.rs b/services/src/workflows/vector_stream.rs index 8617d5effa..df59676b98 100644 --- a/services/src/workflows/vector_stream.rs +++ b/services/src/workflows/vector_stream.rs @@ -1,10 +1,10 @@ use crate::{contexts::SessionContext, error::Result}; use actix::{ - fut::wrap_future, Actor, ActorContext, ActorFutureExt, AsyncContext, SpawnHandle, StreamHandler, + Actor, ActorContext, ActorFutureExt, AsyncContext, SpawnHandle, StreamHandler, fut::wrap_future, }; use 
actix_http::ws::{CloseCode, CloseReason, Item}; use actix_web_actors::ws; -use futures::{stream::BoxStream, FutureExt, StreamExt, TryFutureExt, TryStreamExt}; +use futures::{FutureExt, StreamExt, TryFutureExt, TryStreamExt, stream::BoxStream}; use geoengine_datatypes::{collections::FeatureCollectionIpc, primitives::VectorQueryRectangle}; use geoengine_operators::{ call_on_generic_vector_processor,