diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 211d44bd5f6..f7e737cbfd6 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @s8sato @appetrosyan @mversic @arjentix @SamHSmith @QuentinI @outoftardis @ilchu @pesterev @Erigara @0x009922 @DCNick3 +* @s8sato @mversic @arjentix @SamHSmith @QuentinI @outoftardis @ilchu @pesterev @Erigara @0x009922 @DCNick3 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 3085bf1ac2f..ab66c60c668 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -25,12 +25,6 @@ updates: schedule: interval: "daily" - - package-ecosystem: "cargo" - target-branch: "iroha2-dev" - directory: "/wasm/" - schedule: - interval: "daily" - - package-ecosystem: "cargo" target-branch: "iroha2-dev" directory: "/default_validator/" diff --git a/.github/workflows/iroha2-ci-image.yml b/.github/workflows/iroha2-ci-image.yml index 400681ffc40..c7721b6e6d2 100644 --- a/.github/workflows/iroha2-ci-image.yml +++ b/.github/workflows/iroha2-ci-image.yml @@ -6,13 +6,13 @@ jobs: dockerhub: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: docker/login-action@v2 + - uses: actions/checkout@v4 + - uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push iroha2-ci image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: push: true tags: hyperledger/iroha2-ci:nightly-2023-06-25 diff --git a/.github/workflows/iroha2-dev-nightly.yml b/.github/workflows/iroha2-dev-nightly.yml index 766cbdc4403..94f98f711c8 100644 --- a/.github/workflows/iroha2-dev-nightly.yml +++ b/.github/workflows/iroha2-dev-nightly.yml @@ -8,13 +8,13 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 - - uses: docker/login-action@v2 + - uses: actions/checkout@v4 + - uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Build and push iroha2:dev-nightly image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: push: true tags: hyperledger/iroha2:dev-nightly-${{ github.sha }} diff --git a/.github/workflows/iroha2-dev-pr-label.yml b/.github/workflows/iroha2-dev-pr-label.yml index 946adb60e76..cbf581bd692 100644 --- a/.github/workflows/iroha2-dev-pr-label.yml +++ b/.github/workflows/iroha2-dev-pr-label.yml @@ -5,19 +5,18 @@ on: branches: [iroha2-dev] paths: - 'docs/source/references/schema.json' - - 'docs/source/references/api_spec.md' - 'docs/source/references/config.md' jobs: api-changes: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: jitterbit/get-changed-files@v1 continue-on-error: true id: api_label - uses: actions-ecosystem/action-add-labels@v1 - if: contains(steps.api_label.outputs.added_modified, 'docs/source/references/schema.json') || contains(steps.api_label.outputs.added_modified, 'docs/source/references/api_spec.md') + if: contains(steps.api_label.outputs.added_modified, 'docs/source/references/schema.json') with: github_token: ${{ secrets.github_token }} labels: | @@ -26,7 +25,7 @@ jobs: config-changes: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: jitterbit/get-changed-files@v1 continue-on-error: true id: config_label diff --git a/.github/workflows/iroha2-dev-pr-static.yml b/.github/workflows/iroha2-dev-pr-static.yml index 4c84fedb4f6..7fadced88d1 100644 --- 
a/.github/workflows/iroha2-dev-pr-static.yml +++ b/.github/workflows/iroha2-dev-pr-static.yml @@ -9,9 +9,6 @@ on: - '**.toml' - '.github/workflows/**.yml' - # Not part of the workspace - - '!wasm/**' - concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true @@ -25,17 +22,17 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Format run: cargo fmt --all -- --check - name: Lints without features if: always() - run: cargo lints clippy --workspace --benches --tests --examples --no-default-features --quiet + run: cargo clippy -Zlints --workspace --benches --tests --examples --no-default-features --quiet - name: Lints with all features enabled if: always() - run: cargo lints clippy --workspace --benches --tests --examples --all-features --quiet + run: cargo clippy -Zlints --workspace --benches --tests --examples --all-features --quiet - name: Documentation if: always() run: cargo doc --no-deps --quiet diff --git a/.github/workflows/iroha2-dev-pr-ui.yml b/.github/workflows/iroha2-dev-pr-ui.yml index ef2881c9e84..603b1331c5a 100644 --- a/.github/workflows/iroha2-dev-pr-ui.yml +++ b/.github/workflows/iroha2-dev-pr-ui.yml @@ -26,7 +26,11 @@ jobs: matrix: feature_flag: [all-features, no-default-features] steps: - - uses: actions/checkout@v3 + - name: Maximize build space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Run tests, with ${{ matrix.feature_flag }} run: | diff --git a/.github/workflows/iroha2-dev-pr-wasm.yaml b/.github/workflows/iroha2-dev-pr-wasm.yaml index ca9f785c51c..a5019907ccd 100644 --- a/.github/workflows/iroha2-dev-pr-wasm.yaml +++ b/.github/workflows/iroha2-dev-pr-wasm.yaml @@ -1,20 +1,19 @@ name: I2::Dev::Wasm -defaults: - run: - working-directory: wasm - on: pull_request: branches: [iroha2-dev] paths: - - 'wasm/**.rs' - - 'wasm/**.json' - - 'wasm/**.toml' - - 'wasm/**.yml' - 'data_model/**.rs' + - 'data_model/**.yml' + - 'data_model/**.json' - 'data_model/**.toml' + - 'smart_contract/**.rs' + - 'smart_contract/**.yml' + - 'smart_contract/**.json' + - 'smart_contract/**.toml' + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true @@ -23,29 +22,15 @@ env: RUSTUP_TOOLCHAIN: nightly-2023-06-25 jobs: - static-analysis: - runs-on: ubuntu-latest - container: - image: hyperledger/iroha2-ci:nightly-2023-06-25 - steps: - - uses: actions/checkout@v3 - - uses: Swatinem/rust-cache@v2 - - - name: Format - run: cargo fmt --all -- --check - - name: Lints - if: always() - run: cargo lints clippy --workspace --benches --tests --examples --quiet - - name: Documentation - if: always() - run: cargo doc --no-deps --quiet - tests: runs-on: ubuntu-latest #[self-hosted, Linux] container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - - name: Run tests - run: mold --run cargo test --tests --no-fail-fast --quiet + - name: Install iroha_wasm_test_runner + run: cargo install --path tools/wasm_test_runner + - name: Run smart contract tests on WebAssembly VM + working-directory: smart_contract + run: mold --run cargo test --tests --target wasm32-unknown-unknown --no-fail-fast --quiet diff --git a/.github/workflows/iroha2-dev-pr.yml b/.github/workflows/iroha2-dev-pr.yml index 5356fb282a9..b78ede1a64a 100644 --- 
a/.github/workflows/iroha2-dev-pr.yml +++ b/.github/workflows/iroha2-dev-pr.yml @@ -9,46 +9,40 @@ on: - '**.toml' - '.github/workflows/**.yml' - # Not part of the workspace - - '!wasm/**' - concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true env: CARGO_TERM_COLOR: always + CLIENT_CLI_DIR: "/__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/test" jobs: - check: + consistency: runs-on: [self-hosted, Linux, iroha2ci] container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Check config.md if: always() - run: ./scripts/check.sh docs + run: ./scripts/tests/consistency.sh docs - name: Check genesis.json if: always() - run: ./scripts/check.sh genesis + run: ./scripts/tests/consistency.sh genesis - name: Check client/config.json if: always() - run: ./scripts/check.sh client + run: ./scripts/tests/consistency.sh client - name: Check peer/config.json if: always() - run: ./scripts/check.sh peer + run: ./scripts/tests/consistency.sh peer - name: Check schema.json if: always() - run: ./scripts/check.sh schema + run: ./scripts/tests/consistency.sh schema - name: Check Docker Compose configurations if: always() - run: ./scripts/check.sh docker-compose - - name: Wasm build check - if: always() - working-directory: wasm - run: mold --run cargo build --target wasm32-unknown-unknown --quiet + run: ./scripts/tests/consistency.sh docker-compose with_coverage: runs-on: [self-hosted, Linux, iroha2ci] @@ -100,7 +94,7 @@ jobs: image: hyperledger/iroha2-ci:nightly-2023-06-25 timeout-minutes: 60 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Run tests run: mold --run cargo test -p iroha_client --tests --no-default-features unstable_network --quiet @@ -113,9 +107,9 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Login to Soramitsu Harbor - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: docker.soramitsu.co.jp username: ${{ secrets.HARBOR_USERNAME }} @@ -137,3 +131,34 @@ jobs: file: Dockerfile # This context specification is required context: . 
+ + client-cli-tests: + runs-on: [self-hosted, Linux, iroha2ci] + container: + image: hyperledger/iroha2-ci:nightly-2023-06-25 + timeout-minutes: 60 + steps: + - uses: actions/checkout@v4 + - uses: Swatinem/rust-cache@v2 + - name: Build binaries + run: | + cargo build --bin iroha_client_cli + cargo build --bin kagami + cargo build --bin iroha + - name: Setup test Iroha 2 environment on the bare metal + run: | + ./scripts/test_env.py setup + - name: Mark binaries as executable + run: | + chmod +x ${{ env.CLIENT_CLI_DIR }} + - name: Install dependencies using Poetry + working-directory: client_cli/pytests + run: | + poetry install + - name: Run client cli tests + working-directory: client_cli/pytests + run: | + poetry run pytest + - name: Cleanup test environment + run: | + ./scripts/test_env.py cleanup diff --git a/.github/workflows/iroha2-dev.yml b/.github/workflows/iroha2-dev.yml index b2c1d0ec98e..995581a61af 100644 --- a/.github/workflows/iroha2-dev.yml +++ b/.github/workflows/iroha2-dev.yml @@ -13,13 +13,13 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 - - uses: docker/login-action@v2 + - uses: actions/checkout@v4 + - uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to Soramitsu Harbor - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: docker.soramitsu.co.jp username: ${{ secrets.HARBOR_USERNAME }} @@ -27,11 +27,11 @@ jobs: - name: Set up Docker Buildx id: buildx if: always() - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: install: true - name: Build and push iroha2:dev image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 if: always() with: push: true @@ -51,7 +51,7 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Build run: mold --run cargo build --release --verbose @@ -87,7 +87,7 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Run debug tests and save telemetry env: diff --git a/.github/workflows/iroha2-release-pr.yml b/.github/workflows/iroha2-release-pr.yml index 7959d47a6a6..6a0e0d9c87e 100644 --- a/.github/workflows/iroha2-release-pr.yml +++ b/.github/workflows/iroha2-release-pr.yml @@ -10,38 +10,50 @@ concurrency: env: CARGO_TERM_COLOR: always + CLIENT_CLI_DIR: "/__w/${{ github.event.repository.name }}/${{ github.event.repository.name }}/test" + ALLURE_RESULTS: "${{ github.workspace }}/allure-results" + ALLURE_JOB_RUN_ID: ${{ github.event.inputs.ALLURE_JOB_RUN_ID }} jobs: - cli: - runs-on: ubuntu-latest #[self-hosted, Linux] + client-cli-tests: + runs-on: [self-hosted, Linux, iroha2ci] container: image: hyperledger/iroha2-ci:nightly-2023-06-25 + timeout-minutes: 60 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - - name: Build iroha - working-directory: cli - run: mold --run cargo build - - name: Build iroha_client_cli - working-directory: client_cli - run: mold --run cargo build - - name: Build kagami - working-directory: tools/kagami - run: mold --run cargo build + - name: Install and configure allurectl + uses: allure-framework/setup-allurectl@v1 + with: + allure-endpoint: https://soramitsu.testops.cloud + allure-token: ${{ secrets.ALLURE_TOKEN }} + 
allure-project-id: 1 + - name: Build binaries + run: | + cargo build --bin iroha_client_cli + cargo build --bin kagami + cargo build --bin iroha + - name: Setup test Iroha 2 environment on bare metal + run: | + ./scripts/test_env.py setup - name: Mark binaries as executable run: | - chmod +x target/debug/iroha - chmod +x target/debug/iroha_client_cli - chmod +x target/debug/kagami - - name: Setup test environment - run: python3 './scripts/test_env.py setup' - - name: Genesis test - run: bash -c './scripts/tests/genesis.sh || (cat test/peers/iroha0/.log; false )' - - name: Basic register and mint - if: always() - run: bash -c './scripts/tests/register_mint_quantity.sh || (cat test/peers/iroha0/.log; false )' + chmod +x ${{ env.CLIENT_CLI_DIR }} + - name: Install dependencies using Poetry + working-directory: client_cli/pytests + run: | + poetry install + - name: Run client cli tests and upload results to Allure Test Ops + working-directory: client_cli/pytests + run: | + allurectl watch -- poetry run pytest --alluredir=${ALLURE_RESULTS} + printenv | grep GITHUB_TESTS_ + env: + GITHUB_TESTS_REF_NAME: ${{ github.ref_name }} - name: Cleanup test environment - run: python3 './scripts/test_env.py cleanup' + run: | + ./scripts/test_env.py cleanup - name: Panic on invalid genesis test run: bash -c './scripts/tests/panic_on_invalid_genesis.sh' @@ -50,7 +62,11 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - name: Maximize build space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + - uses: actions/checkout@v4 - uses: Swatinem/rust-cache@v2 - name: Run benchmarks run: mold --run cargo bench --workspace --quiet @@ -62,9 +78,13 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - name: Maximize build space + run: | + sudo rm -rf /usr/share/dotnet + sudo rm -rf "$AGENT_TOOLSDIRECTORY" + - uses: actions/checkout@v4 - name: Set up JDK 11 - uses: actions/setup-java@v3.11.0 + uses: actions/setup-java@v3.13.0 with: java-version: '11' distribution: 'temurin' @@ -112,6 +132,16 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - name: Maximize build space + uses: jlumbroso/free-disk-space@v1.3.1 + with: + tool-cache: true + android: true + dotnet: true + haskell: true + large-packages: true + docker-images: false + swap-storage: true + - uses: actions/checkout@v4 - name: Run long tests run: mold --run cargo test --workspace --no-fail-fast -- --ignored --test-threads=1 long diff --git a/.github/workflows/iroha2-release.yml b/.github/workflows/iroha2-release.yml index e19fb17f9ce..8395c54af48 100644 --- a/.github/workflows/iroha2-release.yml +++ b/.github/workflows/iroha2-release.yml @@ -13,10 +13,10 @@ jobs: container: image: hyperledger/iroha2-ci:nightly-2023-06-25 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 with: install: true - name: Get tag from branch name @@ -30,21 +30,22 @@ jobs: RELEASE=$(curl -s https://raw.githubusercontent.com/hyperledger/iroha/${{ github.ref_name }}/Cargo.toml | sed -n '3p' | sed -e 's/version = "//g' -e 's/"$//' | tr -d '\n') echo "RELEASE=$RELEASE" >>$GITHUB_ENV - name: Login to DockerHub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - 
name: Login to Soramitsu Harbor - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: docker.soramitsu.co.jp username: ${{ secrets.HARBOR_USERNAME }} password: ${{ secrets.HARBOR_TOKEN }} - name: Build and push iroha2 image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: push: true tags: | + hyperledger/iroha2:${{ env.TAG }} hyperledger/iroha2:${{ env.TAG }}-${{ env.RELEASE }} docker.soramitsu.co.jp/iroha2/iroha2:${{ env.TAG }}-${{ env.RELEASE }} labels: commit=${{ github.sha }} @@ -59,22 +60,23 @@ jobs: runs-on: ubuntu-latest container: image: hyperledger/iroha2-ci:nightly-2023-06-25 + permissions: + contents: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: ref: iroha2-dev - - name: Setup git config - run: | - cd .git - git config --local user.name "sorabot" - git config --local user.email "<>" + token: ${{ secrets.G_ACCESS_TOKEN }} - name: Update configs run: | ./scripts/update_configs.sh lts ./scripts/update_configs.sh stable - name: Commit config changes - run: | - git config --global --add safe.directory /__w/iroha/iroha - git add -A - git diff-index --quiet HEAD || git commit -m "[documentation]: Update lts/stable configs following a release" --signoff - git push origin iroha2-dev + uses: stefanzweifel/git-auto-commit-action@v5 + with: + commit_message: '[documentation]: Update lts/stable configs following a release' + branch: iroha2-dev + commit_options: '--signoff' + commit_user_name: sorabot + commit_user_email: <> + commit_author: sorabot diff --git a/.gitignore b/.gitignore index 2b7fbabbbc4..c17e43995f8 100644 --- a/.gitignore +++ b/.gitignore @@ -34,6 +34,7 @@ cmake-build-debug/* external/* core/infra/protobuf include/generated/* +.obsidian/ .scannerwork/ peers.list cmake-build* diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 959df2f8708..30e10f03099 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,103 @@ Changelog ========= +2.0.0-pre-rc.20 +--------------- + +Features +~~~~~~~~ +* make FindTrigger queries return original WASM +* Transfer `Domain` ownership +* `Domain` owner permissions +* Add `owned_by` field to `Domain` +* parse filter as JSON5 in `iroha_client_cli` (#3923) +* Add support for usage of Self type in serde partially tagged enums +* Standardize block API (#3884) +* Implement `Fast` kura init mode +* Add iroha_swarm disclaimer header +* initial support for WSV snapshots + +Bugfixes +~~~~~~~~ +* Fix executor downloading in update_configs.sh (#3990) +* proper rustc in devShell +* Fix burn `Trigger` repetitions +* Fix transfer `AssetDefinition` +* Fix `RemoveKeyValue` for `Domain` +* Fix double free in wasm tests +* Fix the usage of `Span::join` +* Fix topology mismatch bug (#3903) +* Fix `apply_blocks` and `validate_blocks` benchmark +* Fix wasm memory leak +* `mkdir -r` with store path, not lock path (#3908) +* Don't fail if dir exists in test_env.py +* Fix authentication/authorization docstring (#3876) +* Better error message for query find error +* Add genesis account public key to dev docker compose +* Compare permission token payload as JSON (#3855) +* Fix `irrefutable_let_patterns` in the `#[model]` macro +* Allow genesis to execute any ISI (#3850) +* Fix genesis validation (#3844) +* Fix topology for 3 or fewer peers +* Correct how tx_amounts histogram is calculated. 
+* `genesis_transactions_are_validated()` test flakiness +* Default validator generation +* Fix iroha graceful shutdown + +Refactor +~~~~~~~~ +* remove unused dependencies (#3992) +* remove optimized WASM from data_model +* bump dependencies (#3981) +* Rename validator to executor (#3976) +* Remove `IsAssetDefinitionOwner` (#3979) +* Include smart contract code into the workspace (#3944) +* Merge API and Telemetry endpoints into a single server +* move expression len out of public API into core (#3949) +* Avoid clone in roles lookup +* Range queries for roles +* Move account roles to `WSV` +* Rename ISI from *Box to *Expr (#3930) +* Remove 'Versioned' prefix from versioned containers (#3913) +* move `commit_topology` into block payload (#3916) +* Migrate iroha_futures_derive to syn 2.0 +* Registered with Identifiable in ISI bounds (#3925) +* Add basic generics support to `derive(HasOrigin)` +* Clean up Emitter APIs documentation to make clippy happy +* Add tests for derive(HasOrigin) macro, reduce repetition in derive(IdEqOrdHash), fix error reporting on stable +* Improve naming, simplify repeated .filter_maps & get rid of unnecessary .expect in derive(Filter) +* Make PartiallyTaggedSerialize/Deserialize use darling +* Make derive(IdEqOrdHash) use darling, add tests +* Make derive(Filter) use darling +* Update iroha_data_model_derive to use syn 2.0 +* Add signature check condition unit tests +* Allow only a fixed set of signature verification conditions +* Generalize ConstBytes into a ConstVec that holds any const sequence +* Use a more efficient representation for bytes values that are not changing +* Store finalized wsv in snapshot +* Add `SnapshotMaker` actor +* document limitation of parsing derives in proc macros +* clean up comments +* extract a common test utility for parsing attributes to lib.rs +* use parse_display & update Attr -> Attrs naming +* allow usage of pattern matching in ffi function args +* reduce repetition in getset attrs parsing +* rename Emitter::into_token_stream into Emitter::finish_token_stream +* Use parse_display to parse getset tokens +* Fix typos and improve error messages +* iroha_ffi_derive: use darling to parse attributes and use syn 2.0 +* iroha_ffi_derive: replace proc-macro-error with manyhow +* Simplify kura lock file code +* make all numeric values serialize as string literals +* Split off Kagami (#3841) +* Rewrite `scripts/test-env.sh` +* Differentiate between smart contract and trigger entrypoints +* Elide `.cloned()` in `data_model/src/block.rs` +* Wasm entrypoint payloads +* Make wasm entrypoint names to be public constants +* update `iroha_schema_derive` to use syn 2.0 +* store original contract WASM in TriggerSet + 2.0.0-pre-rc.19 --------------- diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 907b4bf321e..2fc9c77a9be 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -304,7 +304,7 @@ Code guidelines: - Avoid `Box` if possible (we prefer strong typing). - If your function is a getter/setter, mark it `#[inline]`. - If your function is a constructor (i.e., it's creating a new value from the input parameters and calls `default()`), mark it `#[inline]`. -- Avoid tying your code to concrete data structures; `rustc` is smart enough to turn a `Vec` into `impl IntoIterator` and vice versa when it needs to. +- Avoid tying your code to concrete data structures; `rustc` is smart enough to turn a `Vec` into `impl IntoIterator` and vice versa when it needs to. 
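To make that guideline concrete, here is a minimal sketch; `sum_amounts` and its callers are hypothetical names for illustration, not functions from the Iroha codebase:

    // Taking `impl IntoIterator` instead of a concrete `Vec<u64>` lets the
    // caller pass a Vec, an array, an iterator chain, or any other iterable.
    fn sum_amounts(amounts: impl IntoIterator<Item = u64>) -> u64 {
        amounts.into_iter().sum()
    }

    fn main() {
        assert_eq!(sum_amounts(vec![1, 2, 3]), 6); // a concrete Vec still works
        assert_eq!(sum_amounts([4, 5, 6]), 15);    // so does a plain array
    }

The generic signature costs nothing at runtime, since rustc monomorphizes it per call site, while freeing callers from allocating a `Vec` they do not otherwise need.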
Naming guidelines: - Use only full words in *public* structure, variable, method, trait, constant, and module names. However, abbreviations are allowed if: diff --git a/Cargo.lock b/Cargo.lock index 53d938da852..14830f7adea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,18 +4,9 @@ version = 3 [[package]] name = "addr2line" -version = "0.19.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a76fd60b23679b7d19bd066031410fb7e458ccc5e958eb5c325888ce4baedc97" -dependencies = [ - "gimli", -] - -[[package]] -name = "addr2line" -version = "0.20.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" dependencies = [ "gimli", ] @@ -27,57 +18,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] -name = "aead" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fc95d1bdb8e6666b2b217308eeeb09f2d6728d104be3e31916cc74d15420331" -dependencies = [ - "generic-array 0.14.7", -] - -[[package]] -name = "aes" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "884391ef1066acaa41e766ba8f596341b96e93ce34f9a43e7d24bf0a0eaf0561" -dependencies = [ - "aes-soft", - "aesni", - "cipher", -] - -[[package]] -name = "aes-gcm" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5278b5fabbb9bd46e24aa69b2fdea62c99088e0a950a9be40e3e0101298f88da" -dependencies = [ - "aead", - "aes", - "cipher", - "ctr", - "ghash", - "subtle", -] - -[[package]] -name = "aes-soft" -version = "0.6.4" +name = "adler32" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be14c7498ea50828a38d0e24a765ed2effe92a705885b57d029cd67d45744072" -dependencies = [ - "cipher", - "opaque-debug 0.3.0", -] +checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234" [[package]] -name = "aesni" -version = "0.10.0" +name = "aead" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2e11f5e94c2f7d386164cc2aa1f97823fed6f259e486940a71c174dd01b0ce" +checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ - "cipher", - "opaque-debug 0.3.0", + "crypto-common", + "generic-array 0.14.7", ] [[package]] @@ -94,9 +47,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.0.2" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43f6cb1bf222025340178f382c426f13757b2960e89779dfcb319c32542a5a41" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] @@ -121,7 +74,7 @@ dependencies = [ "serde", "serde_bytes", "serde_json", - "sha3 0.8.2", + "sha3", "subtle-encoding", "zeroize", ] @@ -141,32 +94,37 @@ dependencies = [ "libc", ] +[[package]] +name = "anes" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" + [[package]] name = "anstream" -version = "0.3.2" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = 
"2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "317b9a89c1868f5ea6ff1d9539a69f45dffc21ce321ac1fd1160dfa48c8e2140" dependencies = [ "utf8parse", ] @@ -182,9 +140,9 @@ dependencies = [ [[package]] name = "anstyle-wincon" -version = "1.0.1" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" dependencies = [ "anstyle", "windows-sys 0.48.0", @@ -192,15 +150,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.72" +version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arbitrary" -version = "1.3.0" +version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d098ff73c1ca148721f37baad5ea6a465a13f9573aba8641fbbbae8164a54e" +checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" [[package]] name = "arc-swap" @@ -239,33 +197,33 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "async-trait" -version = "0.1.71" +version = "0.1.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a564d521dd56509c4c47480d00b80ee55f7e385ae48db5744c67ad50c92d2ebf" +checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "attohttpc" -version = "0.18.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e69e13a99a7e6e070bb114f7ff381e58c7ccc188630121fc4c2fe4bcf24cd072" +checksum = "0f77d243921b0979fbbd728dd2d5162e68ac8252976797c24eb5b3a6af9090dc" dependencies = [ - "flate2", "http", "log", "native-tls", - "openssl", + "rustls", + "rustls-native-certs", "url", - "wildmatch", + "webpki-roots", ] [[package]] @@ -279,15 +237,6 @@ dependencies = [ "winapi", ] -[[package]] -name = "autocfg" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dde43e75fd43e8a1bf86103336bc699aa8d17ad1be60c76c0bdfd4828e19b78" -dependencies = [ - "autocfg 1.1.0", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -296,9 +245,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.6.19" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a1de45611fdb535bfde7b7de4fd54f4fd2b17b1737c0a59b69bf9b92074b8c" +checksum = 
"3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", "axum-core", @@ -341,19 +290,25 @@ dependencies = [ [[package]] name = "backtrace" -version = "0.3.68" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" dependencies = [ - "addr2line 0.20.0", + "addr2line", "cc", "cfg-if", "libc", "miniz_oxide", - "object 0.31.1", + "object", "rustc-demangle", ] +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + [[package]] name = "base64" version = "0.13.1" @@ -362,9 +317,15 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" + +[[package]] +name = "base64ct" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-toml" @@ -407,9 +368,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "bitvec" @@ -425,13 +386,11 @@ dependencies = [ [[package]] name = "blake2" -version = "0.9.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e37d16930f5459780f5621038b6382b9bb37c19016f39fb6b5808d831f174" +checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" dependencies = [ - "crypto-mac 0.8.0", - "digest 0.9.0", - "opaque-debug 0.3.0", + "digest 0.10.7", ] [[package]] @@ -440,22 +399,12 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" dependencies = [ - "block-padding 0.1.5", + "block-padding", "byte-tools", "byteorder", "generic-array 0.12.4", ] -[[package]] -name = "block-buffer" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" -dependencies = [ - "block-padding 0.2.1", - "generic-array 0.14.7", -] - [[package]] name = "block-buffer" version = "0.10.4" @@ -465,16 +414,6 @@ dependencies = [ "generic-array 0.14.7", ] -[[package]] -name = "block-modes" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a0e8073e8baa88212fb5823574c02ebccb395136ba9a164ab89379ec6072f0" -dependencies = [ - "block-padding 0.2.1", - "cipher", -] - [[package]] name = "block-padding" version = "0.1.5" @@ -484,20 +423,14 @@ dependencies = [ "byte-tools", ] -[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - [[package]] name = "bstr" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" +checksum = "c79ad7fb2dd38f3dabd76b09c6a5a20c038fc0213ef1e9afd30eb777f120f019" dependencies = [ "memchr", - "regex-automata 0.3.3", + "regex-automata 0.4.1", "serde", ] @@ -512,9 +445,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "byte-slice-cast" @@ -540,15 +473,15 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "c2-chacha" @@ -567,11 +500,12 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.79" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" dependencies = [ "jobserver", + "libc", ] [[package]] @@ -582,19 +516,20 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chacha20" -version = "0.6.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed8738f14471a99f0e316c327e68fc82a3611cc2895fcb604b89eedaf8f39d95" +checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" dependencies = [ + "cfg-if", "cipher", - "zeroize", + "cpufeatures", ] [[package]] name = "chacha20poly1305" -version = "0.7.1" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1fc18e6d90c40164bf6c317476f2a98f04661e310e79830366b7e914c58a8e" +checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35" dependencies = [ "aead", "chacha20", @@ -605,37 +540,54 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.26" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", - "time 0.1.45", "wasm-bindgen", - "winapi", + "windows-targets 0.48.5", ] [[package]] -name = "cipher" -version = "0.2.5" +name = "ciborium" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12f8e7987cbd042a63249497f41aed09f8e65add917ea6566effbc56578d6801" +checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" dependencies = [ - "generic-array 0.14.7", + "ciborium-io", + 
"ciborium-ll", + "serde", ] [[package]] -name = "clap" -version = "2.34.0" +name = "ciborium-io" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" + +[[package]] +name = "ciborium-ll" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" +checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" dependencies = [ - "bitflags 1.3.2", - "textwrap 0.11.0", - "unicode-width", + "ciborium-io", + "half", +] + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", + "zeroize", ] [[package]] @@ -652,30 +604,28 @@ dependencies = [ "once_cell", "strsim", "termcolor", - "textwrap 0.16.0", + "textwrap", ] [[package]] name = "clap" -version = "4.3.15" +version = "4.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f644d0dac522c8b05ddc39aaaccc5b136d5dc4ff216610c5641e3be5becf56c" +checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956" dependencies = [ "clap_builder", - "clap_derive 4.3.12", - "once_cell", + "clap_derive 4.4.2", ] [[package]] name = "clap_builder" -version = "4.3.15" +version = "4.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af410122b9778e024f9e0fb35682cc09cc3f85cad5e8d3ba8f47a9702df6e73d" +checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45" dependencies = [ "anstream", "anstyle", - "clap_lex 0.5.0", - "once_cell", + "clap_lex 0.5.1", "strsim", ] @@ -694,14 +644,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.3.12" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -715,9 +665,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.5.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" [[package]] name = "clru" @@ -794,10 +744,11 @@ dependencies = [ [[package]] name = "console-api" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2895653b4d9f1538a83970077cb01dfc77a4810524e51a110944688e916b18e" +checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" dependencies = [ + "futures-core", "prost", "prost-types", "tonic", @@ -806,14 +757,14 @@ dependencies = [ [[package]] name = "console-subscriber" -version = "0.1.10" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4cf42660ac07fcebed809cfe561dd8730bcd35b075215e6479c516bcd0d11cb" +checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" dependencies = [ "console-api", "crossbeam-channel", "crossbeam-utils", - "futures", + "futures-task", "hdrhistogram", "humantime", "prost-types", @@ -830,9 +781,9 @@ dependencies = [ [[package]] name = 
"const-oid" -version = "0.6.2" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6f2aa4d0537bcc1c74df8755072bd31c1ef1a3a1b85a68e8404a8c353b7b8b" +checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" [[package]] name = "core-foundation" @@ -850,6 +801,15 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +[[package]] +name = "core2" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b49ba7ef1ad6107f8824dbe97de947cbaac53c44e7f9756a1fba0d37c1eec505" +dependencies = [ + "memchr", +] + [[package]] name = "cpp_demangle" version = "0.3.5" @@ -868,26 +828,20 @@ dependencies = [ "libc", ] -[[package]] -name = "cpuid-bool" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcb25d077389e53838a8158c8e99174c5a9d902dee4904320db714f3c653ffba" - [[package]] name = "cranelift-bforest" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1380172556902242d32f78ed08c98aac4f5952aef22d3684aed5c66a5db0a6fc" +checksum = "76eb38f2af690b5a4411d9a8782b6d77dabff3ca939e0518453ab9f9a4392d41" dependencies = [ "cranelift-entity", ] [[package]] name = "cranelift-codegen" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "037cca234e1ad0766fdfe43b527ec14e100414b4ccf4bb614977aa9754958f57" +checksum = "39526c036b92912417e8931f52c1e235796688068d3efdbbd8b164f299d19156" dependencies = [ "bumpalo", "cranelift-bforest", @@ -897,7 +851,7 @@ dependencies = [ "cranelift-entity", "cranelift-isle", "gimli", - "hashbrown 0.13.2", + "hashbrown 0.14.1", "log", "regalloc2", "smallvec", @@ -906,42 +860,43 @@ dependencies = [ [[package]] name = "cranelift-codegen-meta" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d375e6afa8b9a304999ea8cf58424414b8e55e004571265a4f0826eba8b74f18" +checksum = "fdb0deedc9fccf2db53a5a3c9c9d0163e44143b0d004dca9bf6ab6a0024cd79a" dependencies = [ "cranelift-codegen-shared", ] [[package]] name = "cranelift-codegen-shared" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca590e72ccb8da963def6e36460cce4412032b1f03c31d1a601838d305abdc39" +checksum = "cea2d1b274e45aa8e61e9103efa1ba82d4b5a19d12bd1fd10744c3b7380ba3ff" [[package]] name = "cranelift-control" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d2d38eea4373639f4b6236a40f69820fed16c5511093cd3783bf8491a93d9cf" +checksum = "6ea5977559a71e63db79a263f0e81a89b996e8a38212c4281e37dd1dbaa8b65c" dependencies = [ "arbitrary", ] [[package]] name = "cranelift-entity" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e3173c1434af23c00e4964722cf93ca8f0e6287289bf5d52110597c3ba2ea09" +checksum = "2f871ada808b58158d84dfc43a6a2e2d2756baaf4ed1c51fd969ca8330e6ca5c" dependencies = [ "serde", + "serde_derive", ] [[package]] name = "cranelift-frontend" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aec4a3a33825062eccf6eec73e852c8773220f6e4798925e19696562948beb1f" +checksum = 
"e8e6890f587ef59824b3debe577e68fdf9b307b3808c54b8d93a18fd0b70941b" dependencies = [ "cranelift-codegen", "log", @@ -951,15 +906,15 @@ dependencies = [ [[package]] name = "cranelift-isle" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5146b5cea4b21095a021d964b0174cf6ff5530f83e8d0a822683c7559e360b66" +checksum = "a8d5fc6d5d3b52d1917002b17a8ecce448c2621b5bf394bb4e77e2f676893537" [[package]] name = "cranelift-native" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21cec3717ce554d3936b2101aa8eae1a2a410bd6da0f4df698a4b008fe9cf1e9" +checksum = "3e10c2e7faa65d4ae7de9a83b44f2c31aca7dc638e17d0a79572fdf8103d720b" dependencies = [ "cranelift-codegen", "libc", @@ -968,14 +923,14 @@ dependencies = [ [[package]] name = "cranelift-wasm" -version = "0.98.1" +version = "0.102.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fd2f9f1bf29ce6639ae2f477a2fe20bad0bd09289df13efeb890e8e4b9f807" +checksum = "2755807efc7ec80d1cc0b6815e70f10cedf968889f0469091dbff9c5c0741c48" dependencies = [ "cranelift-codegen", "cranelift-entity", "cranelift-frontend", - "itertools", + "itertools 0.10.5", "log", "smallvec", "wasmparser", @@ -993,24 +948,24 @@ dependencies = [ [[package]] name = "criterion" -version = "0.3.6" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" +checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" dependencies = [ - "atty", + "anes", "cast", - "clap 2.34.0", + "ciborium", + "clap 4.4.6", "criterion-plot", - "csv", - "itertools", - "lazy_static", + "is-terminal", + "itertools 0.10.5", "num-traits", + "once_cell", "oorandom", "plotters", "rayon", "regex", "serde", - "serde_cbor", "serde_derive", "serde_json", "tinytemplate", @@ -1019,12 +974,12 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.4.5" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" +checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" dependencies = [ "cast", - "itertools", + "itertools 0.10.5", ] [[package]] @@ -1068,10 +1023,10 @@ version = "0.9.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ - "autocfg 1.1.0", + "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.9.0", + "memoffset", "scopeguard", ] @@ -1121,9 +1076,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.2.11" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f83bd3bb4314701c568e340cd8cf78c975aa0ca79e03d3f6d1677d5b0c9c0c03" +checksum = "740fe28e594155f10cfc383984cbefd529d7396050557148f79cb0f621204124" dependencies = [ "generic-array 0.14.7", "rand_core 0.6.4", @@ -1138,77 +1093,43 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array 0.14.7", + "rand_core 0.6.4", "typenum", ] [[package]] -name = "crypto-mac" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b584a330336237c1eecd3e94266efb216c56ed91225d634cb2991c5f3fd1aeab" -dependencies = [ - "generic-array 
0.14.7", - "subtle", -] - -[[package]] -name = "crypto-mac" -version = "0.11.1" +name = "curve25519-dalek" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1d1a86f49236c215f271d40892d5fc950490551400b02ef360692c29815c714" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" dependencies = [ - "generic-array 0.14.7", + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", "subtle", + "zeroize", ] [[package]] -name = "csv" -version = "1.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086" -dependencies = [ - "csv-core", - "itoa", - "ryu", - "serde", -] - -[[package]] -name = "csv-core" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2466559f260f48ad25fe6317b3c8dac77b5bdb5763ac7d9d6103530663bc90" -dependencies = [ - "memchr", -] - -[[package]] -name = "ctr" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb4a30d54f7443bf3d6191dcd486aca19e67cb3c49fa7a06a319966346707e7f" -dependencies = [ - "cipher", -] - -[[package]] -name = "curve25519-dalek" -version = "3.2.1" +name = "curve25519-dalek-derive" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90f9d052967f590a76e62eb387bd0bbb1b000182c3cefe5364db6b7211651bc0" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "byteorder", - "digest 0.9.0", - "rand_core 0.5.1", - "subtle", - "zeroize", + "proc-macro2", + "quote", + "syn 2.0.38", ] [[package]] name = "cxx" -version = "1.0.101" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5032837c1384de3708043de9d4e97bb91290faca6c16529a28aa340592a78166" +checksum = "292b4841d939b20ba44fff686a35808b0ab31a3256e3629917d9aedd43eb7b3a" dependencies = [ "cc", "cxxbridge-flags", @@ -1218,9 +1139,9 @@ dependencies = [ [[package]] name = "cxx-build" -version = "1.0.101" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51368b3d0dbf356e10fcbfd455a038503a105ee556f7ee79b6bb8c53a7247456" +checksum = "8e7e35cf85fd4e90dcaba251f3ee95e08fb6f9d66e5c0588816f16a6ab939b40" dependencies = [ "cc", "codespan-reporting", @@ -1228,24 +1149,24 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "cxxbridge-flags" -version = "1.0.101" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d9062157072e4aafc8e56ceaf8325ce850c5ae37578c852a0d4de2cecdded13" +checksum = "d7030aff1908ba2b7eb639466df50792b2a3fdf02bea9557c4ee1a531975554b" [[package]] name = "cxxbridge-macro" -version = "1.0.101" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf01e8a540f5a4e0f284595834f81cf88572f244b768f051724537afa99a2545" +checksum = "79418ecb0c2322a7926a5fa5a9660535432b5b3588b947e1eb484cc509edbe3c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -1269,7 +1190,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -1280,22 +1201,34 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 2.0.26", + "syn 2.0.38", ] 
+[[package]] +name = "dary_heap" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7762d17f1241643615821a8455a0b2c3e803784b058693d990b11f2dce25a0ca" + [[package]] name = "dashmap" -version = "5.5.0" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6943ae99c34386c84a470c499d3414f66502a41340aa895406e0d2e4a207b91d" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.14.0", + "hashbrown 0.14.1", "lock_api", "once_cell", "parking_lot_core", ] +[[package]] +name = "data-encoding" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" + [[package]] name = "debugid" version = "0.8.0" @@ -1307,13 +1240,20 @@ dependencies = [ [[package]] name = "der" -version = "0.4.5" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79b71cca7d95d7681a4b3b9cdf63c8dbc3730d0584c2c74e31416d64a90493f4" +checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" dependencies = [ "const-oid", + "zeroize", ] +[[package]] +name = "deranged" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" + [[package]] name = "derive_more" version = "0.99.17" @@ -1327,12 +1267,13 @@ dependencies = [ [[package]] name = "dialoguer" -version = "0.10.4" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59c6f2989294b9a498d3ad5491a79c6deb604617378e1cdc4bfc1c1361fe2f87" +checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" dependencies = [ "console", "shell-words", + "thiserror", ] [[package]] @@ -1344,15 +1285,6 @@ dependencies = [ "generic-array 0.12.4", ] -[[package]] -name = "digest" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066" -dependencies = [ - "generic-array 0.14.7", -] - [[package]] name = "digest" version = "0.10.7" @@ -1360,7 +1292,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ "block-buffer 0.10.4", + "const-oid", "crypto-common", + "subtle", ] [[package]] @@ -1392,7 +1326,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -1415,63 +1349,69 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "dyn-clone" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "304e6508efa593091e97a9abbc10f90aa7ca635b6d2784feff3c89d41dd12272" +checksum = "23d2f3407d9a573d666de4b5bdf10569d73ca9478087346697dcbae6244bfbcd" [[package]] name = "ecdsa" -version = "0.12.4" +version = "0.16.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43ee23aa5b4f68c7a092b5c3beb25f50c406adc75e2363634f242f28ab255372" +checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" dependencies = [ "der", + "digest 0.10.7", "elliptic-curve", - "hmac", + "rfc6979", "signature", + "spki", ] [[package]] name = "ed25519" -version = "1.5.3" +version = "2.2.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91cff35c70bba8a626e3185d8cd48cc11b5437e1a5bcd15b9b5fa3c64b6dfee7" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ + "pkcs8", "signature", ] [[package]] name = "ed25519-dalek" -version = "1.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" dependencies = [ "curve25519-dalek", "ed25519", - "rand 0.7.3", + "rand_core 0.6.4", "serde", - "sha2 0.9.9", + "sha2", "zeroize", ] [[package]] name = "either" -version = "1.8.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fcaabb2fef8c910e7f4c7ce9f67a1283a1715879a7c230ca9d6d1ae31f16d91" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.10.6" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beca177dcb8eb540133e7680baff45e7cc4d93bf22002676cec549f82343721b" +checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914" dependencies = [ + "base16ct", "crypto-bigint", + "digest 0.10.7", "ff", "generic-array 0.14.7", "group", "pkcs8", "rand_core 0.6.4", + "sec1", "subtle", "zeroize", ] @@ -1484,26 +1424,13 @@ checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" [[package]] name = "encoding_rs" -version = "0.8.32" +version = "0.8.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" dependencies = [ "cfg-if", ] -[[package]] -name = "env_logger" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85cdab6a89accf66733ad5a1693a4dcced6aeff64602b634530dd73c1f3ee9f0" -dependencies = [ - "humantime", - "is-terminal", - "log", - "regex", - "termcolor", -] - [[package]] name = "equivalent" version = "1.0.1" @@ -1512,34 +1439,23 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.3.28" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da96524cc884f6558f1769b6c46686af2fe8e8b4cd253bd5a3cdba8181b8e070" +checksum = "6c138974f9d5e7fe373eb04df7cae98833802ae4b11c24ac7039a21d5af4b26c" dependencies = [ "serde", ] [[package]] name = "errno" -version = "0.3.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ - "errno-dragonfly", "libc", "windows-sys 0.48.0", ] -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "expect-test" version = "1.4.1" @@ -1560,78 +1476,52 @@ dependencies = [ "once_cell", ] -[[package]] -name = "failure" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d32e9bd16cc02eae7db7ef620b392808b89f6a5e16bb3497d159c6b92a0f4f86" 
-dependencies = [ - "backtrace", - "failure_derive", -] - -[[package]] -name = "failure_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "synstructure", -] - [[package]] name = "fallible-iterator" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4443176a9f2c162692bd3d352d745ef9413eec5782a80d8fd6f8a1ac692a07f7" +checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" [[package]] -name = "fastrand" -version = "1.9.0" +name = "faster-hex" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +checksum = "239f7bfb930f820ab16a9cd95afc26f88264cf6905c960b340a615384aa3338a" dependencies = [ - "instant", + "serde", ] [[package]] name = "fastrand" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "ff" -version = "0.10.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f40b2dcd8bc322217a5f6559ae5f9e9d1de202a2ecee2e9eafcbece7562a4f" +checksum = "ded41244b729663b1e574f1b4fb731469f69f79c17667b5d776b16cda0479449" dependencies = [ "rand_core 0.6.4", "subtle", ] [[package]] -name = "file-per-thread-logger" -version = "0.2.0" +name = "fiat-crypto" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a3cc21c33af89af0930c8cae4ade5e6fdc17b5d2c97b3d2e2edb67a1cf683f3" -dependencies = [ - "env_logger", - "log", -] +checksum = "a481586acf778f1b1455424c343f71124b048ffa5f4fc3f8f6ae9dc432dcb3c7" [[package]] name = "filetime" -version = "0.2.21" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" +checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.2.16", + "redox_syscall 0.3.5", "windows-sys 0.48.0", ] @@ -1657,9 +1547,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", "miniz_oxide", @@ -1757,7 +1647,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -1805,7 +1695,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27d12c0aed7f1e24276a241aadc4cb8ea9f83000f34bc062b7cc2d51e3b0fabd" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "debugid", "fxhash", "serde", @@ -1829,6 +1719,7 @@ checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" dependencies = [ "typenum", "version_check", + "zeroize", ] [[package]] @@ -1848,10 +1739,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ "cfg-if", - "js-sys", "libc", "wasi 0.9.0+wasi-snapshot-preview1", - "wasm-bindgen", ] [[package]] @@ -1877,38 +1766,26 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "ghash" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97304e4cd182c3846f7575ced3890c53012ce534ad9114046b0a9e00bb30a375" -dependencies = [ - "opaque-debug 0.3.0", - "polyval", -] - [[package]] name = "gimli" -version = "0.27.3" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" dependencies = [ "fallible-iterator", - "indexmap 1.9.3", + "indexmap 2.0.2", "stable_deref_trait", ] [[package]] name = "gix" -version = "0.48.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e74cea676de7f53a79f3c0365812b11f6814b81e671b8ee4abae6ca09c7881" +checksum = "06a8c9f9452078f474fecd2880de84819b8c77224ab62273275b646bf785f906" dependencies = [ "gix-actor", - "gix-attributes", "gix-commitgraph", "gix-config", - "gix-credentials", "gix-date", "gix-diff", "gix-discover", @@ -1917,19 +1794,17 @@ dependencies = [ "gix-glob", "gix-hash", "gix-hashtable", - "gix-ignore", "gix-index", "gix-lock", - "gix-mailmap", - "gix-negotiate", + "gix-macros", "gix-object", "gix-odb", "gix-pack", "gix-path", - "gix-prompt", "gix-ref", "gix-refspec", "gix-revision", + "gix-revwalk", "gix-sec", "gix-tempfile", "gix-trace", @@ -1937,9 +1812,8 @@ dependencies = [ "gix-url", "gix-utils", "gix-validate", - "gix-worktree", - "log", "once_cell", + "parking_lot", "signal-hook", "smallvec", "thiserror", @@ -1948,67 +1822,41 @@ dependencies = [ [[package]] name = "gix-actor" -version = "0.23.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1969b77b9ee4cc1755c841987ec6f7622aaca95e952bcafb76973ae59d1b8716" +checksum = "8e8c6778cc03bca978b2575a03e04e5ba6f430a9dd9b0f1259f0a8a9a5e5cc66" dependencies = [ "bstr", "btoi", "gix-date", "itoa", - "nom", - "thiserror", -] - -[[package]] -name = "gix-attributes" -version = "0.14.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3772b0129dcd1fc73e985bbd08a1482d082097d2915cb1ee31ce8092b8e4434" -dependencies = [ - "bstr", - "gix-glob", - "gix-path", - "gix-quote", - "kstring", - "log", - "smallvec", "thiserror", - "unicode-bom", + "winnow", ] [[package]] name = "gix-bitmap" -version = "0.2.5" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "311e2fa997be6560c564b070c5da2d56d038b645a94e1e5796d5d85a350da33c" +checksum = "0ccab4bc576844ddb51b78d81b4a42d73e6229660fa614dfc3d3999c874d1959" dependencies = [ "thiserror", ] [[package]] name = "gix-chunk" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39db5ed0fc0a2e9b1b8265993f7efdbc30379dec268f3b91b7af0c2de4672fdd" +checksum = "5b42ea64420f7994000130328f3c7a2038f639120518870436d31b8bde704493" dependencies = [ "thiserror", ] -[[package]] -name = "gix-command" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb49ab557a37b0abb2415bca2b10e541277dff0565deb5bd5e99fd95f93f51eb" -dependencies = [ - "bstr", -] - [[package]] name = "gix-commitgraph" -version = "0.17.1" +version = 
"0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed42baa50075d41c1a0931074ce1a97c5797c7c6fe7591d9f1f2dcd448532c26" +checksum = "4676ede3a7d37e7028e2889830349a6aca22efc1d2f2dd9fa3351c1a8ddb0c6a" dependencies = [ "bstr", "gix-chunk", @@ -2020,9 +1868,9 @@ dependencies = [ [[package]] name = "gix-config" -version = "0.25.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "817688c7005a716d9363e267913526adea402dabd947f4ba63842d10cc5132af" +checksum = "1108c4ac88248dd25cc8ab0d0dae796e619fb72d92f88e30e00b29d61bb93cc4" dependencies = [ "bstr", "gix-config-value", @@ -2031,73 +1879,55 @@ dependencies = [ "gix-path", "gix-ref", "gix-sec", - "log", "memchr", - "nom", "once_cell", "smallvec", "thiserror", "unicode-bom", + "winnow", ] [[package]] name = "gix-config-value" -version = "0.12.3" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83960be5e99266bcf55dae5a24731bbd39f643bfb68f27e939d6b06836b5b87d" +checksum = "ea7505b97f4d8e7933e29735a568ba2f86d8de466669d9f0e8321384f9972f47" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "bstr", "gix-path", "libc", "thiserror", ] -[[package]] -name = "gix-credentials" -version = "0.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75a75565e0e6e7f80cfa4eb1b05cc448c6846ddd48dcf413a28875fbc11ee9af" -dependencies = [ - "bstr", - "gix-command", - "gix-config-value", - "gix-path", - "gix-prompt", - "gix-sec", - "gix-url", - "thiserror", -] - [[package]] name = "gix-date" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9a04a1d2387c955ec91059d56b673000dd24f3c07cad08ed253e36381782bf" +checksum = "fc7df669639582dc7c02737642f76890b03b5544e141caba68a7d6b4eb551e0d" dependencies = [ "bstr", "itoa", "thiserror", - "time 0.3.23", + "time", ] [[package]] name = "gix-diff" -version = "0.32.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aaf5d9b9b521b284ebe53ee69eee33341835ec70edc314f36b2100ea81396121" +checksum = "b45e342d148373bd9070d557e6fb1280aeae29a3e05e32506682d027278501eb" dependencies = [ "gix-hash", "gix-object", - "imara-diff", "thiserror", ] [[package]] name = "gix-discover" -version = "0.21.1" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "272aad20dc63dedba76615373dd8885fb5aebe4795e5b5b0aa2a24e63c82085c" +checksum = "da4cacda5ee9dd1b38b0e2506834e40e66c08cf050ef55c344334c76745f277b" dependencies = [ "bstr", "dunce", @@ -2110,9 +1940,9 @@ dependencies = [ [[package]] name = "gix-features" -version = "0.31.1" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06142d8cff5d17509399b04052b64d2f9b3a311d5cff0b1a32b220f62cd0d595" +checksum = "f414c99e1a7abc69b21f3225a6539d203b0513f1d1d448607c4ea81cdcf9ee59" dependencies = [ "crc32fast", "flate2", @@ -2128,20 +1958,20 @@ dependencies = [ [[package]] name = "gix-fs" -version = "0.3.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb15956bc0256594c62a2399fcf6958a02a11724217eddfdc2b49b21b6292496" +checksum = "404795da3d4c660c9ab6c3b2ad76d459636d1e1e4b37b0c7ff68eee898c298d4" dependencies = [ "gix-features", ] [[package]] name = "gix-glob" -version = "0.9.1" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c18bdff83143d61e7d60da6183b87542a870d026b2a2d0b30170b8e9c0cd321a" +checksum = "e3ac79c444193b0660fe0c0925d338bd338bd643e32138784dccfb12c628b892" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "bstr", "gix-features", "gix-path", @@ -2149,49 +1979,38 @@ dependencies = [ [[package]] name = "gix-hash" -version = "0.11.3" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0dd58cdbe7ffa4032fc111864c80d5f8cecd9a2c9736c97ae7e5be834188272" +checksum = "2ccf425543779cddaa4a7c62aba3fa9d90ea135b160be0a72dd93c063121ad4a" dependencies = [ - "hex", + "faster-hex", "thiserror", ] [[package]] name = "gix-hashtable" -version = "0.2.3" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e133bc56d938eaec1c675af7c681a51de9662b0ada779f45607b967a10da77a" +checksum = "409268480841ad008e81c17ca5a293393fbf9f2b6c2f85b8ab9de1f0c5176a16" dependencies = [ "gix-hash", - "hashbrown 0.14.0", + "hashbrown 0.14.1", "parking_lot", ] -[[package]] -name = "gix-ignore" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca801f2d0535210f77b33e2c067d565aedecacc82f1b3dbce26da1388ebc4634" -dependencies = [ - "bstr", - "gix-glob", - "gix-path", - "unicode-bom", -] - [[package]] name = "gix-index" -version = "0.20.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68099abdf6ee50ae3c897e8b05de96871cbe54d52a37cdf559101f911b883562" +checksum = "0e9599fc30b3d6aad231687a403f85dfa36ae37ccf1b68ee1f621ad5b7fc7a0d" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "bstr", "btoi", "filetime", "gix-bitmap", "gix-features", + "gix-fs", "gix-hash", "gix-lock", "gix-object", @@ -2204,9 +2023,9 @@ dependencies = [ [[package]] name = "gix-lock" -version = "7.0.1" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "714bcb13627995ac33716e9c5e4d25612b19947845395f64d2a9cbe6007728e4" +checksum = "1568c3d90594c60d52670f325f5db88c2d572e85c8dd45fabc23d91cadb0fd52" dependencies = [ "gix-tempfile", "gix-utils", @@ -2214,38 +2033,21 @@ dependencies = [ ] [[package]] -name = "gix-mailmap" -version = "0.15.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1787e3c37fc43b1f7c0e3be6196c6837b3ba5f869190dfeaa444b816f0a7f34b" -dependencies = [ - "bstr", - "gix-actor", - "gix-date", - "thiserror", -] - -[[package]] -name = "gix-negotiate" -version = "0.4.0" +name = "gix-macros" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e7bce64d4452dd609f44d04b14b29da2e0ad2c45fcdf4ce1472a5f5f8ec21c2" +checksum = "9d8acb5ee668d55f0f2d19a320a3f9ef67a6999ad483e11135abcc2464ed18b6" dependencies = [ - "bitflags 2.3.3", - "gix-commitgraph", - "gix-date", - "gix-hash", - "gix-object", - "gix-revwalk", - "smallvec", - "thiserror", + "proc-macro2", + "quote", + "syn 2.0.38", ] [[package]] name = "gix-object" -version = "0.32.0" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953f3d7ffad16734aa3ab1d05807972c80e339d1bd9dde03e0198716b99e2a6" +checksum = "3e5528d5b2c984044d547e696e44a8c45fa122e83cd8c2ac1da69bd474336be8" dependencies = [ "bstr", "btoi", @@ -2254,18 +2056,17 @@ dependencies = [ "gix-features", "gix-hash", "gix-validate", - "hex", "itoa", - "nom", "smallvec", "thiserror", + "winnow", ] [[package]] name = "gix-odb" -version = "0.49.1" +version = "0.52.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6418cff00ecc2713b58c8e04bff30dda808fbba1a080e7248b299d069894a01" +checksum = "d0446eca295459deb3d6dd6ed7d44a631479f1b7381d8087166605c7a9f717c6" dependencies = [ "arc-swap", "gix-date", @@ -2282,20 +2083,18 @@ dependencies = [ [[package]] name = "gix-pack" -version = "0.39.1" +version = "0.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "414935138d90043ea5898de7a93f02c2558e52652492719470e203ef26a8fd0a" +checksum = "be19ee650300d7cbac5829b637685ec44a8d921a7c2eaff8a245d8f2f008870c" dependencies = [ "clru", "gix-chunk", - "gix-diff", "gix-features", "gix-hash", "gix-hashtable", "gix-object", "gix-path", "gix-tempfile", - "gix-traverse", "memmap2", "parking_lot", "smallvec", @@ -2304,9 +2103,9 @@ dependencies = [ [[package]] name = "gix-path" -version = "0.8.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfca182d2575ded2ed38280f1ebf75cd5d3790b77e0872de07854cf085821fbe" +checksum = "6a1d370115171e3ae03c5c6d4f7d096f2981a40ddccb98dfd704c773530ba73b" dependencies = [ "bstr", "gix-trace", @@ -2315,24 +2114,11 @@ dependencies = [ "thiserror", ] -[[package]] -name = "gix-prompt" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8dfd363fd89a40c1e7bff9c9c1b136cd2002480f724b0c627c1bc771cd5480ec" -dependencies = [ - "gix-command", - "gix-config-value", - "parking_lot", - "rustix 0.37.23", - "thiserror", -] - [[package]] name = "gix-quote" -version = "0.4.5" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3874de636c2526de26a3405b8024b23ef1a327bebf4845d770d00d48700b6a40" +checksum = "475c86a97dd0127ba4465fbb239abac9ea10e68301470c9791a6dd5351cdc905" dependencies = [ "bstr", "btoi", @@ -2341,9 +2127,9 @@ dependencies = [ [[package]] name = "gix-ref" -version = "0.32.1" +version = "0.36.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39453f4e5f23cddc2e6e4cca2ba20adfdbec29379e3ca829714dfe98ae068ccd" +checksum = "3cccbfa8d5cd9b86465f27a521e0c017de54b92d9fd37c143e49c658a2f04f3a" dependencies = [ "gix-actor", "gix-date", @@ -2356,15 +2142,15 @@ dependencies = [ "gix-tempfile", "gix-validate", "memmap2", - "nom", "thiserror", + "winnow", ] [[package]] name = "gix-refspec" -version = "0.13.0" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8e76ff1f82fba295a121e31ab02f69642994e532c45c0c899aa393f4b740302" +checksum = "678ba30d95baa5462df9875628ed40655d5f5b8aba7028de86ed57f36e762c6c" dependencies = [ "bstr", "gix-hash", @@ -2376,9 +2162,9 @@ dependencies = [ [[package]] name = "gix-revision" -version = "0.17.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "237428a7d3978e8572964e1e45d984027c2acc94df47e594baa6c4b0da7c9922" +checksum = "b3e80a5992ae446fe1745dd26523b86084e3f1b6b3e35377fe09b4f35ac8f151" dependencies = [ "bstr", "gix-date", @@ -2386,14 +2172,15 @@ dependencies = [ "gix-hashtable", "gix-object", "gix-revwalk", + "gix-trace", "thiserror", ] [[package]] name = "gix-revwalk" -version = "0.3.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "028d50fcaf8326a8f79a359490d9ca9fb4e2b51ac9ac86503560d0bcc888d2eb" +checksum = "b806349bc1f668e09035800e07ac8045da4e39a8925a245d93142c4802224ec1" dependencies = [ "gix-commitgraph", "gix-date", @@ -2406,11 +2193,11 @@ dependencies = [ 
[[package]] name = "gix-sec" -version = "0.8.3" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ede298863db2a0574a14070991710551e76d1f47c9783b62d4fcbca17f56371c" +checksum = "92b9542ac025a8c02ed5d17b3fc031a111a384e859d0be3532ec4d58c40a0f28" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "gix-path", "libc", "windows", @@ -2418,9 +2205,9 @@ dependencies = [ [[package]] name = "gix-tempfile" -version = "7.0.0" +version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fac8310c17406ea619af72f42ee46dac795110f68f41b4f4fa231b69889c6a2" +checksum = "2762b91ff95e27ff3ea95758c0d4efacd7435a1be3629622928b8276de0f72a8" dependencies = [ "gix-fs", "libc", @@ -2433,15 +2220,15 @@ dependencies = [ [[package]] name = "gix-trace" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "103eac621617be3ebe0605c9065ca51a223279a23218aaf67d10daa6e452f663" +checksum = "96b6d623a1152c3facb79067d6e2ecdae48130030cf27d6eb21109f13bd7b836" [[package]] name = "gix-traverse" -version = "0.29.0" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3cdfd54598db4fae57d5ae6f52958422b2d13382d2745796bfe5c8015ffa86e" +checksum = "3ec6358f8373fb018af8fc96c9d2ec6a5b66999e2377dc40b7801351fec409ed" dependencies = [ "gix-commitgraph", "gix-date", @@ -2455,9 +2242,9 @@ dependencies = [ [[package]] name = "gix-url" -version = "0.20.1" +version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beaede6dbc83f408b19adfd95bb52f1dbf01fb8862c3faf6c6243e2e67fcdfa1" +checksum = "1c79d595b99a6c7ab274f3c991735a0c0f5a816a3da460f513c48edf1c7bf2cc" dependencies = [ "bstr", "gix-features", @@ -2469,41 +2256,20 @@ dependencies = [ [[package]] name = "gix-utils" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7058c94f4164fcf5b8457d35f6d8f6e1007f9f7f938c9c7684a7e01d23c6ddde" +checksum = "b85d89dc728613e26e0ed952a19583744e7f5240fcd4aa30d6c824ffd8b52f0f" dependencies = [ - "fastrand 2.0.0", + "fastrand", ] [[package]] name = "gix-validate" -version = "0.7.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d092b594c8af00a3a31fe526d363ee8a51a6f29d8496cdb991ed2f01ec0ec13" -dependencies = [ - "bstr", - "thiserror", -] - -[[package]] -name = "gix-worktree" -version = "0.21.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1363b9aa66b9e14412ac04e1f759827203f491729d92172535a8ce6cde02efa" +checksum = "e05cab2b03a45b866156e052aa38619f4ece4adcb2f79978bfc249bc3b21b8c5" dependencies = [ "bstr", - "filetime", - "gix-attributes", - "gix-features", - "gix-fs", - "gix-glob", - "gix-hash", - "gix-ignore", - "gix-index", - "gix-object", - "gix-path", - "io-close", "thiserror", ] @@ -2515,9 +2281,9 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "group" -version = "0.10.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c363a5301b8f153d80747126a04b3c82073b9fe3130571a9d170cacdeaf7912" +checksum = "f0f9ef7462f7c099f518d754361858f86d8a07af53ba9af0fe635bbccb151a63" dependencies = [ "ff", "rand_core 0.6.4", @@ -2526,9 +2292,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.20" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ "bytes", "fnv", @@ -2566,9 +2332,12 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +dependencies = [ + "ahash", +] [[package]] name = "hdrhistogram" @@ -2585,12 +2354,11 @@ dependencies = [ [[package]] name = "headers" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" +checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ - "base64 0.13.1", - "bitflags 1.3.2", + "base64 0.21.4", "bytes", "headers-core", "http", @@ -2625,9 +2393,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" @@ -2640,28 +2408,26 @@ dependencies = [ [[package]] name = "hex-literal" -version = "0.3.4" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0" +checksum = "6fe2267d4ed49bc07b63801559be28c718ea06c4738b7a03c94df7386d2cde46" [[package]] name = "hkdf" -version = "0.11.0" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01706d578d5c281058480e673ae4086a9f4710d8df1ad80a5b03e39ece5f886b" +checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" dependencies = [ - "digest 0.9.0", "hmac", ] [[package]] name = "hmac" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" +checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "crypto-mac 0.11.1", - "digest 0.9.0", + "digest 0.10.7", ] [[package]] @@ -2703,9 +2469,9 @@ checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" @@ -2730,7 +2496,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2794,16 +2560,6 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "imara-diff" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e98c1d0ad70fc91b8b9654b1f33db55e59579d3b3de2bffdced0fdb810570cb8" -dependencies = [ - "ahash", - "hashbrown 0.12.3", -] - [[package]] name = "impl-trait-for-tuples" version = "0.2.2" @@ -2833,19 +2589,28 @@ version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ - "autocfg 1.1.0", + "autocfg", "hashbrown 0.12.3", - "serde", ] [[package]] name = "indexmap" -version = "2.0.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.1", + "serde", +] + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array 0.14.7", ] [[package]] @@ -2865,48 +2630,13 @@ dependencies = [ ] [[package]] -name = "instant" -version = "0.1.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "int_traits" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b33c9a5c599d67d051c4dc25eb1b6b4ef715d1763c20c85c688717a1734f204e" - -[[package]] -name = "io-close" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cadcf447f06744f8ce713d2d6239bb5bde2c357a452397a9ed90c625da390bc" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.2", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "iroha" -version = "2.0.0-pre-rc.19" +name = "iroha" +version = "2.0.0-pre-rc.20" dependencies = [ "async-trait", "color-eyre", "dashmap", + "derive_more", "displaydoc", "eyre", "futures", @@ -2920,6 +2650,7 @@ dependencies = [ "iroha_logger", "iroha_macro", "iroha_p2p", + "iroha_primitives", "iroha_schema_gen", "iroha_telemetry", "iroha_version", @@ -2930,20 +2661,19 @@ dependencies = [ "serde", "serde_json", "serial_test", - "supports-color 2.0.0", + "supports-color 2.1.0", "tempfile", "thiserror", "thread-local-panic-hook", "tokio", "tracing", - "uuid", "vergen", "warp", ] [[package]] name = "iroha_cli_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "proc-macro2", "quote", @@ -2953,10 +2683,10 @@ dependencies = [ [[package]] name = "iroha_client" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "attohttpc", - "base64 0.13.1", + "base64 0.21.4", "color-eyre", "criterion", "derive_more", @@ -2984,16 +2714,15 @@ dependencies = [ "test_network", "thiserror", "tokio", - "tokio-tungstenite 0.16.1", + "tokio-tungstenite", "tracing-flame", "tracing-subscriber", - "tungstenite 0.16.0", "url", ] [[package]] name = "iroha_client_cli" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "clap 3.2.25", "color-eyre", @@ -3012,7 +2741,7 @@ dependencies = [ [[package]] name = "iroha_config" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "cfg-if", "derive_more", @@ -3028,7 +2757,8 @@ dependencies = [ "proptest", "serde", "serde_json", - "strum", + "stacker", + "strum 0.25.0", "thiserror", "tracing", "tracing-subscriber", @@ -3037,7 +2767,7 @@ dependencies = [ [[package]] name = "iroha_config_base" -version = "2.0.0-pre-rc.19" 
+version = "2.0.0-pre-rc.20" dependencies = [ "crossbeam", "displaydoc", @@ -3045,6 +2775,7 @@ dependencies = [ "iroha_config_derive", "iroha_crypto", "json5", + "parking_lot", "serde", "serde_json", "thiserror", @@ -3052,7 +2783,7 @@ dependencies = [ [[package]] name = "iroha_config_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_macro_utils", "proc-macro-error", @@ -3063,7 +2794,7 @@ dependencies = [ [[package]] name = "iroha_core" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "async-trait", "byte-unit", @@ -3087,23 +2818,22 @@ dependencies = [ "iroha_telemetry", "iroha_version", "iroha_wasm_codec", - "itertools", "once_cell", "parity-scale-codec", "parking_lot", "rand 0.8.5", - "sealed", "serde", "serde_json", "tempfile", "thiserror", "tokio", + "uuid", "wasmtime", ] [[package]] name = "iroha_core_wasm_codec_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "once_cell", "proc-macro-error", @@ -3114,31 +2844,51 @@ dependencies = [ [[package]] name = "iroha_crypto" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ + "aead", + "amcl", + "amcl_wrapper", + "arrayref", + "blake2", + "chacha20poly1305", + "curve25519-dalek", "derive_more", + "digest 0.10.7", + "displaydoc", + "ed25519-dalek", + "elliptic-curve", "getset", "hex", "hex-literal", + "hkdf", "iroha_ffi", "iroha_macro", "iroha_primitives", "iroha_schema", - "openssl-sys", + "k256", + "libsodium-sys-stable", + "openssl", "parity-scale-codec", + "rand 0.8.5", + "rand_chacha 0.3.1", + "secp256k1", "serde", "serde_json", "serde_with", - "ursa", + "sha2", + "signature", + "thiserror", + "x25519-dalek", + "zeroize", ] [[package]] name = "iroha_data_model" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "base64 0.13.1", + "base64 0.21.4", "criterion", - "dashmap", "derive_more", "displaydoc", "getset", @@ -3150,35 +2900,39 @@ dependencies = [ "iroha_primitives", "iroha_schema", "iroha_version", + "once_cell", "parity-scale-codec", "serde", "serde_json", "serde_with", - "strum", + "strum 0.25.0", "thiserror", - "tokio", "trybuild", "warp", ] [[package]] name = "iroha_data_model_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ + "darling", + "derive_more", "iroha_data_model", "iroha_macro_utils", - "proc-macro-error", + "iroha_schema", + "manyhow", + "parity-scale-codec", "proc-macro2", "quote", "serde", "serde_json", - "syn 1.0.109", + "syn 2.0.38", "trybuild", ] [[package]] name = "iroha_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "darling", "impls", @@ -3186,13 +2940,13 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", "trybuild", ] [[package]] name = "iroha_dsl" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_client", "iroha_config", @@ -3204,9 +2958,36 @@ dependencies = [ "serde_json", ] +[[package]] +name = "iroha_executor" +version = "2.0.0-pre-rc.20" +dependencies = [ + "iroha_data_model", + "iroha_executor_derive", + "iroha_schema", + "iroha_smart_contract", + "iroha_smart_contract_utils", + "serde", + "serde_json", +] + +[[package]] +name = "iroha_executor_derive" +version = "2.0.0-pre-rc.20" +dependencies = [ + "darling", + "iroha_data_model", + "iroha_macro_utils", + "manyhow", + "proc-macro2", + "quote", + "syn 1.0.109", + "syn 2.0.38", +] + [[package]] name = "iroha_ffi" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" 
dependencies = [ "derive_more", "getset", @@ -3216,24 +2997,24 @@ dependencies = [ [[package]] name = "iroha_ffi_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "darling", - "drop_bomb", "getset", "iroha_ffi", + "iroha_macro_utils", "manyhow", "parse-display", "proc-macro2", "quote", "rustc-hash", - "syn 2.0.26", + "syn 2.0.38", "trybuild", ] [[package]] name = "iroha_futures" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_config", "iroha_futures_derive", @@ -3247,17 +3028,18 @@ dependencies = [ [[package]] name = "iroha_futures_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "proc-macro-error", + "iroha_macro_utils", + "manyhow", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.38", ] [[package]] name = "iroha_genesis" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "derive_more", "eyre", @@ -3275,7 +3057,7 @@ dependencies = [ [[package]] name = "iroha_logger" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "color-eyre", "console-subscriber", @@ -3295,27 +3077,28 @@ dependencies = [ [[package]] name = "iroha_macro" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_derive", ] [[package]] name = "iroha_macro_utils" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "proc-macro-error", + "darling", + "drop_bomb", + "manyhow", "proc-macro2", "quote", "syn 1.0.109", + "syn 2.0.38", ] [[package]] name = "iroha_p2p" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "aead", - "async-stream", "async-trait", "bytes", "derive_more", @@ -3336,12 +3119,11 @@ dependencies = [ [[package]] name = "iroha_primitives" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "derive_more", "displaydoc", "fixnum", - "hex", "iroha_ffi", "iroha_macro", "iroha_primitives_derive", @@ -3358,18 +3140,18 @@ dependencies = [ [[package]] name = "iroha_primitives_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_primitives", "manyhow", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "iroha_schema" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "fixnum", "impls", @@ -3381,20 +3163,20 @@ dependencies = [ [[package]] name = "iroha_schema_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "darling", "iroha_schema", "manyhow", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", "trybuild", ] [[package]] name = "iroha_schema_gen" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_crypto", "iroha_data_model", @@ -3403,15 +3185,46 @@ dependencies = [ "iroha_schema", ] +[[package]] +name = "iroha_smart_contract" +version = "2.0.0-pre-rc.20" +dependencies = [ + "derive_more", + "iroha_data_model", + "iroha_macro", + "iroha_smart_contract_derive", + "iroha_smart_contract_utils", + "parity-scale-codec", + "webassembly-test", +] + +[[package]] +name = "iroha_smart_contract_derive" +version = "2.0.0-pre-rc.20" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "iroha_smart_contract_utils" +version = "2.0.0-pre-rc.20" +dependencies = [ + "iroha_data_model", + "parity-scale-codec", + "webassembly-test", +] + [[package]] name = "iroha_substrate" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" [[package]] name = "iroha_swarm" -version = "2.0.0-pre-rc.19" +version = 
"2.0.0-pre-rc.20" dependencies = [ - "clap 4.3.15", + "clap 4.4.6", "color-eyre", "derive_more", "expect-test", @@ -3430,30 +3243,33 @@ dependencies = [ [[package]] name = "iroha_telemetry" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "async-trait", "chrono", + "expect-test", "eyre", "futures", + "hex", "iroha_config", "iroha_futures", "iroha_logger", "iroha_telemetry_derive", + "parity-scale-codec", "prometheus", "serde", "serde_json", "streaming-stats", "tokio", "tokio-stream", - "tokio-tungstenite 0.17.2", + "tokio-tungstenite", "url", "vergen", ] [[package]] name = "iroha_telemetry_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_core", "proc-macro-error", @@ -3463,9 +3279,28 @@ dependencies = [ "trybuild", ] +[[package]] +name = "iroha_trigger" +version = "2.0.0-pre-rc.20" +dependencies = [ + "iroha_data_model", + "iroha_smart_contract", + "iroha_smart_contract_utils", + "iroha_trigger_derive", +] + +[[package]] +name = "iroha_trigger_derive" +version = "2.0.0-pre-rc.20" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "iroha_version" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_data_model", "iroha_logger", @@ -3480,7 +3315,7 @@ dependencies = [ [[package]] name = "iroha_version_derive" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "darling", "iroha_macro", @@ -3491,13 +3326,13 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.26", + "syn 2.0.38", "trybuild", ] [[package]] name = "iroha_wasm_builder" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "eyre", "path-absolutize", @@ -3508,9 +3343,9 @@ dependencies = [ [[package]] name = "iroha_wasm_builder_cli" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.3.15", + "clap 4.4.6", "color-eyre", "iroha_wasm_builder", "owo-colors", @@ -3519,7 +3354,7 @@ dependencies = [ [[package]] name = "iroha_wasm_codec" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "iroha_core_wasm_codec_derive", "parity-scale-codec", @@ -3527,14 +3362,22 @@ dependencies = [ "wasmtime", ] +[[package]] +name = "iroha_wasm_test_runner" +version = "2.0.0-pre-rc.20" +dependencies = [ + "anyhow", + "wasmtime", +] + [[package]] name = "is-terminal" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.2", - "rustix 0.38.4", + "hermit-abi 0.3.3", + "rustix", "windows-sys 0.48.0", ] @@ -3553,6 +3396,15 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + [[package]] name = "itoa" version = "1.0.9" @@ -3561,9 +3413,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "ittapi" -version = "0.3.4" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41e0d0b7b3b53d92a7e8b80ede3400112a6b8b4c98d1f5b8b16bb787c780582c" +checksum = "6b996fe614c41395cdaedf3cf408a9534851090959d90d54a535f675550b64b1" dependencies = [ "anyhow", "ittapi-sys", @@ -3572,9 +3424,9 @@ dependencies = [ [[package]] name = "ittapi-sys" -version = "0.3.4" +version = "0.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2f8763c96e54e6d6a0dccc2990d8b5e33e3313aaeae6185921a3f4c1614a77c" +checksum = "52f5385394064fa2c886205dba02598013ce83d3e92d33dbdc0c52fe0e7bf4fc" dependencies = [ "cc", ] @@ -3610,21 +3462,23 @@ dependencies = [ [[package]] name = "k256" -version = "0.9.6" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "903ae2481bcdfdb7b68e0a9baa4b7c9aff600b9ae2e8e5bb5833b8c91ab851ea" +checksum = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", - "sha2 0.9.9", + "once_cell", + "sha2", + "signature", ] [[package]] name = "kagami" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.3.15", + "clap 4.4.6", "color-eyre", "derive_more", "iroha_config", @@ -3647,20 +3501,11 @@ dependencies = [ "cpufeatures", ] -[[package]] -name = "kstring" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3066350882a1cd6d950d055997f379ac37fd39f81cd4d8ed186032eb3c5747" -dependencies = [ - "static_assertions", -] - [[package]] name = "kura_inspector" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.3.15", + "clap 4.4.6", "iroha_core", "iroha_data_model", "iroha_version", @@ -3680,36 +3525,71 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.147" +version = "0.2.150" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" + +[[package]] +name = "libflate" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7d5654ae1795afc7ff76f4365c2c8791b0feb18e8996a96adad8ffd7c3b2bf" +dependencies = [ + "adler32", + "core2", + "crc32fast", + "dary_heap", + "libflate_lz77", +] + +[[package]] +name = "libflate_lz77" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "be5f52fb8c451576ec6b79d3f4deb327398bc05bbdbd99021a6e77a4c855d524" +dependencies = [ + "core2", + "hashbrown 0.13.2", + "rle-decode-fast", +] [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] -name = "link-cplusplus" -version = "1.0.9" +name = "libsodium-sys-stable" +version = "1.20.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d240c6f7e1ba3a28b0249f774e6a9dd0175054b52dfbb61b16eb8505c3785c9" +checksum = "cfc31f983531631496f4e621110cd81468ab78b65dee0046cfddea83caa2c327" dependencies = [ "cc", + "libc", + "libflate", + "minisign-verify", + "pkg-config", + "tar", + "ureq", + "vcpkg", + "zip", ] [[package]] -name = "linux-raw-sys" -version = "0.3.8" +name = "link-cplusplus" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" +checksum = "9d240c6f7e1ba3a28b0249f774e6a9dd0175054b52dfbb61b16eb8505c3785c9" +dependencies = [ + "cc", +] [[package]] name = "linux-raw-sys" -version = "0.4.3" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" +checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" [[package]] name = "litrs" @@ -3726,15 +3606,15 @@ version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ - "autocfg 1.1.0", + "autocfg", "scopeguard", ] [[package]] name = "log" -version = "0.4.19" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b06a4cde4c0f271a446782e3eff8de789548ce57dbc8eca9292c27f4a42004b4" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "mach" @@ -3747,23 +3627,23 @@ dependencies = [ [[package]] name = "manyhow" -version = "0.5.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bd36e9fda56207708aaa550916d2b3fa2eb59fc4eeb711aabdcb995e29f27d6" +checksum = "516b76546495d933baa165075b95c0a15e8f7ef75e53f56b19b7144d80fd52bd" dependencies = [ "darling_core", "manyhow-macros", "proc-macro2", "quote", "syn 1.0.109", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "manyhow-macros" -version = "0.5.1" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f77799897bae27ab96fc15f4a1139b0ae27bdcc9fa103032dae75112c1e781d1" +checksum = "8ba072c0eadade3160232e70893311f1f8903974488096e2eb8e48caba2f0cf1" dependencies = [ "proc-macro-utils", "proc-macro2", @@ -3781,23 +3661,23 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.0" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b87248edafb776e59e6ee64a79086f65890d3510f2c656c000bf2a7e8a0aea40" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memfd" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffc89ccdc6e10d6907450f753537ebc5c5d3460d2e4e62ea74bd571db62c0f9e" +checksum = "b2cffa4ad52c6f791f4f8b15f0c05f9824b2ced1160e88cc393d64fff9a8ac64" dependencies = [ - "rustix 0.37.23", + "rustix", ] [[package]] @@ -3809,22 +3689,13 @@ dependencies = [ "libc", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg 1.1.0", -] - [[package]] name = "memoffset" version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" dependencies = [ - "autocfg 1.1.0", + "autocfg", ] [[package]] @@ -3849,6 +3720,12 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +[[package]] +name = "minisign-verify" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "933dca44d65cdd53b355d0b73d380a2ff5da71f87f036053188bf1eab6a19881" + [[package]] name = "miniz_oxide" version = "0.7.1" @@ -3890,7 +3767,7 @@ dependencies = [ 
"log", "memchr", "mime", - "spin", + "spin 0.9.8", "version_check", ] @@ -3943,11 +3820,11 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.15" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ - "autocfg 1.1.0", + "autocfg", "libm", ] @@ -3957,7 +3834,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi 0.3.3", "libc", ] @@ -3972,22 +3849,13 @@ dependencies = [ [[package]] name = "object" -version = "0.30.4" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03b4680b86d9cfafba8fc491dc9b6df26b68cf40e9e6cd73909194759a63c385" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "crc32fast", - "hashbrown 0.13.2", - "indexmap 1.9.3", - "memchr", -] - -[[package]] -name = "object" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1" -dependencies = [ + "hashbrown 0.14.1", + "indexmap 2.0.2", "memchr", ] @@ -4017,11 +3885,11 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.55" +version = "0.10.59" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "345df152bc43501c5eb9e4654ff05f794effb78d4efe3d53abc158baddc0703d" +checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.0", "cfg-if", "foreign-types", "libc", @@ -4038,7 +3906,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -4049,18 +3917,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "111.26.0+1.1.1u" +version = "300.1.5+3.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efc62c9f12b22b8f5208c23a7200a442b2e5999f8bdf80233852122b5a4f6f37" +checksum = "559068e4c12950d7dcaa1857a61725c0d38d4fc03ff8e070ab31a75d6e316491" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.90" +version = "0.9.95" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374533b0e45f3a7ced10fcaeccca020e66656bc03dac384f852e4e5a7a8104a6" +checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9" dependencies = [ "cc", "libc", @@ -4092,9 +3960,9 @@ dependencies = [ [[package]] name = "parity-scale-codec" -version = "3.6.4" +version = "3.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8e946cc0cc711189c0b0249fb8b599cbeeab9784d83c415719368bb8d4ac64" +checksum = "0dec8a8073036902368c2cdc0387e85ff9a37054d7e7c98e592145e0c92cd4fb" dependencies = [ "arrayvec", "bitvec", @@ -4106,9 +3974,9 @@ dependencies = [ [[package]] name = "parity-scale-codec-derive" -version = "3.6.4" +version = "3.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a296c3079b5fefbc499e1de58dc26c09b1b9a5952d26694ee89f04a43ebbb3e" +checksum = 
"312270ee71e1cd70289dacf597cab7b207aa107d2f28191c2ae45b2ece18a260" dependencies = [ "proc-macro-crate", "proc-macro2", @@ -4118,9 +3986,9 @@ dependencies = [ [[package]] name = "parity_scale_decoder" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ - "clap 4.3.15", + "clap 4.4.6", "colored", "eyre", "iroha_crypto", @@ -4158,7 +4026,7 @@ dependencies = [ "redox_syscall 0.3.5", "smallvec", "thread-id", - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -4182,9 +4050,9 @@ dependencies = [ "proc-macro2", "quote", "regex", - "regex-syntax 0.7.4", + "regex-syntax 0.7.5", "structmeta", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -4195,18 +4063,18 @@ checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "path-absolutize" -version = "3.1.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43eb3595c63a214e1b37b44f44b0a84900ef7ae0b4c5efce59e123d246d7a0de" +checksum = "e4af381fe79fa195b4909485d99f73a80792331df0625188e707854f0b3383f5" dependencies = [ "path-dedot", ] [[package]] name = "path-dedot" -version = "3.1.0" +version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d55e486337acb9973cdea3ec5638c1b3bcb22e573b2b7b41969e0c744d5a15e" +checksum = "07ba0ad7e047712414213ff67533e6dd477af0a4e1d14fb52343e53d30ea9397" dependencies = [ "once_cell", ] @@ -4225,19 +4093,20 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d2d1d55045829d65aad9d389139882ad623b33b904e7c9f1b10c5b8927298e5" +checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" dependencies = [ + "memchr", "thiserror", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f94bca7e7a599d89dea5dfa309e217e7906c3c007fb9c3299c40b10d6a315d3" +checksum = "35513f630d46400a977c4cb58f78e1bfbe01434316e60c37d27b9ad6139c66d8" dependencies = [ "pest", "pest_generator", @@ -4245,63 +4114,63 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d490fe7e8556575ff6911e45567ab95e71617f43781e5c05490dc8d75c965c" +checksum = "bc9fc1b9e7057baba189b5c626e2d6f40681ae5b6eb064dc7c7834101ec8123a" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "pest_meta" -version = "2.7.1" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2674c66ebb4b4d9036012091b537aae5878970d6999f81a265034d85b136b341" +checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d" dependencies = [ "once_cell", "pest", - "sha2 0.10.7", + "sha2", ] [[package]] name = "petgraph" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dd7d28ee937e54fe3080c91faa1c3a46c06de6252988a7f4592ba2310ef22a4" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 1.9.3", + "indexmap 2.0.2", ] [[package]] name = "pin-project" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"030ad2bc4db10a8944cb0d837f158bdfec4d4a4873ab701a95046770d11f8842" +checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec2e072ecce94ec471b13398d5402c188e76ac03cf74dd1a975161b23a3f6d9c" +checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -4311,9 +4180,9 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkcs8" -version = "0.7.6" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee3ef9b64d26bad0536099c816c6734379e45bbd5f14798def6809e5cc350447" +checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der", "spki", @@ -4325,6 +4194,12 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4503fa043bf02cee09a9582e9554b4c6403b2ef55e4612e96561d294419429f8" + [[package]] name = "plotters" version = "0.3.5" @@ -4355,21 +4230,11 @@ dependencies = [ [[package]] name = "poly1305" -version = "0.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b7456bc1ad2d4cf82b3a016be4c2ac48daf11bf990c1603ebd447fe6f30fca8" -dependencies = [ - "cpuid-bool", - "universal-hash", -] - -[[package]] -name = "polyval" -version = "0.4.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eebcc4aa140b9abd2bc40d9c3f7ccec842679cd79045ac3a7ac698c1a064b7cd" +checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf" dependencies = [ - "cpuid-bool", + "cpufeatures", "opaque-debug 0.3.0", "universal-hash", ] @@ -4427,18 +4292,18 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" dependencies = [ "unicode-ident", ] [[package]] name = "prodash" -version = "25.0.1" +version = "26.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c236e70b7f9b9ea00d33c69f63ec1ae6e9ae96118923cd37bd4e9c7396f0b107" +checksum = "794b5bf8e2d19b53dcdcec3e4bba628e20f5b6062503ba89281fa7037dd7bbcf" [[package]] name = "prometheus" @@ -4456,19 +4321,19 @@ dependencies = [ [[package]] name = "proptest" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e35c06b98bf36aba164cc17cb25f7e232f5c4aeea73baa14b8a9f0d92dbfa65" +checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" dependencies = [ "bit-set", - "bitflags 1.3.2", - "byteorder", + "bit-vec", + "bitflags 2.4.0", 
"lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", - "rand_xorshift 0.3.0", - "regex-syntax 0.6.29", + "rand_xorshift", + "regex-syntax 0.7.5", "rusty-fork", "tempfile", "unarray", @@ -4476,9 +4341,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.11.9" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" dependencies = [ "bytes", "prost-derive", @@ -4486,22 +4351,22 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.11.9" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" dependencies = [ "anyhow", - "itertools", + "itertools 0.11.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.38", ] [[package]] name = "prost-types" -version = "0.11.9" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +checksum = "e081b29f63d83a4bc75cfc9f3fe424f9156cf92d8a4f0c9407cce9a1b67327cf" dependencies = [ "prost", ] @@ -4515,17 +4380,6 @@ dependencies = [ "cc", ] -[[package]] -name = "pulldown-cmark" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffade02495f22453cd593159ea2f59827aae7f53fa8323f756799b670881dcf8" -dependencies = [ - "bitflags 1.3.2", - "memchr", - "unicase", -] - [[package]] name = "quick-error" version = "1.2.3" @@ -4534,9 +4388,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.31" +version = "1.0.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" dependencies = [ "proc-macro2", ] @@ -4547,24 +4401,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.6.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" -dependencies = [ - "autocfg 0.1.8", - "libc", - "rand_chacha 0.1.1", - "rand_core 0.4.2", - "rand_hc 0.1.0", - "rand_isaac", - "rand_jitter", - "rand_pcg", - "rand_xorshift 0.1.1", - "winapi", -] - [[package]] name = "rand" version = "0.7.3" @@ -4575,7 +4411,7 @@ dependencies = [ "libc", "rand_chacha 0.2.1", "rand_core 0.5.1", - "rand_hc 0.2.0", + "rand_hc", ] [[package]] @@ -4589,16 +4425,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "rand_chacha" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.3.1", -] - [[package]] name = "rand_chacha" version = "0.2.1" @@ -4619,21 +4445,6 @@ dependencies = [ "rand_core 0.6.4", ] -[[package]] -name = "rand_core" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" -dependencies = [ - 
"rand_core 0.4.2", -] - -[[package]] -name = "rand_core" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" - [[package]] name = "rand_core" version = "0.5.1" @@ -4652,15 +4463,6 @@ dependencies = [ "getrandom 0.2.10", ] -[[package]] -name = "rand_hc" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "rand_hc" version = "0.2.0" @@ -4670,45 +4472,6 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_isaac" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" -dependencies = [ - "rand_core 0.3.1", -] - -[[package]] -name = "rand_jitter" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" -dependencies = [ - "libc", - "rand_core 0.4.2", - "winapi", -] - -[[package]] -name = "rand_pcg" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44" -dependencies = [ - "autocfg 0.1.8", - "rand_core 0.4.2", -] - -[[package]] -name = "rand_xorshift" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c" -dependencies = [ - "rand_core 0.3.1", -] - [[package]] name = "rand_xorshift" version = "0.3.0" @@ -4720,9 +4483,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -4730,14 +4493,12 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] @@ -4771,9 +4532,9 @@ dependencies = [ [[package]] name = "regalloc2" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b4dcbd3a2ae7fb94b5813fa0e957c6ab51bf5d0a8ee1b69e0c2d0f1e6eb8485" +checksum = "ad156d539c879b7a24a363a2016d77961786e71f48f2e2fc8302a92abd2429a6" dependencies = [ "hashbrown 0.13.2", "log", @@ -4784,14 +4545,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.9.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2eae68fc220f7cf2532e4494aded17545fce192d59cd996e0fe7887f4ceb575" +checksum = "d119d7c7ca818f8a53c300863d4f87566aac09943aef5b355bb83969dae75d87" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.3", - "regex-syntax 0.7.4", + "regex-automata 0.4.1", + "regex-syntax 0.8.0", ] [[package]] @@ -4805,13 +4566,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.3" +version = "0.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" +checksum = "465c6fc0621e4abc4187a2bda0937bfd4f722c2730b29562e19689ea796c9a4b" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.4", + "regex-syntax 0.8.0", ] [[package]] @@ -4822,9 +4583,46 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" + +[[package]] +name = "regex-syntax" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3cbb081b9784b07cceb8824c8583f86db4814d172ab043f3c23f7dc600bf83d" + +[[package]] +name = "rfc6979" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" +dependencies = [ + "hmac", + "subtle", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "rle-decode-fast" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3582f63211428f83597b51b2ddb88e2a91a9d52d12831f9d08f5e624e8977422" [[package]] name = "rustc-demangle" @@ -4839,39 +4637,68 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] -name = "rustix" -version = "0.37.23" +name = "rustc_version" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", + "semver", ] [[package]] name = "rustix" -version = "0.38.4" +version = "0.38.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" +checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e" dependencies = [ - "bitflags 2.3.3", + "bitflags 2.4.0", "errno", "libc", - "linux-raw-sys 0.4.3", + "linux-raw-sys", "windows-sys 0.48.0", ] +[[package]] +name = "rustls" +version = "0.21.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" +dependencies = [ + "log", + "ring", + "rustls-webpki", + "sct", +] + +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + [[package]] name = "rustls-pemfile" version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - 
"base64 0.21.2", + "base64 0.21.4", +] + +[[package]] +name = "rustls-webpki" +version = "0.101.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" +dependencies = [ + "ring", + "untrusted", ] [[package]] @@ -4935,42 +4762,54 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3cf7c11c38cb994f3d40e8a8cde3bbd1f72a435e4c49e85d6553d8312306152" [[package]] -name = "sealed" -version = "0.5.0" +name = "sct" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4a8caec23b7800fb97971a1c6ae365b6239aaeddfb934d6265f8505e795699d" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" dependencies = [ - "heck", - "proc-macro2", - "quote", - "syn 2.0.26", + "ring", + "untrusted", +] + +[[package]] +name = "sec1" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3e97a565f76233a6003f9f5c54be1d9c5bdfa3eccfb189469f11ec4901c47dc" +dependencies = [ + "base16ct", + "der", + "generic-array 0.14.7", + "pkcs8", + "subtle", + "zeroize", ] [[package]] name = "secp256k1" -version = "0.19.0" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6179428c22c73ac0fbb7b5579a56353ce78ba29759b3b8575183336ea74cdfb" +checksum = "2acea373acb8c21ecb5a23741452acd2593ed44ee3d343e72baaa143bc89d0d5" dependencies = [ - "rand 0.6.5", + "rand 0.8.5", "secp256k1-sys", "serde", ] [[package]] name = "secp256k1-sys" -version = "0.3.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11553d210db090930f4432bea123b31f70bbf693ace14504ea2a35e796c28dd2" +checksum = "09e67c467c38fd24bd5499dc9a18183b31575c12ee549197e3e20d57aa4fe3b7" dependencies = [ "cc", ] [[package]] name = "security-framework" -version = "2.9.1" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc758eb7bffce5b308734e9b0c1468893cae9ff70ebf13e7090be8dcbcc83a8" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" dependencies = [ "bitflags 1.3.2", "core-foundation", @@ -4981,9 +4820,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f51d0c0d83bec45f16480d0ce0058397a69e48fcdc52d1dc8855fb68acbd31a7" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" dependencies = [ "core-foundation-sys", "libc", @@ -4991,15 +4830,15 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.171" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" +checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] @@ -5013,32 +4852,22 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", - 
"serde", -] - [[package]] name = "serde_derive" -version = "1.0.171" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "serde_json" -version = "1.0.103" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" dependencies = [ "itoa", "ryu", @@ -5059,9 +4888,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "2.3.3" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ff71d2c147a7b57362cead5e22f772cd52f6ab31cfcd9edcd7f6aeb2a0afbe" +checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" dependencies = [ "serde", "serde_with_macros", @@ -5069,23 +4898,23 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "2.3.3" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" +checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "serde_yaml" -version = "0.9.24" +version = "0.9.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd5f51e3fdb5b9cdd1577e1cb7a733474191b1aca6a72c2e50913241632c1180" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.0.2", "itoa", "ryu", "serde", @@ -5094,10 +4923,11 @@ dependencies = [ [[package]] name = "serial_test" -version = "0.8.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eec42e7232e5ca56aa59d63af3c7f991fe71ee6a3ddd2d3480834cf3902b007" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" dependencies = [ + "dashmap", "futures", "lazy_static", "log", @@ -5107,46 +4937,20 @@ dependencies = [ [[package]] name = "serial_test_derive" -version = "0.8.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1b95bb2f4f624565e8fe8140c789af7e2082c0e0561b5a82a1b678baa9703dc" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ - "proc-macro-error", "proc-macro2", "quote", - "rustversion", - "syn 1.0.109", -] - -[[package]] -name = "sha-1" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", -] - -[[package]] -name = "sha-1" -version = "0.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5058ada175748e33390e40e872bd0fe59a19f265d0158daa551c5a88a76009c" -dependencies = [ - "cfg-if", - "cpufeatures", - "digest 0.10.7", + "syn 2.0.38", ] [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = 
"e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -5161,22 +4965,9 @@ checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "sha2" -version = "0.9.9" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" -dependencies = [ - "block-buffer 0.9.0", - "cfg-if", - "cpufeatures", - "digest 0.9.0", - "opaque-debug 0.3.0", -] - -[[package]] -name = "sha2" -version = "0.10.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -5185,14 +4976,14 @@ dependencies = [ [[package]] name = "sha256" -version = "1.2.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "386f700b0c798d92ac20a53342c240ff9d58030c3b845fbaeb92eead3a774792" +checksum = "7895c8ae88588ccead14ff438b939b0c569cd619116f14b4d13fdff7b8333386" dependencies = [ "async-trait", "bytes", "hex", - "sha2 0.10.7", + "sha2", "tokio", ] @@ -5209,23 +5000,11 @@ dependencies = [ "opaque-debug 0.2.3", ] -[[package]] -name = "sha3" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug 0.3.0", -] - [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] @@ -5238,9 +5017,9 @@ checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "signal-hook" -version = "0.3.16" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b824b6e687aff278cdbf3b36f07aa52d4bd4099699324d5da86a2ebce3aa00b3" +checksum = "8621587d4798caf8eb44879d42e56b9a93ea5dcd315a6487c357130095b62801" dependencies = [ "libc", "signal-hook-registry", @@ -5268,21 +5047,21 @@ dependencies = [ [[package]] name = "signature" -version = "1.3.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2807892cfa58e081aa1f1111391c7a0649d4fa127a4ffbe34bcbfb35a1171a4" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" dependencies = [ - "digest 0.9.0", + "digest 0.10.7", "rand_core 0.6.4", ] [[package]] name = "slab" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6528351c9bc8ab22353f9d776db39a20288e8d6c37ef8cfe3317cf875eecfc2d" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" dependencies = [ - "autocfg 1.1.0", + "autocfg", ] [[package]] @@ -5303,9 +5082,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" dependencies = [ "serde", ] @@ -5320,6 +5099,22 @@ dependencies = 
[ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -5328,9 +5123,9 @@ checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" [[package]] name = "spinoff" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fee259f96b31e7a18657d11741fe30d63f98e07de70e7a19d2b705ab9b331cdc" +checksum = "20aa2ed67fbb202e7b716ff8bfc6571dd9301617767380197d701c31124e88f6" dependencies = [ "colored", "once_cell", @@ -5339,10 +5134,11 @@ dependencies = [ [[package]] name = "spki" -version = "0.4.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c01a0c15da1b0b0e1494112e7af814a678fec9bd157881b49beac661e9b6f32" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" dependencies = [ + "base64ct", "der", ] @@ -5356,7 +5152,20 @@ checksum = "3b9b39299b249ad65f3b7e96443bad61c02ca5cd3589f46cb6d610a0fd6c0d6a" name = "stable_deref_trait" version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "stacker" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c886bd4480155fd3ef527d45e9ac8dd7118a898a46530b7b94c3e21866259fce" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "winapi", +] [[package]] name = "static_assertions" @@ -5388,7 +5197,7 @@ dependencies = [ "proc-macro2", "quote", "structmeta-derive", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -5399,7 +5208,7 @@ checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -5407,8 +5216,14 @@ name = "strum" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" + +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" dependencies = [ - "strum_macros", + "strum_macros 0.25.2", ] [[package]] @@ -5424,6 +5239,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ad8d03b598d3d0fff69bf533ee3ef19b8eeb342729596df84bcc7e1f96ec4059" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.38", +] + [[package]] name = "subtle" version = "2.4.1" @@ -5451,9 +5279,9 @@ dependencies = [ [[package]] name = "supports-color" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4950e7174bffabe99455511c39707310e7e9b440364a2fcb1cc21521be57b354" +checksum = "d6398cde53adc3c4557306a96ce67b302968513830a77a95b2b17305d9719a89" dependencies = [ "is-terminal", "is_ci", @@ -5472,9 
+5300,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "e96b79aaa137db8f61e26363a0c9b47d8b4ec75da28b7d1d614c2303e232408b" dependencies = [ "proc-macro2", "quote", @@ -5487,56 +5315,54 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "synstructure" -version = "0.12.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", - "unicode-xid", -] - [[package]] name = "tap" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" +[[package]] +name = "tar" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" +dependencies = [ + "filetime", + "libc", + "xattr", +] + [[package]] name = "target-lexicon" -version = "0.12.9" +version = "0.12.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df8e77cb757a61f51b947ec4a7e3646efd825b73561db1c232a8ccb639e611a0" +checksum = "14c39fd04924ca3a864207c66fc2cd7d22d7c016007f9ce846cbb9326331930a" [[package]] name = "tempfile" -version = "3.6.0" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31c0432476357e58790aaa47a8efb0c5138f137343f3b5f23bd36a27e3b0a6d6" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "autocfg 1.1.0", "cfg-if", - "fastrand 1.9.0", + "fastrand", "redox_syscall 0.3.5", - "rustix 0.37.23", + "rustix", "windows-sys 0.48.0", ] [[package]] name = "termcolor" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" dependencies = [ "winapi-util", ] [[package]] name = "test_network" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" dependencies = [ "eyre", "futures", @@ -5557,15 +5383,6 @@ dependencies = [ "unique_port", ] -[[package]] -name = "textwrap" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" -dependencies = [ - "unicode-width", -] - [[package]] name = "textwrap" version = "0.16.0" @@ -5574,29 +5391,29 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.43" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a35fc5b8971143ca348fa6df4f024d4d55264f3468c71ad1c2f365b0a4d58c42" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.43" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "thread-id" -version = "4.1.0" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ee93aa2b8331c0fec9091548843f2c90019571814057da3b783f9de09349d73" +checksum = "79474f573561cdc4871a0de34a51c92f7f5a56039113fbb5b9c9f96bdb756669" dependencies = [ "libc", "redox_syscall 0.2.16", @@ -5621,21 +5438,11 @@ dependencies = [ [[package]] name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.23" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59e399c068f43a5d116fedaf73b203fa4f9c519f17e2b34f63221d3792f81446" +checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" dependencies = [ + "deranged", "itoa", "libc", "num_threads", @@ -5646,15 +5453,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.10" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96ba15a897f3c86766b757e5ac7221554c6750054d74d5b28844fce5fb36a6c4" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" dependencies = [ "time-core", ] @@ -5686,11 +5493,10 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" dependencies = [ - "autocfg 1.1.0", "backtrace", "bytes", "libc", @@ -5698,7 +5504,7 @@ dependencies = [ "num_cpus", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.4", "tokio-macros", "tracing", "windows-sys 0.48.0", @@ -5722,7 +5528,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] @@ -5735,6 +5541,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.14" @@ -5748,47 +5564,27 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.16.1" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e80b39df6afcc12cdf752398ade96a6b9e99c903dfdc36e53ad10b9c366bca72" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", "native-tls", + "rustls", + "rustls-native-certs", "tokio", "tokio-native-tls", - "tungstenite 0.16.0", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f714dd15bead90401d77e04243611caec13726c2408afd5b31901dfcdcb3b181" -dependencies = [ - "futures-util", - "log", - "tokio", - "tungstenite 0.17.3", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54319c93411147bced34cb5609a80e0a8e44c5999c93903a81cd866630ec0bfd" -dependencies = [ - "futures-util", - "log", - "tokio", - "tungstenite 0.18.0", + "tokio-rustls", + "tungstenite", + "webpki-roots", ] [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" dependencies = [ "bytes", "futures-core", @@ -5815,27 +5611,26 @@ checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" [[package]] name = "toml_edit" -version = "0.19.14" +version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.0.2", "toml_datetime", "winnow", ] [[package]] name = "tonic" -version = "0.9.2" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" +checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" dependencies = [ + "async-stream", "async-trait", "axum", - "base64 0.21.2", + "base64 0.21.4", "bytes", - "futures-core", - "futures-util", "h2", "http", "http-body", @@ -5905,21 +5700,21 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", ] [[package]] name = "tracing-bunyan-formatter" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "464ce79ea7f689ca56d90a9c5563e803a4b61b2695e789205644ed8e8101e6bf" +checksum = "b5c266b9ac83dedf0e0385ad78514949e6d89491269e7065bee51d2bb8ec7373" dependencies = [ "ahash", "gethostname", "log", "serde", "serde_json", - "time 0.3.23", + "time", "tracing", "tracing-core", "tracing-log", @@ -6003,9 +5798,9 @@ checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "trybuild" -version = "1.0.81" +version = "1.0.85" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04366e99ff743345622cd00af2af01d711dc2d1ef59250d7347698d21b546729" +checksum = "196a58260a906cedb9bf6d8034b6379d0c11f552416960452f267402ceeddff1" dependencies = [ "basic-toml", "glob", @@ -6018,56 +5813,19 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.16.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ad3713a14ae247f22a728a0456a545df14acf3867f905adff84be99e23b3ad1" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ - "base64 0.13.1", "byteorder", "bytes", + "data-encoding", "http", "httparse", "log", "native-tls", "rand 0.8.5", - "sha-1 0.9.8", - "thiserror", - "url", - "utf-8", -] - -[[package]] -name = "tungstenite" -version = "0.17.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e27992fd6a8c29ee7eef28fc78349aa244134e10ad447ce3b9f0ac0ed0fa4ce0" 
-dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "http", - "httparse", - "log", - "rand 0.8.5", - "sha-1 0.10.1", - "thiserror", - "url", - "utf-8", -] - -[[package]] -name = "tungstenite" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" -dependencies = [ - "base64 0.13.1", - "byteorder", - "bytes", - "http", - "httparse", - "log", - "rand 0.8.5", + "rustls", "sha1", "thiserror", "url", @@ -6076,9 +5834,9 @@ dependencies = [ [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ucd-trie" @@ -6094,9 +5852,9 @@ checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" [[package]] name = "unicase" -version = "2.6.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" dependencies = [ "version_check", ] @@ -6115,9 +5873,9 @@ checksum = "98e90c70c9f0d4d1ee6d0a7d04aa06cb9bbd53d8cfbdd62a0269a7c2eb640552" [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -6136,9 +5894,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -6157,11 +5915,11 @@ dependencies = [ [[package]] name = "universal-hash" -version = "0.4.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f214e8f697e925001e66ec2c6e37a4ef93f0f78c2eed7814394e10c62025b05" +checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" dependencies = [ - "generic-array 0.14.7", + "crypto-common", "subtle", ] @@ -6172,54 +5930,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" [[package]] -name = "url" -version = "2.4.0" +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "ureq" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "f5ccd538d4a604753ebc2f17cd9946e89b77bf87f6a8e2309667c6f2e87855e3" dependencies = [ - "form_urlencoded", - "idna", - "percent-encoding", - "serde", + "base64 0.21.4", + "log", + "once_cell", + "rustls", + "rustls-webpki", + "url", + "webpki-roots", ] [[package]] -name = "ursa" -version = "0.3.7" +name = "url" +version = "2.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "8760a62e18e4d3e3f599e15c09a9f9567fd9d4a90594d45166162be8d232e63b" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" dependencies = [ - "aead", - "aes", - "aes-gcm", - "amcl", - "amcl_wrapper", - "arrayref", - "blake2", - "block-modes", - "block-padding 0.2.1", - "chacha20poly1305", - "curve25519-dalek", - "ed25519-dalek", - "failure", - "hex", - "hkdf", - "hmac", - "int_traits", - "k256", - "lazy_static", - "log", - "openssl", - "rand 0.7.3", - "rand_chacha 0.2.1", - "secp256k1", + "form_urlencoded", + "idna", + "percent-encoding", "serde", - "sha2 0.9.9", - "sha3 0.9.1", - "subtle", - "time 0.1.45", - "x25519-dalek", - "zeroize", ] [[package]] @@ -6263,14 +6003,14 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vergen" -version = "8.2.4" +version = "8.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbc5ad0d9d26b2c49a5ab7da76c3e79d3ee37e7821799f8223fcb8f2f391a2e7" +checksum = "85e7dc29b3c54a2ea67ef4f953d5ec0c4085035c0ae2d325be1c0d2144bd9f16" dependencies = [ "anyhow", "gix", "rustversion", - "time 0.3.23", + "time", ] [[package]] @@ -6290,9 +6030,9 @@ dependencies = [ [[package]] name = "walkdir" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", "winapi-util", @@ -6309,9 +6049,9 @@ dependencies = [ [[package]] name = "warp" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba431ef570df1287f7f8b07e376491ad54f84d26ac473489427231e1718e1f69" +checksum = "c1e92e22e03ff1230c03a1a8ee37d2f89cd489e2e541b7550d6afad96faed169" dependencies = [ "bytes", "futures-channel", @@ -6332,7 +6072,7 @@ dependencies = [ "serde_urlencoded", "tokio", "tokio-stream", - "tokio-tungstenite 0.18.0", + "tokio-tungstenite", "tokio-util", "tower-service", "tracing", @@ -6344,12 +6084,6 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6377,7 +6111,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", "wasm-bindgen-shared", ] @@ -6399,7 +6133,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.38", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6412,23 +6146,32 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-encoder" -version = "0.31.0" +version = "0.36.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "822b645bf4f2446b949776ffca47e2af60b167209ffb70814ef8779d299cd421" +dependencies = [ + "leb128", +] + +[[package]] +name = "wasm-encoder" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"06a3d1b4a575ffb873679402b2aedb3117555eb65c27b1b86c8a91e574bc2a2a" +checksum = "7b09bc5df933a3dabbdb72ae4b6b71be8ae07f58774d5aa41bd20adcd41a235a" dependencies = [ "leb128", ] [[package]] name = "wasm-opt" -version = "0.113.0" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65a2799e08026234b07b44da6363703974e75be21430cef00756bbc438c8ff8a" +checksum = "fc942673e7684671f0c5708fc18993569d184265fd5223bb51fc8e5b9b6cfd52" dependencies = [ "anyhow", "libc", - "strum", - "strum_macros", + "strum 0.24.1", + "strum_macros 0.24.3", "tempfile", "thiserror", "wasm-opt-cxx-sys", @@ -6437,9 +6180,9 @@ dependencies = [ [[package]] name = "wasm-opt-cxx-sys" -version = "0.113.0" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8d26f86d1132245e8bcea8fac7f02b10fb885b6696799969c94d7d3c14db5e1" +checksum = "8c57b28207aa724318fcec6575fe74803c23f6f266fce10cbc9f3f116762f12e" dependencies = [ "anyhow", "cxx", @@ -6449,9 +6192,9 @@ dependencies = [ [[package]] name = "wasm-opt-sys" -version = "0.113.0" +version = "0.116.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497d069cd3420cdd52154a320b901114a20946878e2de62c670f9d906e472370" +checksum = "8a1cce564dc768dacbdb718fc29df2dba80bd21cb47d8f77ae7e3d95ceb98cbe" dependencies = [ "anyhow", "cc", @@ -6461,19 +6204,19 @@ dependencies = [ [[package]] name = "wasmparser" -version = "0.107.0" +version = "0.116.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29e3ac9b780c7dda0cac7a52a5d6d2d6707cc6e3451c9db209b6c758f40d7acb" +checksum = "a58e28b80dd8340cb07b8242ae654756161f6fc8d0038123d679b7b99964fa50" dependencies = [ - "indexmap 1.9.3", + "indexmap 2.0.2", "semver", ] [[package]] name = "wasmtime" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b1f817f2ca5070983c71f1205fbab5848c9073df7f4e1af9fdceb4cc4a1b8e5" +checksum = "ae4b1702ef55144d6f594085f4989dc71fb71a791be1c8354ecc8e489b81199b" dependencies = [ "anyhow", "async-trait", @@ -6481,17 +6224,19 @@ dependencies = [ "bumpalo", "cfg-if", "fxprof-processed-profile", - "indexmap 1.9.3", + "indexmap 2.0.2", "libc", "log", - "object 0.30.4", + "object", "once_cell", "paste", "psm", "rayon", "serde", + "serde_derive", "serde_json", "target-lexicon", + "wasm-encoder 0.36.2", "wasmparser", "wasmtime-cache", "wasmtime-component-macro", @@ -6506,28 +6251,28 @@ dependencies = [ [[package]] name = "wasmtime-asm-macros" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f82fbfda4610e9225238c62574ecded8e9d6ad3a12f387ac45819ecad5c3f9b" +checksum = "c981d0e87bb3e98e08e76644e7ae5dfdef7f1d4105145853f3d677bb4535d65f" dependencies = [ "cfg-if", ] [[package]] name = "wasmtime-cache" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4f5b87f1ed383d6c219c04467ab6ae87990d6c2815d5a990138990a7fcbab95" +checksum = "3d7ba8adaa84fdb9dd659275edcf7fc5282c44b9c9f829986c71d44fd52ea80a" dependencies = [ "anyhow", - "base64 0.21.2", + "base64 0.21.4", "bincode", "directories-next", - "file-per-thread-logger", "log", - "rustix 0.37.23", + "rustix", "serde", - "sha2 0.10.7", + "serde_derive", + "sha2", "toml", "windows-sys 0.48.0", "zstd", @@ -6535,14 +6280,14 @@ dependencies = [ [[package]] name = "wasmtime-component-macro" -version = "11.0.1" +version = "15.0.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e27b96c540c78e12b60025fcbc0ba8a55bff1b32885a5e8eae2df765a6bc97ac" +checksum = "c91dcbbd0e1f094351d1ae0e53463c63ba53ec8f8e0e21d17567c1979a8c3758" dependencies = [ "anyhow", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.38", "wasmtime-component-util", "wasmtime-wit-bindgen", "wit-parser", @@ -6550,17 +6295,18 @@ dependencies = [ [[package]] name = "wasmtime-component-util" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0928fe66c22bf8887e2fb524b7647308b8ce836a333af8504e4f1d80b8ea849f" +checksum = "3e85f1319a7ed36aa59446ab7e967d0c2fb0cd179bf56913633190b44572023e" [[package]] name = "wasmtime-cranelift" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b659f6e58662d1131f250339acd03aa49377f9351474282699985b79ca4d4a7c" +checksum = "1453665878e16245b9a25405e550c4a36c6731c6e34ea804edc002a38c3e6741" dependencies = [ "anyhow", + "cfg-if", "cranelift-codegen", "cranelift-control", "cranelift-entity", @@ -6569,43 +6315,45 @@ dependencies = [ "cranelift-wasm", "gimli", "log", - "object 0.30.4", + "object", "target-lexicon", "thiserror", "wasmparser", "wasmtime-cranelift-shared", "wasmtime-environ", + "wasmtime-versioned-export-macros", ] [[package]] name = "wasmtime-cranelift-shared" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74171de083bf2ecb716c507900f825e2b858346c714fbf48f4763ea760f998a8" +checksum = "d3dface3d9b72b4670781ff72675eabb291e2836b5dded6bb312b577d2bb561f" dependencies = [ "anyhow", "cranelift-codegen", "cranelift-control", "cranelift-native", "gimli", - "object 0.30.4", + "object", "target-lexicon", "wasmtime-environ", ] [[package]] name = "wasmtime-environ" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b124cbac1a3e04a744c76b3f77919343ef16dc4c818a2406dd7b689b16a54639" +checksum = "c0116108e7d231cce15fe7dd642c66c3abb14dbcf169b0130e11f223ce8d1ad7" dependencies = [ "anyhow", "cranelift-entity", "gimli", - "indexmap 1.9.3", + "indexmap 2.0.2", "log", - "object 0.30.4", + "object", "serde", + "serde_derive", "target-lexicon", "thiserror", "wasmparser", @@ -6614,24 +6362,26 @@ dependencies = [ [[package]] name = "wasmtime-fiber" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f92ffb8869395c63100ffefbd71cf9489e7e9218e63a3798dcfe93fa8945f9cf" +checksum = "b8a5896355c37bf0f9feb4f1299142ef4bed8c92576aa3a41d150fed0cafa056" dependencies = [ + "anyhow", "cc", "cfg-if", - "rustix 0.37.23", + "rustix", "wasmtime-asm-macros", + "wasmtime-versioned-export-macros", "windows-sys 0.48.0", ] [[package]] name = "wasmtime-jit" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ff15f426c2378f32ffb6d9b4370e3504231492e93f6968e8b5102c3256bbc4" +checksum = "e32b210767452f6b20157bb7c7d98295b92cc47aaad2a8aa31652f4469813a5d" dependencies = [ - "addr2line 0.19.0", + "addr2line", "anyhow", "bincode", "cfg-if", @@ -6639,10 +6389,11 @@ dependencies = [ "gimli", "ittapi", "log", - "object 0.30.4", + "object", "rustc-demangle", - "rustix 0.37.23", + "rustix", "serde", + "serde_derive", "target-lexicon", "wasmtime-environ", "wasmtime-jit-debug", @@ -6653,20 +6404,21 @@ dependencies = [ [[package]] name = "wasmtime-jit-debug" -version = 
"11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c549e219102426aa1f90bd18e56a3195ed1e696c318abb3f501c1f4924b530ac" +checksum = "bffd2785a16c55ac77565613ebda625f5850d4014af0499df750e8de97c04547" dependencies = [ - "object 0.30.4", + "object", "once_cell", - "rustix 0.37.23", + "rustix", + "wasmtime-versioned-export-macros", ] [[package]] name = "wasmtime-jit-icache-coherence" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cf02fedda287a409cff80ad40a7c6c0f0771e99b0cd5e2b79d9cb7ecdc1b2f4" +checksum = "b73ad1395eda136baec5ece7e079e0536a82ef73488e345456cc9b89858ad0ec" dependencies = [ "cfg-if", "libc", @@ -6675,70 +6427,92 @@ dependencies = [ [[package]] name = "wasmtime-runtime" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc38c6229a5d3b8a2528eb33eb11d3e7ebf570259c7cd2f01e8668fe783ea443" +checksum = "77b50f7f3c1a8dabb2607f32a81242917bd77cee75f3dec66e04b02ccbb8ba07" dependencies = [ "anyhow", "cc", "cfg-if", - "indexmap 1.9.3", + "indexmap 2.0.2", "libc", "log", "mach", "memfd", - "memoffset 0.8.0", + "memoffset", "paste", "rand 0.8.5", - "rustix 0.37.23", + "rustix", "sptr", + "wasm-encoder 0.36.2", "wasmtime-asm-macros", "wasmtime-environ", "wasmtime-fiber", "wasmtime-jit-debug", + "wasmtime-versioned-export-macros", + "wasmtime-wmemcheck", "windows-sys 0.48.0", ] [[package]] name = "wasmtime-types" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "768f6c5e7afc3a02eff2753196741db8e5ac5faf26a1e2204d7341b30a637c6f" +checksum = "447973db3dc5c24db14130ab0922795c58790aec296d198ad9d253b82ec67471" dependencies = [ "cranelift-entity", "serde", + "serde_derive", "thiserror", "wasmparser", ] +[[package]] +name = "wasmtime-versioned-export-macros" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a347bb8ecf12275fb180afb1b1c85c9e186553c43109737bffed4f54c2aa365" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.38", +] + [[package]] name = "wasmtime-wit-bindgen" -version = "11.0.1" +version = "15.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84a4a005a6a2d5faa7cd953d389da8ae979cb571fe40edec7769649d8c98d874" +checksum = "41786c7bbbf250c0e685b291323b50c6bb65f0505a2c0b4f0b598c740f13f185" dependencies = [ "anyhow", "heck", + "indexmap 2.0.2", "wit-parser", ] +[[package]] +name = "wasmtime-wmemcheck" +version = "15.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47907bdd67500c66fa308acbce7387c7bfb63b5505ef81be7fc897709afcca60" + [[package]] name = "wast" -version = "62.0.0" +version = "69.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f7ee878019d69436895f019b65f62c33da63595d8e857cbdc87c13ecb29a32" +checksum = "efa51b5ad1391943d1bfad537e50f28fe938199ee76b115be6bae83802cd5185" dependencies = [ "leb128", "memchr", "unicode-width", - "wasm-encoder", + "wasm-encoder 0.38.0", ] [[package]] name = "wat" -version = "1.0.68" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "295572bf24aa5b685a971a83ad3e8b6e684aaad8a9be24bc7bf59bed84cc1c08" +checksum = "74a4c2488d058326466e086a43f5d4ea448241a8d0975e3eb0642c0828be1eb3" dependencies = [ "wast", ] @@ -6764,10 +6538,10 @@ dependencies = [ ] [[package]] -name = "wildmatch" -version = "2.1.1" +name = 
"webpki-roots" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee583bdc5ff1cf9db20e9db5bb3ff4c3089a8f6b8b31aff265c9aba85812db86" +checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" [[package]] name = "winapi" @@ -6787,9 +6561,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -6806,7 +6580,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -6824,7 +6598,7 @@ version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets 0.48.1", + "windows-targets 0.48.5", ] [[package]] @@ -6844,17 +6618,17 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.48.0", - "windows_aarch64_msvc 0.48.0", - "windows_i686_gnu 0.48.0", - "windows_i686_msvc 0.48.0", - "windows_x86_64_gnu 0.48.0", - "windows_x86_64_gnullvm 0.48.0", - "windows_x86_64_msvc 0.48.0", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", ] [[package]] @@ -6865,9 +6639,9 @@ checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_msvc" @@ -6877,9 +6651,9 @@ checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_i686_gnu" @@ -6889,9 +6663,9 @@ checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_msvc" @@ -6901,9 +6675,9 @@ checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" 
-version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_x86_64_gnu" @@ -6913,9 +6687,9 @@ checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnullvm" @@ -6925,9 +6699,9 @@ checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_msvc" @@ -6937,33 +6711,34 @@ checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winnow" -version = "0.5.0" +version = "0.5.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fac9742fd1ad1bd9643b991319f72dd031016d44b77039a26977eb667141e7" +checksum = "037711d82167854aff2018dfd193aa0fef5370f456732f0d5a0c59b0f1b4b907" dependencies = [ "memchr", ] [[package]] name = "wit-parser" -version = "0.8.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6daec9f093dbaea0e94043eeb92ece327bbbe70c86b1f41aca9bbfefd7f050f0" +checksum = "15df6b7b28ce94b8be39d8df5cb21a08a4f3b9f33b631aedb4aa5776f785ead3" dependencies = [ "anyhow", "id-arena", - "indexmap 1.9.3", + "indexmap 2.0.2", "log", - "pulldown-cmark", "semver", + "serde", + "serde_derive", + "serde_json", "unicode-xid", - "url", ] [[package]] @@ -6977,20 +6752,30 @@ dependencies = [ [[package]] name = "x25519-dalek" -version = "1.2.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2392b6b94a576b4e2bf3c5b2757d63f10ada8020a2e4d08ac849ebcf6ea8e077" +checksum = "fb66477291e7e8d2b0ff1bcb900bf29489a9692816d79874bea351e7a8b6de96" dependencies = [ "curve25519-dalek", - "rand_core 0.5.1", + "rand_core 0.6.4", + "serde", "zeroize", ] +[[package]] +name = "xattr" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" +dependencies = [ + "libc", +] + [[package]] name = "zeroize" -version = "1.3.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" dependencies = [ "zeroize_derive", ] @@ -7003,7 +6788,19 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - 
"syn 2.0.26", + "syn 2.0.38", +] + +[[package]] +name = "zip" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "760394e246e4c28189f19d488c058bf16f564016aefac5d32bb1f3b51d5e9261" +dependencies = [ + "byteorder", + "crc32fast", + "crossbeam-utils", + "flate2", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e9717ddedd8..2dcb4ce2bab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace.package] edition = "2021" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" # TODO: teams are being deprecated update the authors URL authors = ["Iroha 2 team "] @@ -15,121 +15,191 @@ categories = ["cryptography::cryptocurrencies"] [workspace.dependencies] iroha = { path = "cli" } -iroha_dsl = { version = "=2.0.0-pre-rc.19", path = "dsl" } -iroha_cli_derive = { version = "=2.0.0-pre-rc.19", path = "cli/derive" } -iroha_macro_utils = { version = "=2.0.0-pre-rc.19", path = "macro/utils" } -iroha_wasm_builder = { version = "=2.0.0-pre-rc.19", path = "wasm_builder" } -iroha_telemetry = { version = "=2.0.0-pre-rc.19", path = "telemetry" } -iroha_telemetry_derive = { version = "=2.0.0-pre-rc.19", path = "telemetry/derive" } -iroha_p2p = { version = "=2.0.0-pre-rc.19", path = "p2p" } -iroha_data_model_derive = { version = "=2.0.0-pre-rc.19", path = "data_model/derive" } -iroha_core = { version = "=2.0.0-pre-rc.19 ", path = "core" } -iroha_primitives = { version = "=2.0.0-pre-rc.19", path = "primitives", default-features = false } -iroha_primitives_derive = { version = "=2.0.0-pre-rc.19", path = "primitives/derive" } -iroha_data_model = { version = "=2.0.0-pre-rc.19", path = "data_model" } -iroha_client = { version = "=2.0.0-pre-rc.19", path = "client" } -iroha_config = { version = "=2.0.0-pre-rc.19", path = "config" } -iroha_config_base = { version = "=2.0.0-pre-rc.19", path = "config/base" } -iroha_config_derive = { version = "=2.0.0-pre-rc.19", path = "config/base/derive" } -iroha_schema_gen = { version = "=2.0.0-pre-rc.19", path = "schema/gen" } -iroha_schema = { version = "=2.0.0-pre-rc.19", path = "schema", default-features = false } -iroha_schema_derive = { version = "=2.0.0-pre-rc.19", path = "schema/derive" } -iroha_logger = { version = "=2.0.0-pre-rc.19", path = "logger" } -iroha_crypto = { version = "=2.0.0-pre-rc.19", path = "crypto", default-features = false } -iroha_macro = { version = "=2.0.0-pre-rc.19", path = "macro", default-features = false } -iroha_derive = { version = "=2.0.0-pre-rc.19", path = "macro/derive" } -iroha_futures = { version = "=2.0.0-pre-rc.19", path = "futures" } -iroha_futures_derive = { version = "=2.0.0-pre-rc.19", path = "futures/derive" } -iroha_genesis = { version = "=2.0.0-pre-rc.19", path = "genesis" } -iroha_ffi = { version = "=2.0.0-pre-rc.19", path = "ffi" } -iroha_ffi_derive = { version = "=2.0.0-pre-rc.19", path = "ffi/derive" } -iroha_version = { version = "=2.0.0-pre-rc.19", path = "version", default-features = false } -iroha_version_derive = { version = "=2.0.0-pre-rc.19", path = "version/derive", default-features = false } -iroha_wasm_codec = { version = "=2.0.0-pre-rc.19", path = "wasm_codec" } -test_network = { version = "=2.0.0-pre-rc.19", path = "core/test_network" } +iroha_dsl = { version = "=2.0.0-pre-rc.20", path = "dsl" } +iroha_cli_derive = { version = "=2.0.0-pre-rc.20", path = "cli/derive" } +iroha_macro_utils = { version = "=2.0.0-pre-rc.20", path = "macro/utils" } +iroha_telemetry = { version = "=2.0.0-pre-rc.20", path = "telemetry" } +iroha_telemetry_derive = { version = 
"=2.0.0-pre-rc.20", path = "telemetry/derive" } +iroha_p2p = { version = "=2.0.0-pre-rc.20", path = "p2p" } +iroha_core = { version = "=2.0.0-pre-rc.20 ", path = "core" } +iroha_primitives = { version = "=2.0.0-pre-rc.20", path = "primitives", default-features = false } +iroha_primitives_derive = { version = "=2.0.0-pre-rc.20", path = "primitives/derive" } +iroha_data_model = { version = "=2.0.0-pre-rc.20", path = "data_model", default-features = false } +iroha_data_model_derive = { version = "=2.0.0-pre-rc.20", path = "data_model/derive" } +iroha_client = { version = "=2.0.0-pre-rc.20", path = "client" } +iroha_config = { version = "=2.0.0-pre-rc.20", path = "config" } +iroha_config_base = { version = "=2.0.0-pre-rc.20", path = "config/base" } +iroha_config_derive = { version = "=2.0.0-pre-rc.20", path = "config/base/derive" } +iroha_schema_gen = { version = "=2.0.0-pre-rc.20", path = "schema/gen" } +iroha_schema = { version = "=2.0.0-pre-rc.20", path = "schema", default-features = false } +iroha_schema_derive = { version = "=2.0.0-pre-rc.20", path = "schema/derive" } +iroha_logger = { version = "=2.0.0-pre-rc.20", path = "logger" } +iroha_crypto = { version = "=2.0.0-pre-rc.20", path = "crypto", default-features = false } +iroha_macro = { version = "=2.0.0-pre-rc.20", path = "macro", default-features = false } +iroha_derive = { version = "=2.0.0-pre-rc.20", path = "macro/derive" } +iroha_futures = { version = "=2.0.0-pre-rc.20", path = "futures" } +iroha_futures_derive = { version = "=2.0.0-pre-rc.20", path = "futures/derive" } +iroha_genesis = { version = "=2.0.0-pre-rc.20", path = "genesis" } +iroha_ffi = { version = "=2.0.0-pre-rc.20", path = "ffi" } +iroha_ffi_derive = { version = "=2.0.0-pre-rc.20", path = "ffi/derive" } +iroha_version = { version = "=2.0.0-pre-rc.20", path = "version", default-features = false } +iroha_version_derive = { version = "=2.0.0-pre-rc.20", path = "version/derive", default-features = false } +iroha_wasm_codec = { version = "=2.0.0-pre-rc.20", path = "wasm_codec" } +iroha_wasm_builder = { version = "=2.0.0-pre-rc.20", path = "wasm_builder" } + +iroha_smart_contract = { version = "=2.0.0-pre-rc.20", path = "smart_contract" } +iroha_smart_contract_derive = { version = "=2.0.0-pre-rc.20", path = "smart_contract/derive" } +iroha_smart_contract_utils = { version = "=2.0.0-pre-rc.20", path = "smart_contract/utils" } +iroha_executor = { version = "=2.0.0-pre-rc.20", path = "smart_contract/executor" } +iroha_executor_derive = { version = "=2.0.0-pre-rc.20", path = "smart_contract/executor/derive" } +iroha_trigger = { version = "=2.0.0-pre-rc.20", path = "smart_contract/trigger" } +iroha_trigger_derive = { version = "=2.0.0-pre-rc.20", path = "smart_contract/trigger/derive" } + +test_network = { version = "=2.0.0-pre-rc.20", path = "core/test_network" } proc-macro-error = "1.0.4" -proc-macro2 = "1.0.49" +proc-macro2 = "1.0.69" syn = { package = "syn", version = "1.0.109", default-features = false } -syn2 = { package = "syn", version = "2.0.25", default-features = false } -quote = "1.0.23" -manyhow = { version = "0.5.1", features = ["darling"] } -darling = "0.20.1" +syn2 = { package = "syn", version = "2.0.38", default-features = false } +quote = "1.0.33" +manyhow = { version = "0.8.1", features = ["darling"] } +darling = "0.20.3" -futures = { version = "0.3.25", default-features = false } -async-stream = "0.3.3" -tokio = "1.23.0" -tokio-stream = "0.1.11" -tokio-tungstenite = "0.17.2" +futures = { version = "0.3.28", default-features = false } +tokio = "1.33.0" 
+tokio-stream = "0.1.14" +tokio-tungstenite = "0.20.1" crossbeam = "0.8.2" crossbeam-queue = "0.3.8" parking_lot = { version = "0.12.1" } -once_cell = "1.16.0" -tempfile = "3.3.0" -path-absolutize = "3.1.0" +once_cell = "1.18.0" +tempfile = "3.8.0" +path-absolutize = "3.1.1" pathdiff = "0.2.1" -itertools = "0.10.5" -bytes = "1.4.0" +bytes = "1.5.0" -vergen = { version = "8.1.1", default-features = false } -trybuild = "1.0.73" +vergen = { version = "8.2.5", default-features = false } +trybuild = "1.0.85" impls = "1.0.3" -base64 = { version = "0.13.1", default-features = false } +base64 = { version = "0.21.4", default-features = false } hex = { version = "0.4.3", default-features = false } -fixnum = { version = "0.9.1", default-features = false } -url = "2.3.1" +fixnum = { version = "0.9.2", default-features = false } +url = "2.4.1" prometheus = { version = "0.13.3", default-features = false } -clap = "4.2.1" +clap = "4.4.6" owo-colors = "3.5.0" -supports-color = "2.0.0" +supports-color = "2.1.0" inquire = "0.6.2" -spinoff = "0.7.0" +spinoff = "0.8.0" duct = "0.13.6" -criterion = "0.3.6" -proptest = "1.0.0" +criterion = "0.5.1" +proptest = "1.3.1" expect-test = "1.4.1" eyre = "0.6.8" color-eyre = "0.6.2" -thiserror = { version = "1.0.38", default-features = false } +thiserror = { version = "1.0.49", default-features = false } displaydoc = { version = "0.2.4", default-features = false } cfg-if = "1.0.0" derive_more = { version = "0.99.17", default-features = false } -async-trait = "0.1.60" -strum = { version = "0.24.1", default-features = false } +async-trait = "0.1.73" +strum = { version = "0.25.0", default-features = false } getset = "0.1.2" -hex-literal = "0.3.4" - -ursa = "0.3.7" -aead = "0.3.2" +hex-literal = "0.4.1" rand = "0.8.5" -warp = { version = "0.3.5", default-features = false } -wasmtime = "11.0.1" +warp = { version = "0.3.6", default-features = false } +wasmtime = "15.0.0" tracing = "0.1.37" -tracing-core = "0.1.30" -tracing-subscriber = { version = "0.3.16", default-features = false } -tracing-futures = { version = "0.2.5", default-features = false } -tracing-bunyan-formatter = { version = "0.3.4", default-features = false } +tracing-subscriber = { version = "0.3.17", default-features = false } -dashmap = "5.4.0" +dashmap = "5.5.3" rustc-hash = "1.1.0" -serde = { version = "1.0.151", default-features = false } -serde_json = { version = "1.0.91", default-features = false } -serde_yaml = "0.9.21" -serde_with = { version = "2.2.0", default-features = false } -parity-scale-codec = { version = "3.2.1", default-features = false } +serde = { version = "1.0.188", default-features = false } +serde_json = { version = "1.0.107", default-features = false } +serde_yaml = "0.9.25" +serde_with = { version = "3.3.0", default-features = false } +parity-scale-codec = { version = "3.6.5", default-features = false } json5 = "0.4.1" +[workspace.lints] +rustdoc.private_doc_tests = "deny" + +rust.anonymous_parameters = "deny" +rust.future_incompatible = "deny" +rust.missing_copy_implementations = "deny" +rust.missing_docs = "deny" +rust.nonstandard_style = "deny" +rust.rust_2018_idioms = "deny" +rust.trivial_casts = "deny" +rust.trivial_numeric_casts = "deny" +rust.unconditional_recursion = "deny" +rust.unsafe_code = "deny" +rust.unused = "deny" +rust.unused_import_braces = "deny" +rust.variant_size_differences = "deny" +rust.unused_tuple_struct_fields = "deny" +rust.explicit_outlives_requirements = "deny" +rust.non_ascii_idents = "deny" +rust.elided_lifetimes_in_paths = "allow" +rust.unknown_lints 
= "warn" +rust.single_use_lifetimes = "warn" +rust.unused_lifetimes = "warn" +# TODO: reenable +# rust.unsafe_op_in_unsafe_fn = "deny" + +# lower the priority to allow overriding later +clippy.all = { level = "deny", priority = -1 } + +# pedantic +clippy.pedantic = { level = "warn", priority = -1 } +clippy.match_wildcard_for_single_variants = "allow" +clippy.semicolon_if_nothing_returned = "allow" +clippy.wildcard_imports = "allow" +clippy.manual_let_else = "allow" +clippy.enum_glob_use = "allow" +clippy.module_name_repetitions = "allow" +clippy.must_use_candidate = "allow" + +# restriction +clippy.dbg_macro = "deny" + +# nursery +clippy.debug_assert_with_mut_call = "deny" +clippy.derive_partial_eq_without_eq = "deny" +clippy.empty_line_after_outer_attr = "deny" +clippy.fallible_impl_from = "deny" +clippy.future_not_send = "deny" +clippy.iter_with_drain = "deny" +clippy.mutex_integer = "deny" +clippy.needless_collect = "deny" +clippy.path_buf_push_overwrite = "deny" +clippy.suboptimal_flops = "deny" +clippy.trailing_empty_array = "deny" +clippy.transmute_undefined_repr = "deny" +clippy.trivial_regex = "deny" +clippy.unused_peekable = "deny" +clippy.unused_rounding = "deny" +clippy.option_if_let_else = "warn" +clippy.or_fun_call = "warn" +clippy.redundant_pub_crate = "warn" +clippy.string_lit_as_bytes = "warn" +clippy.suspicious_operation_groupings = "warn" +clippy.useless_let_if_seq = "warn" + +#cargo +clippy.redundant_feature_names = "deny" +clippy.wildcard_dependencies = "deny" + [workspace] resolver = "2" members = [ @@ -160,6 +230,13 @@ members = [ "schema", "schema/derive", "schema/gen", + "smart_contract", + "smart_contract/derive", + "smart_contract/trigger", + "smart_contract/trigger/derive", + "smart_contract/utils", + "smart_contract/executor", + "smart_contract/executor/derive", "substrate", "telemetry", "tools/kagami", @@ -167,6 +244,7 @@ members = [ "tools/parity_scale_decoder", "tools/swarm", "tools/wasm_builder_cli", + "tools/wasm_test_runner", "version", "version/derive", "wasm_codec", @@ -176,9 +254,5 @@ members = [ [profile.deploy] inherits = "release" -opt-level = 3 -debug = false strip = "symbols" -debug-assertions = false lto = true -incremental = false diff --git a/Dockerfile b/Dockerfile index 27292473800..7c6d9eced09 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ ENV CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=/x86_64-linux-musl-native/bin/ # builder stage WORKDIR /iroha COPY . . 
-RUN cargo build --target x86_64-unknown-linux-musl --features vendored --profile deploy +RUN cargo build --target x86_64-unknown-linux-musl --profile deploy # final image diff --git a/Dockerfile.build b/Dockerfile.build index c2d5e7c3632..4169fe29bd5 100644 --- a/Dockerfile.build +++ b/Dockerfile.build @@ -2,9 +2,21 @@ FROM archlinux:base-devel ENV RUSTUP_HOME=/usr/local/rustup \ CARGO_HOME=/usr/local/cargo \ - PATH=/usr/local/cargo/bin:$PATH + PATH=/usr/local/cargo/bin:$PATH \ + POETRY_HOME=/opt/poetry \ + TORII_API_PORT_MIN=8080 \ + TORII_API_PORT_MAX=8083 -RUN pacman -Syu rustup mold musl rust-musl openssl libgit2 git docker docker-buildx docker-compose --noconfirm +ENV PATH=$POETRY_HOME/bin:$PATH + +RUN pacman -Syu rustup mold musl rust-musl openssl libgit2 \ + git docker docker-buildx docker-compose \ + python python-pip --noconfirm --disable-download-timeout && \ + curl -sSL https://install.python-poetry.org | python3 - + +WORKDIR /client_cli/pytests +COPY /client_cli/pytests/pyproject.toml /client_cli/pytests/poetry.lock $WORKDIR +RUN poetry install RUN rustup toolchain install nightly-2023-06-25-x86_64-unknown-linux-gnu RUN rustup default nightly-2023-06-25-x86_64-unknown-linux-gnu @@ -12,8 +24,6 @@ RUN rustup component add llvm-tools-preview clippy RUN rustup component add rust-src RUN rustup component add rustfmt RUN rustup target add wasm32-unknown-unknown -RUN cargo install cargo-lints -RUN cargo install webassembly-test-runner RUN cargo install cargo-llvm-cov # TODO: Figure out a way to pull in libgit2, which doesn't crash if this useless variable is gone. diff --git a/Dockerfile.build.glibc b/Dockerfile.build.glibc index 5b9ee42d4b8..799b9bfa69d 100644 --- a/Dockerfile.build.glibc +++ b/Dockerfile.build.glibc @@ -2,9 +2,21 @@ FROM archlinux:base-devel ENV RUSTUP_HOME=/usr/local/rustup \ CARGO_HOME=/usr/local/cargo \ - PATH=/usr/local/cargo/bin:$PATH + PATH=/usr/local/cargo/bin:$PATH \ + POETRY_HOME=/opt/poetry \ + TORII_API_PORT_MIN=8080 \ + TORII_API_PORT_MAX=8083 -RUN pacman -Syu rustup mold openssl libgit2 git docker docker-buildx docker-compose glibc lib32-glibc --noconfirm +ENV PATH=$POETRY_HOME/bin:$PATH + +RUN pacman -Syu rustup mold openssl libgit2 git docker \ + docker-buildx docker-compose glibc lib32-glibc \ + python python-pip --noconfirm --disable-download-timeout && \ + curl -sSL https://install.python-poetry.org | python3 - + +WORKDIR /client_cli/pytests +COPY /client_cli/pytests/pyproject.toml /client_cli/pytests/poetry.lock $WORKDIR +RUN poetry install RUN rustup toolchain install nightly-2023-06-25-x86_64-unknown-linux-gnu RUN rustup default nightly-2023-06-25-x86_64-unknown-linux-gnu @@ -12,8 +24,6 @@ RUN rustup component add llvm-tools-preview clippy RUN rustup component add rust-src RUN rustup component add rustfmt RUN rustup target add wasm32-unknown-unknown -RUN cargo install cargo-lints -RUN cargo install webassembly-test-runner RUN cargo install cargo-llvm-cov # TODO: Figure out a way to pull in libgit2, which doesn't crash if this useless variable is gone. diff --git a/README.md b/README.md index 6e5a3f3512b..237481c5a48 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ Engage with the community: - [Contribute](./CONTRIBUTING.md) to the repository - [Contact us](./CONTRIBUTING.md#contact) to get help -# System Requirements +## System Requirements RAM and storage requirements depend on your use case: whether you need to build or deploy a network, how big it is, and so on. 
This table summarises the requirements: @@ -60,7 +60,7 @@ CPU considerations: * Rust compilation highly favours multi-core CPUs such as Apple M1™, AMD Ryzen™/Threadripper™/Epyc™, and Intel Alder Lake™. * On systems with restricted memory and many CPU cores, Iroha compilation may sometimes fail with `SIGKILL`. To avoid it, restrict the number of CPU cores using `cargo build -j <number_of_cores>`, where `<number_of_cores>` (without the angle brackets) is half of your RAM capacity rounded down. -# Build, Test, and Run Iroha +## Build, Test, and Run Iroha Prerequisites: @@ -90,7 +90,7 @@ bash ./scripts/test_env.sh cleanup -## Build Iroha +### Build Iroha - Build Iroha and accompanying binaries: @@ -106,7 +106,7 @@ bash ./scripts/test_env.sh cleanup If you skip this step, the Iroha container will be built using the latest available image. -## Run Iroha +### Run Iroha Once you have built Iroha, you can instantiate the minimum viable network: @@ -122,7 +122,7 @@ cd target/debug ./iroha_client_cli --help ``` -# Integration +## Integration Iroha project mainly consists of the following crates: @@ -139,12 +139,12 @@ Iroha project mainly consists of the following crates: * [`iroha_logger`](logger) uses `tracing` to provide logging facilities. * [`iroha_macro`](macro) provides the convenience macros. * [`iroha_p2p`](p2p) defines peer creation and handshake logic. -* [`iroha_default_validator`](default_validator) defines runtime validation logic. +* [`iroha_default_executor`](default_executor) defines runtime validation logic. * [`iroha_substrate`](substrate) is the bridge substrate `XClaim` external module. * [`iroha_telemetry`](telemetry) is used for monitoring and analysis of telemetry data. * [`iroha_version`](version) provides message versioning for non-simultaneous system updates. -# Maintenance +## Maintenance A brief overview on how to configure and maintain an Iroha instance: @@ -155,17 +155,17 @@ A brief overview on how to configure and maintain an Iroha instance: - [Storage](#storage) - [Scalability](#scalability) -## Configuration +### Configuration You can provide configuration parameters either as a `config.json` or using environment variables. Refer to the [detailed list](./docs/source/references/config.md) of all available configuration parameters. Configuration example you may use as a reference point: [cli/src/samples.rs](./cli/src/samples.rs) -## Endpoints +### Endpoints -You can find the detailed list of all available endpoints in the [API specifications](./docs/source/references/api_spec.md#endpoints). +For a list of all endpoints, available operations, and ways to customize them with parameters, see [API Reference > Torii Endpoints](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints) -## Logging +### Logging By default, Iroha provides logs in a human-readable format and prints them out to `stdout`. @@ -182,7 +182,7 @@ curl -X POST \ ``` -### JSON Logging Mode +#### JSON Logging Mode Additionally, Iroha supports a JSON logging mode. @@ -190,9 +190,9 @@ To enable it, provide the [logging file](./docs/source/references/config.md#logg [Log rotation](https://www.commandlinux.com/man-page/man5/logrotate.conf.5.html) is the responsibility of the peer administrator. -## Monitoring +### Monitoring -The details of the `Health` endpoint can be found in the [API specifications](./docs/source/references/api_spec.md#health). +The details of the `Health` endpoint can be found in the [API Reference > Torii Endpoints](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints#health).
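A quick way to verify liveness is to poll that endpoint directly. The following is a minimal sketch, assuming a locally running peer that serves its API on the default `127.0.0.1:8080`; it uses `attohttpc`, the same HTTP client the `iroha_client` crate depends on:

```rust
// Hypothetical liveness probe; point it at your peer's `TORII_API_URL`.
fn main() -> Result<(), attohttpc::Error> {
    let response = attohttpc::get("http://127.0.0.1:8080/health").send()?;
    let status = response.status();
    let body = response.text()?;
    // A healthy peer should answer 200 OK with a short JSON body.
    println!("status: {status}, body: {body}");
    Ok(())
}
```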
Iroha can produce both JSON-formatted and `prometheus`-readable metrics at the `status` and `metrics` endpoints, respectively. @@ -202,28 +202,28 @@ The [`prometheus`](https://prometheus.io/docs/introduction/overview/) monitoring prometheus --config.file=configs/prometheus.yml ``` -## Storage +### Storage The blocks are written to the `blocks` sub-folder, which is created automatically by Iroha in the working directory of the peer. Additionally, if specified, the logging file must also be stored in a user-specified directory. No additional storage is necessary. -## Scalability +### Scalability Multiple instances of Iroha peer and client binaries can be run on the same physical machine and in the same working directory. However, we recommend giving each instance a clean new working directory. The provided `docker-compose` file showcases a minimum viable network and the general methods of using the `hyperledger/iroha2:dev` docker image for deploying a network of peers. -# Further Reading +## Further Reading We encourage you to check out our [Iroha 2 Tutorial](https://hyperledger.github.io/iroha-2-docs/) first. It is suitable for both experienced developers and prospective users of Iroha 2, and it provides language-specific guides for Bash, Python, Rust, Kotlin/Java, and JavaScript/TypeScript. -* [Iroha 2 Tutorial](https://hyperledger.github.io/iroha-2-docs/) +* [Iroha 2 Documentation](https://hyperledger.github.io/iroha-2-docs/) + * [Glossary](https://hyperledger.github.io/iroha-2-docs/guide/glossary) + * [Iroha Special Instructions](https://hyperledger.github.io/iroha-2-docs/guide/blockchain/instructions) + * [API Reference](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints) +* [Configuration Reference](./docs/source/references/config.md) * [Iroha 2 Whitepaper](./docs/source/iroha_2_whitepaper.md) -* [Glossary](https://hyperledger.github.io/iroha-2-docs/guide/glossary.html) -* [Configuration](./docs/source/references/config.md) -* [Iroha Special Instructions](https://hyperledger.github.io/iroha-2-docs/guide/blockchain/instructions.html) -* [API specification](./docs/source/references/api_spec.md) Iroha SDKs: @@ -232,17 +232,17 @@ Iroha SDKs: * [Iroha Javascript](https://github.com/hyperledger/iroha-javascript) * [Iroha iOS Swift](https://github.com/hyperledger/iroha-ios) -# How to Contribute +## How to Contribute We welcome community contributions! Report bugs and suggest improvements via GitHub issues and pull requests. Check out our [contributing guide](./CONTRIBUTING.md) to learn more. -# Get Help +## Get Help Check out the channels you could use to [get help or engage with the community](./CONTRIBUTING.md#contact).
-# License +## License Iroha codebase is licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except diff --git a/benchmark_blockstore/blocks.data b/benchmark_blockstore/blocks.data new file mode 100644 index 00000000000..e3c3e57273f Binary files /dev/null and b/benchmark_blockstore/blocks.data differ diff --git a/benchmark_blockstore/blocks.index b/benchmark_blockstore/blocks.index new file mode 100644 index 00000000000..5bec6e06493 Binary files /dev/null and b/benchmark_blockstore/blocks.index differ diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 77d51e16893..e0abd7c480b 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -14,6 +14,9 @@ license.workspace = true keywords.workspace = true categories.workspace = true +[lints] +workspace = true + [features] default = ["bridge", "telemetry", "schema-endpoint"] @@ -44,6 +47,7 @@ iroha_macro = { workspace = true } iroha_logger = { workspace = true } iroha_futures = { workspace = true } iroha_data_model = { workspace = true, features = ["http"] } +iroha_primitives = { workspace = true } iroha_telemetry = { workspace = true, optional = true } iroha_version = { workspace = true, features = ["http"] } iroha_config = { workspace = true } @@ -55,6 +59,7 @@ iroha_genesis = { workspace = true } iroha_wasm_builder = { workspace = true } +derive_more = { workspace = true } async-trait = { workspace = true } color-eyre = { workspace = true } eyre = { workspace = true } @@ -74,10 +79,9 @@ tempfile = { workspace = true } dashmap = { workspace = true } thread-local-panic-hook = { version = "0.1.0", optional = true } -uuid = { version = "1.4.1", features = ["v4"] } [dev-dependencies] -serial_test = "0.8.0" +serial_test = "2.0.0" [build-dependencies] iroha_wasm_builder = { workspace = true } diff --git a/cli/build.rs b/cli/build.rs index 4cc489aa580..23ce6a29d23 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -1,12 +1,12 @@ -//! Build script to extract git hash of iroha build and to check runtime validator +//! Build script to extract git hash of iroha build and to check runtime executor use eyre::{eyre, Result, WrapErr}; -const DEFAULT_VALIDATOR_PATH: &str = "../default_validator"; +const DEFAULT_EXECUTOR_PATH: &str = "../default_executor"; fn main() -> Result<()> { println!("cargo:rerun-if-changed=build.rs"); - println!("cargo:rerun-if-changed={DEFAULT_VALIDATOR_PATH}"); + println!("cargo:rerun-if-changed={DEFAULT_EXECUTOR_PATH}"); extract_git_hash()?; @@ -14,7 +14,7 @@ fn main() -> Result<()> { // the checks are a process that's hard to accommodate // in Nix environment if std::option_env!("IROHA_SKIP_WASM_CHECKS").is_none() { - check_default_validator()?; + check_default_executor()?; } Ok(()) } @@ -30,8 +30,8 @@ fn extract_git_hash() -> Result<()> { } /// Apply `cargo check` to the smartcontract. -fn check_default_validator() -> Result<()> { - iroha_wasm_builder::Builder::new(DEFAULT_VALIDATOR_PATH) +fn check_default_executor() -> Result<()> { + iroha_wasm_builder::Builder::new(DEFAULT_EXECUTOR_PATH) .format() .check() } diff --git a/cli/derive/Cargo.toml b/cli/derive/Cargo.toml index 60ea4adab49..d258df86e55 100644 --- a/cli/derive/Cargo.toml +++ b/cli/derive/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true diff --git a/cli/derive/src/lib.rs b/cli/derive/src/lib.rs index 84e4dc8d0ca..00438a1df47 100644 --- a/cli/derive/src/lib.rs +++ b/cli/derive/src/lib.rs @@ -1,5 +1,4 @@ //!
Crate with a proc macro for torii endpoint generation -#![allow(clippy::arithmetic_side_effects)] // We should remove `clippy::restriction`. use proc_macro::TokenStream; use proc_macro2::Span; @@ -133,7 +132,7 @@ impl Parse for EndpointList { fn parse(input: ParseStream) -> SynResult<Self> { let items = Punctuated::<EndpointItem, Token![,]>::parse_terminated(input)?; let mut seen_arg_counts = Vec::new(); - for item in items.iter() { + for item in &items { match item { EndpointItem::NameAndArgCount { arg_count, .. } | EndpointItem::ArgCount(arg_count) => { diff --git a/cli/src/event.rs b/cli/src/event.rs index 601ab41bb6f..9a9dcc0ff1d 100644 --- a/cli/src/event.rs +++ b/cli/src/event.rs @@ -1,11 +1,6 @@ //! Iroha is quite a dynamic system, so many events can happen. //! This module contains descriptions of such events and //! utility Iroha Special Instructions to work with them. -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use futures::TryStreamExt; use iroha_data_model::events::prelude::*; use iroha_macro::error::ErrorTryFromEnum; @@ -14,7 +9,7 @@ use warp::ws::WebSocket; use crate::stream::{self, Sink, Stream}; /// Type of Stream error -pub type StreamError = stream::Error<<WebSocket as Stream<VersionedEventSubscriptionRequest>>::Err>; +pub type StreamError = stream::Error<<WebSocket as Stream<EventSubscriptionRequest>>::Err>; /// Type of error for `Consumer` #[derive(thiserror::Error, Debug)] @@ -57,9 +52,7 @@ impl Consumer { /// Can fail due to timeout or without message at websocket or during decoding request #[iroha_futures::telemetry_future] pub async fn new(mut stream: WebSocket) -> Result<Self> { - let subscription_request: VersionedEventSubscriptionRequest = stream.recv().await?; - let EventSubscriptionRequest(filter) = subscription_request.into_v1(); - + let EventSubscriptionRequest(filter) = stream.recv().await?; Ok(Consumer { stream, filter }) } @@ -74,7 +67,7 @@ impl Consumer { } self.stream - .send(VersionedEventMessage::from(EventMessage(event))) + .send(EventMessage(event)) .await .map_err(Into::into) } diff --git a/cli/src/lib.rs b/cli/src/lib.rs index 802f9fbc38c..78f2e871ae7 100644 --- a/cli/src/lib.rs +++ b/cli/src/lib.rs @@ -4,11 +4,6 @@ //! //! `Iroha` is the main instance of the peer program. `Arguments` //! should be constructed externally: (see `main.rs`). -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] #[cfg(debug_assertions)] use core::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; @@ -25,6 +20,7 @@ use iroha_core::{ handler::ThreadHandler, kura::Kura, prelude::{World, WorldStateView}, + query::store::LiveQueryStore, queue::Queue, smartcontracts::isi::Registrable as _, snapshot::{try_read_snapshot, SnapshotMaker, SnapshotMakerHandle}, @@ -97,12 +93,13 @@ pub struct Iroha { pub kura: Arc<Kura>, /// Torii web server pub torii: Option<Torii>, - /// Snapshot service, + /// Snapshot service pub snapshot_maker: SnapshotMakerHandle, /// Thread handlers thread_handlers: Vec<ThreadHandler>, - /// A boolean value indicating whether or not the peers will recieve data from the network. Used in - sumeragi testing. + + /// A boolean value indicating whether or not the peers will receive data from the network. + /// Used in sumeragi testing.
#[cfg(debug_assertions)] pub freeze_status: Arc, } @@ -160,9 +157,9 @@ impl NetworkRelay { match msg { SumeragiPacket(data) => { - self.sumeragi.incoming_message(data.into_v1()); + self.sumeragi.incoming_message(*data); } - BlockSync(data) => self.block_sync.message(data.into_v1()).await, + BlockSync(data) => self.block_sync.message(*data).await, TransactionGossiper(data) => self.gossiper.gossip(*data).await, Health => {} } @@ -231,7 +228,7 @@ impl Iroha { telemetry: Option, ) -> Result { let listen_addr = config.torii.p2p_addr.clone(); - let network = IrohaNetwork::start(listen_addr, config.public_key.clone()) + let network = IrohaNetwork::start(listen_addr, config.sumeragi.key_pair.clone()) .await .wrap_err("Unable to start P2P-network")?; @@ -246,13 +243,25 @@ impl Iroha { std::path::Path::new(&config.kura.block_store_path), config.kura.debug_output_new_blocks, )?; + let live_query_store_handle = + LiveQueryStore::from_configuration(config.live_query_store).start(); - let notify_shutdown = Arc::new(Notify::new()); let block_count = kura.init()?; - let wsv = try_read_snapshot(&config.snapshot.dir_path, &kura, block_count).map_or_else( + let wsv = try_read_snapshot( + &config.snapshot.dir_path, + &kura, + live_query_store_handle.clone(), + block_count, + ) + .map_or_else( |error| { iroha_logger::warn!(%error, "Failed to load wsv from snapshot, creating empty wsv"); - WorldStateView::from_configuration(config.wsv, world, Arc::clone(&kura)) + WorldStateView::from_configuration( + *config.wsv, + world, + Arc::clone(&kura), + live_query_store_handle.clone(), + ) }, |wsv| { iroha_logger::info!( @@ -283,6 +292,11 @@ impl Iroha { block_count, }); + if config.exit_after_init { + iroha_logger::error!("Exiting after init due to configuration"); + return Err(eyre!("Exiting after init due to configuration")); + } + let block_sync = BlockSynchronizer::from_configuration( &config.block_sync, sumeragi.clone(), @@ -303,6 +317,8 @@ impl Iroha { #[cfg(debug_assertions)] let freeze_status = Arc::new(AtomicBool::new(false)); + let notify_shutdown = Arc::new(Notify::new()); + NetworkRelay { sumeragi: sumeragi.clone(), block_sync, @@ -323,6 +339,7 @@ impl Iroha { events_sender, Arc::clone(¬ify_shutdown), sumeragi.clone(), + live_query_store_handle, Arc::clone(&kura), ); @@ -495,7 +512,6 @@ mod tests { use super::*; - #[allow(clippy::panic, clippy::print_stdout)] #[tokio::test] #[serial] async fn iroha_should_notify_on_panic() { diff --git a/cli/src/main.rs b/cli/src/main.rs index 61f4295fd5a..e2a07e6ae74 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -1,5 +1,4 @@ //! Iroha peer command-line interface. 
-#![allow(clippy::print_stdout)] use std::env; use color_eyre::eyre::WrapErr as _; @@ -148,13 +147,11 @@ async fn main() -> Result<(), color_eyre::Report> { Ok(()) } -#[allow(clippy::print_stdout)] fn print_help(styling: &Styling) -> Result<(), std::io::Error> { use std::io::Write; let stdout = std::io::stdout(); let lock = stdout.lock(); - #[allow(clippy::arithmetic_side_effects)] // No overflow let mut buffer = std::io::BufWriter::with_capacity(1024 * REQUIRED_ENV_VARS.len(), lock); writeln!(buffer, "{}", "Iroha 2".bold().green())?; writeln!(buffer, "pass {} for this message", styling.or(&HELP_ARG))?; @@ -206,7 +203,6 @@ as follows:", Ok(()) } -#[allow(clippy::print_stdout)] fn print_version(styling: &Styling) { println!( "{} {} (git hash {}) \n {}: {}", diff --git a/cli/src/samples.rs b/cli/src/samples.rs index 2a064bd9e64..a91c6354356 100644 --- a/cli/src/samples.rs +++ b/cli/src/samples.rs @@ -1,14 +1,14 @@ -#![allow(clippy::restriction)] //! This module contains the sample configurations used for testing and benchmarking throughout Iroha. use std::{collections::HashSet, path::Path, str::FromStr}; use iroha_config::{ iroha::{Configuration, ConfigurationProxy}, sumeragi::TrustedPeers, - torii::{uri::DEFAULT_API_ADDR, DEFAULT_TORII_P2P_ADDR, DEFAULT_TORII_TELEMETRY_ADDR}, + torii::{uri::DEFAULT_API_ADDR, DEFAULT_TORII_P2P_ADDR}, }; use iroha_crypto::{KeyPair, PublicKey}; use iroha_data_model::{peer::PeerId, prelude::*}; +use iroha_primitives::unique_vec::UniqueVec; /// Get sample trusted peers. The public key must be the same as `configuration.public_key` /// @@ -52,7 +52,7 @@ pub fn get_trusted_peers(public_key: Option<&PublicKey>) -> HashSet<PeerId> { /// /// # Panics /// - when [`KeyPair`] generation fails (rare case). -pub fn get_config_proxy(peers: HashSet<PeerId>, key_pair: Option<KeyPair>) -> ConfigurationProxy { +pub fn get_config_proxy(peers: UniqueVec<PeerId>, key_pair: Option<KeyPair>) -> ConfigurationProxy { let (public_key, private_key) = key_pair .unwrap_or_else(|| KeyPair::generate().expect("Key pair generation failed")) .into(); @@ -60,17 +60,16 @@ pub fn get_config_proxy(peers: HashSet<PeerId>, key_pair: Option<KeyPair>) -> Co ConfigurationProxy { public_key: Some(public_key.clone()), private_key: Some(private_key.clone()), - sumeragi: Some(iroha_config::sumeragi::ConfigurationProxy { + sumeragi: Some(Box::new(iroha_config::sumeragi::ConfigurationProxy { max_transactions_in_block: Some(2), trusted_peers: Some(TrustedPeers { peers }), ..iroha_config::sumeragi::ConfigurationProxy::default() - }), - torii: Some(iroha_config::torii::ConfigurationProxy { + })), + torii: Some(Box::new(iroha_config::torii::ConfigurationProxy { p2p_addr: Some(DEFAULT_TORII_P2P_ADDR.clone()), api_url: Some(DEFAULT_API_ADDR.clone()), - telemetry_url: Some(DEFAULT_TORII_TELEMETRY_ADDR.clone()), ..iroha_config::torii::ConfigurationProxy::default() - }), + })), block_sync: Some(iroha_config::block_sync::ConfigurationProxy { block_batch_size: Some(1), gossip_period_ms: Some(500), @@ -79,10 +78,10 @@ pub fn get_config_proxy(peers: HashSet<PeerId>, key_pair: Option<KeyPair>) -> Co queue: Some(iroha_config::queue::ConfigurationProxy { ..iroha_config::queue::ConfigurationProxy::default() }), - genesis: Some(iroha_config::genesis::ConfigurationProxy { + genesis: Some(Box::new(iroha_config::genesis::ConfigurationProxy { account_private_key: Some(Some(private_key)), account_public_key: Some(public_key), - }), + })), ..ConfigurationProxy::default() } } @@ -94,22 +93,22 @@ pub fn get_config_proxy(peers: HashSet<PeerId>, key_pair: Option<KeyPair>) -> Co /// /// # Panics /// - when [`KeyPair`]
generation fails (rare case). -pub fn get_config(trusted_peers: HashSet<PeerId>, key_pair: Option<KeyPair>) -> Configuration { +pub fn get_config(trusted_peers: UniqueVec<PeerId>, key_pair: Option<KeyPair>) -> Configuration { get_config_proxy(trusted_peers, key_pair) .build() .expect("Iroha config should build as all required fields were provided") } -/// Construct validator from path. +/// Construct executor from path. /// /// `relative_path` should be relative to `CARGO_MANIFEST_DIR`. /// /// # Errors /// -/// - Failed to create temp dir for validator output -/// - Failed to build validator -/// - Failed to optimize validator -pub fn construct_validator<P>(relative_path: &P) -> color_eyre::Result<Validator> +/// - Failed to create temp dir for executor output +/// - Failed to build executor +/// - Failed to optimize executor +pub fn construct_executor<P>(relative_path: &P) -> color_eyre::Result<Executor> where P: AsRef<Path> + ?Sized, { @@ -118,5 +117,5 @@ where .optimize()? .into_bytes()?; - Ok(Validator::new(WasmSmartContract::from_compiled(wasm_blob))) + Ok(Executor::new(WasmSmartContract::from_compiled(wasm_blob))) } diff --git a/cli/src/stream.rs b/cli/src/stream.rs index 340923c9426..b776907260c 100644 --- a/cli/src/stream.rs +++ b/cli/src/stream.rs @@ -6,6 +6,7 @@ use core::{result::Result, time::Duration}; use futures::{SinkExt, StreamExt}; use iroha_version::prelude::*; +use parity_scale_codec::DecodeAll; #[cfg(test)] const TIMEOUT: Duration = Duration::from_millis(10_000); @@ -34,7 +35,7 @@ where /// Unexpected non-binary message received NonBinaryMessage, /// Error during versioned message decoding - IrohaVersion(#[from] iroha_version::error::Error), + Decode(#[from] parity_scale_codec::Error), } /// Represents message used by the stream @@ -56,7 +57,7 @@ pub trait StreamMessage { #[async_trait::async_trait] pub trait Sink<S>: SinkExt<Self::Message> + Unpin where - S: EncodeVersioned + Send + Sync + 'static, + S: Encode + Send + Sync + 'static, { /// Error type returned by the sink type Err: std::error::Error + Send + Sync + 'static; @@ -68,10 +69,7 @@ where async fn send(&mut self, message: S) -> Result<(), Error<Self::Err>> { tokio::time::timeout( TIMEOUT, - <Self as SinkExt<Self::Message>>::send( - self, - Self::Message::binary(message.encode_versioned()), - ), + <Self as SinkExt<Self::Message>>::send(self, Self::Message::binary(message.encode())), ) .await .map_err(|_err| Error::SendTimeout)? @@ -81,7 +79,7 @@ where /// Trait for reading custom messages from stream #[async_trait::async_trait] -pub trait Stream<R: DecodeVersioned>: +pub trait Stream<R: DecodeAll>: StreamExt<Item = Result<Self::Message, Self::Err>> + Unpin { /// Error type returned by the stream @@ -106,9 +104,7 @@ pub trait Stream<R: DecodeVersioned>: return Err(Error::NonBinaryMessage); } - Ok(R::decode_all_versioned( - subscription_request_message.as_bytes(), - )?) + Ok(R::decode_all(&mut subscription_request_message.as_bytes())?)
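With `EncodeVersioned`/`DecodeVersioned` gone, a WebSocket frame now carries a bare SCALE payload, and `decode_all` rejects trailing bytes that plain `decode` would silently ignore. A minimal round-trip sketch of that framing with `parity-scale-codec` (assuming its `derive` feature; the `Ping` type is hypothetical, purely for illustration):

```rust
use parity_scale_codec::{Decode, DecodeAll, Encode};

// Stand-in for a stream message; any `Encode + Decode` type works the same way.
#[derive(Debug, PartialEq, Encode, Decode)]
struct Ping(u64);

fn main() -> Result<(), parity_scale_codec::Error> {
    // Sending side: encode to raw SCALE bytes, as `Sink::send` now does.
    let bytes = Ping(42).encode();

    // Receiving side: `decode_all` fails unless the whole buffer is consumed,
    // which is why `recv` prefers it over plain `decode`.
    let decoded = Ping::decode_all(&mut bytes.as_slice())?;
    assert_eq!(decoded, Ping(42));
    Ok(())
}
```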
} } @@ -133,14 +129,14 @@ impl StreamMessage for warp::ws::Message { #[async_trait::async_trait] impl Sink for warp::ws::WebSocket where - M: EncodeVersioned + Send + Sync + 'static, + M: Encode + Send + Sync + 'static, { type Err = warp::Error; type Message = warp::ws::Message; } #[async_trait::async_trait] -impl Stream for warp::ws::WebSocket { +impl Stream for warp::ws::WebSocket { type Err = warp::Error; type Message = warp::ws::Message; } @@ -152,14 +148,14 @@ mod ws_client { use super::*; #[async_trait::async_trait] - impl Stream for WsClient { + impl Stream for WsClient { type Err = warp::test::WsError; type Message = warp::ws::Message; } #[async_trait::async_trait] impl Sink for WsClient where - M: EncodeVersioned + Send + Sync + 'static, + M: Encode + Send + Sync + 'static, { type Err = warp::test::WsError; type Message = warp::ws::Message; diff --git a/cli/src/torii/mod.rs b/cli/src/torii/mod.rs index be3dce65184..9594362ab5d 100644 --- a/cli/src/torii/mod.rs +++ b/cli/src/torii/mod.rs @@ -7,21 +7,18 @@ use std::{ fmt::{Debug, Write as _}, net::ToSocketAddrs, sync::Arc, - time::{Duration, Instant}, }; -use dashmap::DashMap; use futures::{stream::FuturesUnordered, StreamExt}; use iroha_core::{ kura::Kura, prelude::*, + query::store::LiveQueryStoreHandle, queue::{self, Queue}, sumeragi::SumeragiHandle, EventsSender, }; -use iroha_data_model::Value; -use parity_scale_codec::Encode; -use tokio::{sync::Notify, time::sleep}; +use tokio::sync::Notify; use utils::*; use warp::{ http::StatusCode, @@ -30,56 +27,10 @@ use warp::{ Filter as _, Reply, }; -use self::cursor::Batched; - #[macro_use] pub(crate) mod utils; -mod cursor; -mod pagination; mod routing; -type LiveQuery = Batched>; - -#[derive(Default)] -struct LiveQueryStore { - queries: DashMap<(String, Vec), (LiveQuery, Instant)>, -} - -impl LiveQueryStore { - fn insert(&self, query_id: String, request: T, live_query: LiveQuery) { - self.queries - .insert((query_id, request.encode()), (live_query, Instant::now())); - } - - fn remove(&self, query_id: &str, request: &T) -> Option { - self.queries - .remove(&(query_id.to_string(), request.encode())) - .map(|(_, (output, _))| output) - } - - fn expired_query_cleanup( - self: Arc, - idle_time: Duration, - notify_shutdown: Arc, - ) -> tokio::task::JoinHandle<()> { - tokio::task::spawn(async move { - loop { - tokio::select! { - _ = sleep(idle_time) => { - self.queries - .retain(|_, (_, last_access_time)| last_access_time.elapsed() <= idle_time); - }, - _ = notify_shutdown.notified() => { - iroha_logger::info!("Query cleanup service is being shut down."); - break; - } - else => break, - } - } - }) - } -} - /// Main network handler and the only entrypoint of the Iroha. pub struct Torii { iroha_cfg: super::Configuration, @@ -87,14 +38,14 @@ pub struct Torii { events: EventsSender, notify_shutdown: Arc, sumeragi: SumeragiHandle, - query_store: Arc, + query_service: LiveQueryStoreHandle, kura: Arc, } /// Torii errors. #[derive(Debug, thiserror::Error, displaydoc::Display)] pub enum Error { - /// Failed to execute or validate query + /// Failed to process query Query(#[from] iroha_data_model::ValidationFail), /// Failed to accept transaction AcceptTransaction(#[from] iroha_core::tx::AcceptTransactionFail), @@ -107,43 +58,18 @@ pub enum Error { #[cfg(feature = "telemetry")] /// Error while getting Prometheus metrics Prometheus(#[source] eyre::Report), - /// Error while resuming cursor - UnknownCursor, -} - -/// Status code for query error response. 
-fn query_status_code(validation_error: &iroha_data_model::ValidationFail) -> StatusCode { - use iroha_data_model::{ - isi::error::InstructionExecutionError, query::error::QueryExecutionFail::*, - ValidationFail::*, - }; - - match validation_error { - NotPermitted(_) => StatusCode::FORBIDDEN, - QueryFailed(query_error) - | InstructionFailed(InstructionExecutionError::Query(query_error)) => match query_error { - Evaluate(_) | Conversion(_) => StatusCode::BAD_REQUEST, - Signature(_) | Unauthorized => StatusCode::UNAUTHORIZED, - Find(_) => StatusCode::NOT_FOUND, - }, - TooComplex => StatusCode::UNPROCESSABLE_ENTITY, - InternalError(_) => StatusCode::INTERNAL_SERVER_ERROR, - InstructionFailed(error) => { - iroha_logger::error!( - ?error, - "Query validation failed with unexpected error. This means a bug inside Runtime Validator", - ); - StatusCode::INTERNAL_SERVER_ERROR - } - } + /// Internal error while getting status + StatusFailure(#[source] eyre::Report), + /// Cannot find status segment by provided path + StatusSegmentNotFound(#[source] eyre::Report), } impl Reply for Error { fn into_response(self) -> Response { - use Error::*; match self { - Query(err) => { - reply::with_status(utils::Scale(&err), query_status_code(&err)).into_response() + Self::Query(err) => { + reply::with_status(utils::Scale(&err), Self::query_status_code(&err)) + .into_response() } _ => reply::with_status(Self::to_string(&self), self.status_code()).into_response(), } @@ -153,19 +79,47 @@ impl Reply for Error { impl Error { fn status_code(&self) -> StatusCode { use Error::*; + match self { - Query(e) => query_status_code(e), - AcceptTransaction(_) | ConfigurationReload(_) | UnknownCursor => { - StatusCode::BAD_REQUEST - } - Config(_) => StatusCode::NOT_FOUND, + Query(e) => Self::query_status_code(e), + AcceptTransaction(_) | ConfigurationReload(_) => StatusCode::BAD_REQUEST, + Config(_) | StatusSegmentNotFound(_) => StatusCode::NOT_FOUND, PushIntoQueue(err) => match **err { queue::Error::Full => StatusCode::INTERNAL_SERVER_ERROR, queue::Error::SignatureCondition { .. } => StatusCode::UNAUTHORIZED, _ => StatusCode::BAD_REQUEST, }, #[cfg(feature = "telemetry")] - Prometheus(_) => StatusCode::INTERNAL_SERVER_ERROR, + Prometheus(_) | StatusFailure(_) => StatusCode::INTERNAL_SERVER_ERROR, + } + } + + fn query_status_code(validation_error: &iroha_data_model::ValidationFail) -> StatusCode { + use iroha_data_model::{ + isi::error::InstructionExecutionError, query::error::QueryExecutionFail::*, + ValidationFail::*, + }; + + match validation_error { + NotPermitted(_) => StatusCode::FORBIDDEN, + QueryFailed(query_error) + | InstructionFailed(InstructionExecutionError::Query(query_error)) => match query_error + { + Evaluate(_) | Conversion(_) | UnknownCursor | FetchSizeTooBig => { + StatusCode::BAD_REQUEST + } + Signature(_) => StatusCode::UNAUTHORIZED, + Find(_) => StatusCode::NOT_FOUND, + }, + TooComplex => StatusCode::UNPROCESSABLE_ENTITY, + InternalError(_) => StatusCode::INTERNAL_SERVER_ERROR, + InstructionFailed(error) => { + iroha_logger::error!( + ?error, + "Query validation failed with unexpected error. This means a bug inside Runtime Executor", + ); + StatusCode::INTERNAL_SERVER_ERROR + } } } diff --git a/cli/src/torii/routing.rs b/cli/src/torii/routing.rs index 468a95b877e..48330bdffb9 100644 --- a/cli/src/torii/routing.rs +++ b/cli/src/torii/routing.rs @@ -5,13 +5,7 @@ // FIXME: This can't be fixed, because one trait in `warp` is private. 
#![allow(opaque_hidden_inferred_bound)] -use std::{ - cmp::Ordering, - num::{NonZeroU64, NonZeroUsize}, -}; - -use cursor::Batch; -use eyre::WrapErr; +use eyre::{eyre, WrapErr}; use futures::TryStreamExt; use iroha_config::{ base::proxy::Documented, @@ -20,29 +14,50 @@ use iroha_config::{ GetConfiguration, PostConfiguration, }; use iroha_core::{ - smartcontracts::{isi::query::ValidQueryRequest, query::LazyValue}, + query::{pagination::Paginate, store::LiveQueryStoreHandle}, + smartcontracts::query::ValidQueryRequest, sumeragi::SumeragiHandle, }; use iroha_data_model::{ block::{ - stream::{ - BlockMessage, BlockSubscriptionRequest, VersionedBlockMessage, - VersionedBlockSubscriptionRequest, - }, - VersionedCommittedBlock, + stream::{BlockMessage, BlockSubscriptionRequest}, + SignedBlock, }, - http::{BatchedResponse, VersionedBatchedResponse}, prelude::*, - query::{ForwardCursor, Pagination, Sorting}, + query::{ + cursor::ForwardCursor, http, sorting::Sorting, Pagination, QueryRequest, + QueryWithParameters, + }, + BatchedResponse, }; #[cfg(feature = "telemetry")] use iroha_telemetry::metrics::Status; -use pagination::Paginate; use tokio::task; use super::*; use crate::stream::{Sink, Stream}; +/// Filter for warp which extracts [`http::ClientQueryRequest`] +fn client_query_request( +) -> impl warp::Filter + Copy { + body::versioned::() + .and(sorting()) + .and(paginate()) + .and(fetch_size()) + .and_then(|signed_query, sorting, pagination, fetch_size| async move { + Result::<_, std::convert::Infallible>::Ok(http::ClientQueryRequest::query( + signed_query, + sorting, + pagination, + fetch_size, + )) + }) + .or(cursor().and_then(|cursor| async move { + Result::<_, std::convert::Infallible>::Ok(http::ClientQueryRequest::cursor(cursor)) + })) + .unify() +} + /// Filter for warp which extracts sorting fn sorting() -> impl warp::Filter + Copy { warp::query() @@ -58,11 +73,16 @@ fn paginate() -> impl warp::Filter impl warp::Filter + Copy { + warp::query() +} + #[iroha_futures::telemetry_future] async fn handle_instructions( queue: Arc, sumeragi: SumeragiHandle, - transaction: VersionedSignedTransaction, + transaction: SignedTransaction, ) -> Result { let wsv = sumeragi.wsv_clone(); let transaction_limits = wsv.config.transaction_limits; @@ -84,122 +104,33 @@ async fn handle_instructions( #[iroha_futures::telemetry_future] async fn handle_queries( + live_query_store: LiveQueryStoreHandle, sumeragi: SumeragiHandle, - query_store: Arc, - fetch_size: NonZeroUsize, - - request: VersionedSignedQuery, - sorting: Sorting, - pagination: Pagination, - - cursor: ForwardCursor, -) -> Result>> { - let valid_request = sumeragi.apply_wsv(|wsv| ValidQueryRequest::validate(request, wsv))?; - let request_id = (&valid_request, &sorting, &pagination); - - if let Some(query_id) = cursor.query_id { - let live_query = query_store - .remove(&query_id, &request_id) - .ok_or(Error::UnknownCursor)?; - - return construct_query_response( - request_id, - &query_store, - query_id, - cursor.cursor.map(NonZeroU64::get), - live_query, - ); - } - - sumeragi.apply_wsv(|wsv| { - let res = valid_request.execute(wsv).map_err(ValidationFail::from)?; - - match res { - LazyValue::Value(batch) => { - let cursor = ForwardCursor::default(); - let result = BatchedResponse { batch, cursor }; - Ok(Scale(result.into())) - } - LazyValue::Iter(iter) => { - let live_query = apply_sorting_and_pagination(iter, &sorting, pagination); - let query_id = uuid::Uuid::new_v4().to_string(); - - let curr_cursor = Some(0); - let live_query = 
live_query.batched(fetch_size); - construct_query_response( - request_id, - &query_store, - query_id, - curr_cursor, - live_query, - ) - } - } - }) -} - -fn construct_query_response( - request_id: (&ValidQueryRequest, &Sorting, &Pagination), - query_store: &LiveQueryStore, - query_id: String, - curr_cursor: Option, - mut live_query: Batched>, -) -> Result>> { - let (batch, next_cursor) = live_query.next_batch(curr_cursor)?; - - if !live_query.is_depleted() { - query_store.insert(query_id.clone(), request_id, live_query); - } - - let query_response = BatchedResponse { - batch: Value::Vec(batch), - cursor: ForwardCursor { - query_id: Some(query_id), - cursor: next_cursor, - }, - }; - - Ok(Scale(query_response.into())) -} -fn apply_sorting_and_pagination( - iter: impl Iterator, - sorting: &Sorting, - pagination: Pagination, -) -> Vec { - if let Some(key) = &sorting.sort_by_metadata_key { - let mut pairs: Vec<(Option, Value)> = iter - .map(|value| { - let key = match &value { - Value::Identifiable(IdentifiableBox::Asset(asset)) => match asset.value() { - AssetValue::Store(store) => store.get(key).cloned(), - _ => None, - }, - Value::Identifiable(v) => TryInto::<&dyn HasMetadata>::try_into(v) - .ok() - .and_then(|has_metadata| has_metadata.metadata().get(key)) - .cloned(), - _ => None, - }; - (key, value) - }) - .collect(); - pairs.sort_by( - |(left_key, _), (right_key, _)| match (left_key, right_key) { - (Some(l), Some(r)) => l.cmp(r), - (Some(_), None) => Ordering::Less, - (None, Some(_)) => Ordering::Greater, - (None, None) => Ordering::Equal, - }, - ); - pairs - .into_iter() - .map(|(_, val)| val) - .paginate(pagination) - .collect() - } else { - iter.paginate(pagination).collect() - } + query_request: http::ClientQueryRequest, +) -> Result>> { + let handle = tokio::task::spawn_blocking(move || match query_request.0 { + QueryRequest::Query(QueryWithParameters { + query: signed_query, + sorting, + pagination, + fetch_size, + }) => sumeragi.apply_wsv(|wsv| { + let valid_query = ValidQueryRequest::validate(signed_query, wsv)?; + let query_output = valid_query.execute(wsv)?; + live_query_store + .handle_query_output(query_output, &sorting, pagination, fetch_size) + .map_err(ValidationFail::from) + }), + QueryRequest::Cursor(cursor) => live_query_store + .handle_query_cursor(cursor) + .map_err(ValidationFail::from), + }); + handle + .await + .expect("Failed to join query handling task") + .map(Scale) + .map_err(Into::into) } #[derive(serde::Serialize)] @@ -223,7 +154,7 @@ async fn handle_pending_transactions( queue: Arc, sumeragi: SumeragiHandle, pagination: Pagination, -) -> Result>> { +) -> Result>> { let query_response = sumeragi.apply_wsv(|wsv| { queue .all_transactions(wsv) @@ -278,8 +209,7 @@ async fn handle_post_configuration( #[iroha_futures::telemetry_future] async fn handle_blocks_stream(kura: Arc, mut stream: WebSocket) -> eyre::Result<()> { - let subscription_request: VersionedBlockSubscriptionRequest = stream.recv().await?; - let BlockSubscriptionRequest(mut from_height) = subscription_request.into_v1(); + let BlockSubscriptionRequest(mut from_height) = stream.recv().await?; let mut interval = tokio::time::interval(std::time::Duration::from_millis(10)); loop { @@ -307,10 +237,8 @@ async fn handle_blocks_stream(kura: Arc, mut stream: WebSocket) -> eyre::R _ = interval.tick() => { if let Some(block) = kura.get_block_by_height(from_height.get()) { stream - // TODO: to avoid clone `VersionedBlockMessage` could be split into sending and receiving parts - 
.send(VersionedBlockMessage::from( - BlockMessage(VersionedCommittedBlock::clone(&block)), - )) + // TODO: to avoid clone `BlockMessage` could be split into sending and receiving parts + .send(BlockMessage(SignedBlock::clone(&block))) .await?; from_height = from_height.checked_add(1).expect("Maximum block height is achieved."); } @@ -426,41 +354,54 @@ fn handle_metrics(sumeragi: &SumeragiHandle) -> Result { .map_err(Error::Prometheus) } -#[cfg(feature = "telemetry")] -#[allow(clippy::unnecessary_wraps)] -fn handle_status(sumeragi: &SumeragiHandle) -> Result { +fn update_metrics_gracefully(sumeragi: &SumeragiHandle) { if let Err(error) = sumeragi.update_metrics() { iroha_logger::error!(%error, "Error while calling `sumeragi::update_metrics`."); } - let status = Status::from(&sumeragi.metrics()); - Ok(reply::json(&status)) } #[cfg(feature = "telemetry")] -#[allow(clippy::unused_async)] -async fn handle_status_precise(sumeragi: SumeragiHandle, segment: String) -> Result { - if let Err(error) = sumeragi.update_metrics() { - iroha_logger::error!(%error, "Error while calling `sumeragi::update_metrics`."); - } - // TODO: This probably can be optimised to elide the full - // structure. Ideally there should remain a list of fields and - // field aliases somewhere in `serde` macro output, which can - // elide the creation of the value, and directly read the value - // behind the mutex. +#[allow(clippy::unnecessary_wraps)] +fn handle_status( + sumeragi: &SumeragiHandle, + accept: Option>, + tail: &warp::path::Tail, +) -> Result { + use eyre::ContextCompat; + + update_metrics_gracefully(sumeragi); let status = Status::from(&sumeragi.metrics()); - match serde_json::to_value(status) { - Ok(value) => Ok(value - .get(segment) - .map_or_else(|| reply::json(&value), reply::json)), - Err(err) => { - iroha_logger::error!(%err, "Error while converting to JSON value"); - Ok(reply::json(&None::)) + + let tail = tail.as_str(); + if tail.is_empty() { + if accept.is_some_and(|x| x.as_ref() == PARITY_SCALE_MIME_TYPE) { + Ok(Scale(status).into_response()) + } else { + Ok(reply::json(&status).into_response()) } + } else { + // TODO: This probably can be optimised to elide the full + // structure. Ideally there should remain a list of fields and + // field aliases somewhere in `serde` macro output, which can + // elide the creation of the value, and directly read the value + // behind the mutex. + let value = serde_json::to_value(status) + .wrap_err("Failed to serialize JSON") + .map_err(Error::StatusFailure)?; + + let reply = tail + .split('/') + .try_fold(&value, serde_json::Value::get) + .wrap_err_with(|| eyre!("Path not found: \"{}\"", tail)) + .map_err(Error::StatusSegmentNotFound) + .map(|segment| reply::json(segment).into_response())?; + + Ok(reply) } } impl Torii { - /// Construct `Torii` from `ToriiConfiguration`. + /// Construct `Torii`. #[allow(clippy::too_many_arguments)] pub fn from_configuration( iroha_cfg: Configuration, @@ -468,54 +409,22 @@ impl Torii { events: EventsSender, notify_shutdown: Arc, sumeragi: SumeragiHandle, + query_service: LiveQueryStoreHandle, kura: Arc, ) -> Self { Self { iroha_cfg, - events, queue, + events, notify_shutdown, sumeragi, - query_store: Arc::default(), + query_service, kura, } } - #[cfg(feature = "telemetry")] - /// Helper function to create router. 
This router can be tested without starting up an HTTP server - fn create_telemetry_router( - &self, - ) -> impl warp::Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone + Send { - let status_path = warp::path(uri::STATUS); - let get_router_status_precise = endpoint2( - handle_status_precise, - status_path - .and(add_state!(self.sumeragi.clone())) - .and(warp::path::param()), - ); - let get_router_status_bare = - status_path - .and(add_state!(self.sumeragi.clone())) - .and_then(|sumeragi| async move { - Ok::<_, Infallible>(WarpResult(handle_status(&sumeragi))) - }); - let get_router_metrics = warp::path(uri::METRICS) - .and(add_state!(self.sumeragi)) - .and_then(|sumeragi| async move { - Ok::<_, Infallible>(WarpResult(handle_metrics(&sumeragi))) - }); - let get_api_version = warp::path(uri::API_VERSION) - .and(add_state!(self.sumeragi.clone())) - .and_then(|sumeragi| async { Ok::<_, Infallible>(handle_version(sumeragi).await) }); - - warp::get() - .and(get_router_status_precise.or(get_router_status_bare)) - .or(get_router_metrics) - .or(get_api_version) - .with(warp::trace::request()) - } - /// Helper function to create router. This router can be tested without starting up an HTTP server + #[allow(clippy::too_many_lines)] fn create_api_router(&self) -> impl warp::Filter<Extract = impl warp::Reply, Error = warp::Rejection> + Clone + Send { let health_route = warp::get() .and(warp::path(uri::HEALTH)) @@ -536,6 +445,28 @@ impl Torii { )), ); + let get_router_status = warp::path(uri::STATUS) + .and(add_state!(self.sumeragi.clone())) + .and(warp::header::optional(warp::http::header::ACCEPT.as_str())) + .and(warp::path::tail()) + .and_then(|sumeragi, accept: Option<String>, tail| async move { + Ok::<_, Infallible>(WarpResult(handle_status(&sumeragi, accept.as_ref(), &tail))) + }); + let get_router_metrics = warp::path(uri::METRICS) + .and(add_state!(self.sumeragi)) + .and_then(|sumeragi| async move { + Ok::<_, Infallible>(WarpResult(handle_metrics(&sumeragi))) + }); + let get_api_version = warp::path(uri::API_VERSION) + .and(add_state!(self.sumeragi.clone())) + .and_then(|sumeragi| async { Ok::<_, Infallible>(handle_version(sumeragi).await) }); + + #[cfg(feature = "telemetry")] + let get_router = get_router.or(warp::any() + .and(get_router_status) + .or(get_router_metrics) + .or(get_api_version)); + #[cfg(feature = "schema-endpoint")] let get_router = get_router.or(warp::path(uri::SCHEMA) .and_then(|| async { Ok::<_, Infallible>(handle_schema().await) })); @@ -551,19 +482,11 @@ impl Torii { )) .and(body::versioned()), ) - .or(endpoint7( + .or(endpoint3( handle_queries, warp::path(uri::QUERY) - .and(add_state!( - self.sumeragi, - self.query_store, - NonZeroUsize::try_from(self.iroha_cfg.torii.fetch_size) - .expect("u64 should always fit into usize"), - )) - .and(body::versioned()) - .and(sorting()) - .and(paginate()) - .and(cursor()), + .and(add_state!(self.query_service, self.sumeragi,)) + .and(client_query_request()), )) .or(endpoint2( handle_post_configuration, @@ -623,37 +546,6 @@ impl Torii { .with(warp::trace::request())) } - /// Start status and metrics endpoints.
- /// - /// # Errors - /// Can fail due to listening to network or if http server fails - #[cfg(feature = "telemetry")] - fn start_telemetry(self: Arc) -> eyre::Result>> { - let telemetry_url = &self.iroha_cfg.torii.telemetry_url; - - let mut handles = vec![]; - match telemetry_url.to_socket_addrs() { - Ok(addrs) => { - for addr in addrs { - let torii = Arc::clone(&self); - - let telemetry_router = torii.create_telemetry_router(); - let signal_fut = async move { torii.notify_shutdown.notified().await }; - let (_, serve_fut) = - warp::serve(telemetry_router).bind_with_graceful_shutdown(addr, signal_fut); - - handles.push(task::spawn(serve_fut)); - } - - Ok(handles) - } - Err(error) => { - iroha_logger::error!(%telemetry_url, ?error, "Telemetry address configuration parse error"); - Err(eyre::Error::new(error)) - } - } - } - /// Start main api endpoints. /// /// # Errors @@ -690,18 +582,10 @@ impl Torii { /// Can fail due to listening to network or if http server fails #[iroha_futures::telemetry_future] pub(crate) async fn start(self) -> eyre::Result<()> { - let query_idle_time = Duration::from_millis(self.iroha_cfg.torii.query_idle_time_ms.get()); - let torii = Arc::new(self); let mut handles = vec![]; - #[cfg(feature = "telemetry")] - handles.extend(Arc::clone(&torii).start_telemetry()?); handles.extend(Arc::clone(&torii).start_api()?); - handles.push( - Arc::clone(&torii.query_store) - .expired_query_cleanup(query_idle_time, Arc::clone(&torii.notify_shutdown)), - ); handles .into_iter() diff --git a/cli/src/torii/utils.rs b/cli/src/torii/utils.rs index 7d590ff4b48..6a5e2432fa6 100644 --- a/cli/src/torii/utils.rs +++ b/cli/src/torii/utils.rs @@ -1,7 +1,12 @@ use std::convert::Infallible; use iroha_version::prelude::*; -use warp::{hyper::body::Bytes, reply::Response, Filter, Rejection, Reply}; +use warp::{ + http::{header::CONTENT_TYPE, HeaderValue}, + hyper::body::Bytes, + reply::Response, + Filter, Rejection, Reply, +}; /// Structure for empty response body #[derive(Clone, Copy)] @@ -13,12 +18,22 @@ impl Reply for Empty { } } -/// Structure for response in scale codec in body +/// MIME used in Torii for SCALE encoding +// note: no elegant way to associate it with generic `Scale` +pub const PARITY_SCALE_MIME_TYPE: &'_ str = "application/x-parity-scale"; + +/// Structure to reply using SCALE encoding +#[derive(Debug)] pub struct Scale(pub T); impl Reply for Scale { fn into_response(self) -> Response { - Response::new(self.0.encode().into()) + let mut res = Response::new(self.0.encode().into()); + res.headers_mut().insert( + CONTENT_TYPE, + HeaderValue::from_static(PARITY_SCALE_MIME_TYPE), + ); + res } } diff --git a/client/Cargo.toml b/client/Cargo.toml index 11b16f5d52f..2424cfeca5c 100644 --- a/client/Cargo.toml +++ b/client/Cargo.toml @@ -14,11 +14,35 @@ license.workspace = true keywords.workspace = true categories.workspace = true +[lints] +workspace = true + [badges] is-it-maintained-issue-resolution = { repository = "https://github.com/hyperledger/iroha" } is-it-maintained-open-issues = { repository = "https://github.com/hyperledger/iroha" } maintenance = { status = "actively-developed" } +[features] +# Use rustls by default to avoid OpenSSL dependency, simplifying compilation with musl +default = ["tls-rustls-native-roots"] + +tls-native = [ + "attohttpc/tls-native", + "tokio-tungstenite/native-tls", +] +tls-native-vendored = [ + "attohttpc/tls-native-vendored", + "tokio-tungstenite/native-tls-vendored", +] +tls-rustls-native-roots = [ + "attohttpc/tls-rustls-native-roots", + 
"tokio-tungstenite/rustls-tls-native-roots", +] +tls-rustls-webpki-roots = [ + "attohttpc/tls-rustls-webpki-roots", + "tokio-tungstenite/rustls-tls-webpki-roots", +] + [dependencies] iroha_config = { workspace = true } iroha_crypto = { workspace = true } @@ -28,9 +52,9 @@ iroha_logger = { workspace = true } iroha_telemetry = { workspace = true } iroha_version = { workspace = true, features = ["http"] } -attohttpc = "0.18.0" +attohttpc = { version = "0.26.1", default-features = false } eyre = { workspace = true } -http = "0.2.8" +http = "0.2.9" url = { workspace = true } rand = { workspace = true } serde = { workspace = true, features = ["derive"] } @@ -41,10 +65,8 @@ displaydoc = { workspace = true } derive_more = { workspace = true } parity-scale-codec = { workspace = true, default-features = false, features = ["derive"] } tokio = { workspace = true, features = ["rt"] } -# TODO: migrate to tokio-tungstenite 0.17 (or newer) and use the workspace dependency -tokio-tungstenite = { version = "0.16.1", features = ["native-tls"] } -tungstenite = { version = "0.16", features = ["native-tls"] } -futures-util = "0.3.25" +tokio-tungstenite = { workspace = true } +futures-util = "0.3.28" [dev-dependencies] iroha_wasm_builder = { workspace = true } diff --git a/client/benches/torii.rs b/client/benches/torii.rs index 13045603e80..c35d90618e4 100644 --- a/client/benches/torii.rs +++ b/client/benches/torii.rs @@ -1,14 +1,15 @@ -#![allow(missing_docs, clippy::pedantic, clippy::restriction)] +#![allow(missing_docs, clippy::pedantic)] use std::thread; use criterion::{criterion_group, criterion_main, Criterion, Throughput}; -use iroha::samples::{construct_validator, get_config}; +use iroha::samples::{construct_executor, get_config}; use iroha_client::client::{asset, Client}; use iroha_config::base::runtime_upgrades::Reload; use iroha_crypto::KeyPair; use iroha_data_model::prelude::*; use iroha_genesis::{GenesisNetwork, RawGenesisBlockBuilder}; +use iroha_primitives::unique_vec; use iroha_version::Encode; use test_network::{get_key_pair, Peer as TestPeer, PeerBuilder, TestRuntime}; use tokio::runtime::Runtime; @@ -17,22 +18,19 @@ const MINIMUM_SUCCESS_REQUEST_RATIO: f32 = 0.9; fn query_requests(criterion: &mut Criterion) { let mut peer = ::new().expect("Failed to create peer"); - let configuration = get_config( - std::iter::once(peer.id.clone()).collect(), - Some(get_key_pair()), - ); + let configuration = get_config(unique_vec![peer.id.clone()], Some(get_key_pair())); let rt = Runtime::test(); let genesis = GenesisNetwork::from_configuration( - RawGenesisBlockBuilder::new() + RawGenesisBlockBuilder::default() .domain("wonderland".parse().expect("Valid")) .account( "alice".parse().expect("Valid"), get_key_pair().public_key().clone(), ) .finish_domain() - .validator( - construct_validator("../default_validator").expect("Failed to construct validator"), + .executor( + construct_executor("../default_executor").expect("Failed to construct executor"), ) .build(), Some(&configuration.genesis), @@ -51,16 +49,16 @@ fn query_requests(criterion: &mut Criterion) { .expect("Should not fail"); let mut group = criterion.benchmark_group("query-requests"); let domain_id: DomainId = "domain".parse().expect("Valid"); - let create_domain = RegisterBox::new(Domain::new(domain_id.clone())); + let create_domain = RegisterExpr::new(Domain::new(domain_id.clone())); let account_id = AccountId::new("account".parse().expect("Valid"), domain_id.clone()); let (public_key, _) = KeyPair::generate() .expect("Failed to generate KeyPair") 
.into(); - let create_account = RegisterBox::new(Account::new(account_id.clone(), [public_key])); + let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); let asset_definition_id = AssetDefinitionId::new("xor".parse().expect("Valid"), domain_id); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let quantity: u32 = 200; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id, account_id.clone())), ); @@ -71,7 +69,7 @@ fn query_requests(criterion: &mut Criterion) { let iroha_client = Client::new(&client_config).expect("Invalid client configuration"); thread::sleep(std::time::Duration::from_millis(5000)); - let instructions: [InstructionBox; 4] = [ + let instructions: [InstructionExpr; 4] = [ create_domain.into(), create_account.into(), create_asset.into(), @@ -88,10 +86,11 @@ fn query_requests(criterion: &mut Criterion) { let _dropable = group.throughput(Throughput::Bytes(request.encode().len() as u64)); let _dropable2 = group.bench_function("query", |b| { b.iter(|| { - match iroha_client + let iter: Result, _> = iroha_client .request(request.clone()) - .and_then(|iter| iter.collect::, _>>()) - { + .and_then(Iterator::collect); + + match iter { Ok(assets) => { assert!(!assets.is_empty()); success_count += 1; @@ -117,20 +116,17 @@ fn instruction_submits(criterion: &mut Criterion) { println!("instruction submits"); let rt = Runtime::test(); let mut peer = ::new().expect("Failed to create peer"); - let configuration = get_config( - std::iter::once(peer.id.clone()).collect(), - Some(get_key_pair()), - ); + let configuration = get_config(unique_vec![peer.id.clone()], Some(get_key_pair())); let genesis = GenesisNetwork::from_configuration( - RawGenesisBlockBuilder::new() + RawGenesisBlockBuilder::default() .domain("wonderland".parse().expect("Valid")) .account( "alice".parse().expect("Valid"), configuration.public_key.clone(), ) .finish_domain() - .validator( - construct_validator("../default_validator").expect("Failed to construct validator"), + .executor( + construct_executor("../default_executor").expect("Failed to construct executor"), ) .build(), Some(&configuration.genesis), @@ -142,12 +138,12 @@ fn instruction_submits(criterion: &mut Criterion) { rt.block_on(builder.start_with_peer(&mut peer)); let mut group = criterion.benchmark_group("instruction-requests"); let domain_id: DomainId = "domain".parse().expect("Valid"); - let create_domain = RegisterBox::new(Domain::new(domain_id.clone())); + let create_domain = RegisterExpr::new(Domain::new(domain_id.clone())); let account_id = AccountId::new("account".parse().expect("Valid"), domain_id.clone()); let (public_key, _) = KeyPair::generate() .expect("Failed to generate Key-pair.") .into(); - let create_account = RegisterBox::new(Account::new(account_id.clone(), [public_key])); + let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); let asset_definition_id = AssetDefinitionId::new("xor".parse().expect("Valid"), domain_id); let mut client_config = iroha_client::samples::get_client_config(&get_key_pair()); client_config.torii_api_url = format!("http://{}", peer.api_address).parse().unwrap(); @@ -162,7 +158,7 @@ fn instruction_submits(criterion: &mut Criterion) { let _dropable = group.bench_function("instructions", |b| { b.iter(|| { let quantity: u32 = 200; - let 
mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), diff --git a/client/benches/tps/dev.rs b/client/benches/tps/dev.rs index 4a67cac7546..716fdfe2eb3 100644 --- a/client/benches/tps/dev.rs +++ b/client/benches/tps/dev.rs @@ -2,11 +2,6 @@ //! using [criterion](https://github.com/bheisler/criterion.rs) //! for performance check during development #![allow(missing_docs)] -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use criterion::{ black_box, criterion_group, criterion_main, @@ -18,9 +13,7 @@ use crate::utils::Config; mod utils; -#[allow(clippy::multiple_inherent_impl)] impl Config { - #[allow(clippy::expect_used)] fn bench(self, c: &mut Criterion) { let mut group = c.benchmark_group("tps"); @@ -34,7 +27,6 @@ impl Config { } } -#[allow(clippy::expect_used)] fn bench_tps_with_config(c: &mut Criterion) { let config = Config::from_path("benches/tps/config.json").expect("Failed to configure"); iroha_logger::info!(?config); diff --git a/client/benches/tps/oneshot.rs b/client/benches/tps/oneshot.rs index a7e606ba017..6fd57cf00ba 100644 --- a/client/benches/tps/oneshot.rs +++ b/client/benches/tps/oneshot.rs @@ -7,7 +7,6 @@ use std::{fs::File, io::BufWriter}; use tracing_flame::{FlameLayer, FlushGuard}; use tracing_subscriber::prelude::*; -#[allow(clippy::expect_used, clippy::print_stdout, clippy::use_debug)] fn main() { let args: Vec = std::env::args().collect(); let mut flush_guard: Option>> = None; diff --git a/client/benches/tps/utils.rs b/client/benches/tps/utils.rs index a16fb8c6ecf..34adf4fdf5e 100644 --- a/client/benches/tps/utils.rs +++ b/client/benches/tps/utils.rs @@ -1,8 +1,3 @@ -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::{ fmt, fs::File, @@ -58,7 +53,6 @@ impl Config { serde_json::from_reader(reader).wrap_err("Failed to deserialize json from reader") } - #[allow(clippy::expect_used, clippy::unwrap_in_result)] pub fn measure(self) -> Result { // READY let (_rt, network, client) = ::start_test_with_runtime(self.peers, None); @@ -132,14 +126,15 @@ impl Config { let block = blocks .next() .expect("The block is not yet in WSV. 
Need more sleep?"); - let block = block.as_v1(); ( block + .payload() .transactions .iter() .filter(|tx| tx.error.is_none()) .count(), block + .payload() .transactions .iter() .filter(|tx| tx.error.is_some()) @@ -168,15 +163,14 @@ impl MeasurerUnit { const PREPARATION_BLOCKS_NUMBER: u32 = 3; /// Submit initial transactions for measurement - #[allow(clippy::expect_used, clippy::unwrap_in_result)] fn ready(self) -> Result { let keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair."); let account_id = account_id(self.name); - let alice_id = ::Id::from_str("alice@wonderland")?; + let alice_id = AccountId::from_str("alice@wonderland")?; let asset_id = asset_id(self.name); - let register_me = RegisterBox::new(Account::new( + let register_me = RegisterExpr::new(Account::new( account_id.clone(), [keypair.public_key().clone()], )); @@ -186,12 +180,12 @@ impl MeasurerUnit { "CanBurnUserAsset".parse().unwrap(), &json!({ "asset_id": asset_id }), ); - let allow_alice_to_burn_my_asset = GrantBox::new(can_burn_my_asset, alice_id.clone()); + let allow_alice_to_burn_my_asset = GrantExpr::new(can_burn_my_asset, alice_id.clone()); let can_transfer_my_asset = PermissionToken::new( "CanTransferUserAsset".parse().unwrap(), &json!({ "asset_id": asset_id }), ); - let allow_alice_to_transfer_my_asset = GrantBox::new(can_transfer_my_asset, alice_id); + let allow_alice_to_transfer_my_asset = GrantExpr::new(can_transfer_my_asset, alice_id); let grant_tx = TransactionBuilder::new(account_id) .with_instructions([ allow_alice_to_burn_my_asset, @@ -200,14 +194,13 @@ impl MeasurerUnit { .sign(keypair)?; self.client.submit_transaction_blocking(&grant_tx)?; - let mint_a_rose = MintBox::new(1_u32, asset_id); + let mint_a_rose = MintExpr::new(1_u32, asset_id); self.client.submit_blocking(mint_a_rose)?; Ok(self) } /// Spawn who checks if all the expected blocks are committed - #[allow(clippy::expect_used)] fn spawn_event_counter(&self) -> thread::JoinHandle> { let listener = self.client.clone(); let (init_sender, init_receiver) = mpsc::channel(); @@ -238,8 +231,7 @@ impl MeasurerUnit { let submitter = self.client.clone(); let interval_us_per_tx = self.config.interval_us_per_tx; let instructions = self.instructions(); - let alice_id = ::Id::from_str("alice@wonderland") - .expect("Failed to parse account id"); + let alice_id = AccountId::from_str("alice@wonderland").expect("Failed to parse account id"); let mut nonce = NonZeroU32::new(1).expect("Valid"); @@ -273,27 +265,26 @@ impl MeasurerUnit { }) } - #[allow(clippy::expect_used)] - fn instructions(&self) -> impl Iterator { + fn instructions(&self) -> impl Iterator { [self.mint_or_burn(), self.relay_a_rose()] .into_iter() .cycle() } - fn mint_or_burn(&self) -> InstructionBox { + fn mint_or_burn(&self) -> InstructionExpr { let is_running_out = Less::new( EvaluatesTo::new_unchecked(Expression::Query( FindAssetQuantityById::new(asset_id(self.name)).into(), )), 100_u32, ); - let supply_roses = MintBox::new(100_u32.to_value(), asset_id(self.name)); - let burn_a_rose = BurnBox::new(1_u32.to_value(), asset_id(self.name)); + let supply_roses = MintExpr::new(100_u32.to_value(), asset_id(self.name)); + let burn_a_rose = BurnExpr::new(1_u32.to_value(), asset_id(self.name)); - Conditional::with_otherwise(is_running_out, supply_roses, burn_a_rose).into() + ConditionalExpr::with_otherwise(is_running_out, supply_roses, burn_a_rose).into() } - fn relay_a_rose(&self) -> InstructionBox { + fn relay_a_rose(&self) -> InstructionExpr { // Save at least one rose // because 
if asset value hits 0 it's automatically deleted from account // and query `FindAssetQuantityById` return error @@ -303,17 +294,16 @@ impl MeasurerUnit { )), 1_u32, ); - let transfer_rose = TransferBox::new( + let transfer_rose = TransferExpr::new( asset_id(self.name), 1_u32.to_value(), account_id(self.next_name), ); - Conditional::new(enough_to_transfer, transfer_rose).into() + ConditionalExpr::new(enough_to_transfer, transfer_rose).into() } } -#[allow(clippy::expect_used)] fn asset_id(account_name: UnitName) -> AssetId { AssetId::new( "rose#wonderland".parse().expect("Valid"), @@ -321,7 +311,6 @@ fn asset_id(account_name: UnitName) -> AssetId { ) } -#[allow(clippy::expect_used)] fn account_id(name: UnitName) -> AccountId { format!("{name}@wonderland").parse().expect("Valid") } diff --git a/client/examples/million_accounts_genesis.rs b/client/examples/million_accounts_genesis.rs index f2452b2a786..d781b2dfb73 100644 --- a/client/examples/million_accounts_genesis.rs +++ b/client/examples/million_accounts_genesis.rs @@ -1,17 +1,17 @@ -#![allow(missing_docs, clippy::pedantic, clippy::restriction)] - +//! This file contains examples from the Rust tutorial. use std::{thread, time::Duration}; -use iroha::samples::{construct_validator, get_config}; +use iroha::samples::{construct_executor, get_config}; use iroha_data_model::prelude::*; use iroha_genesis::{GenesisNetwork, RawGenesisBlock, RawGenesisBlockBuilder}; +use iroha_primitives::unique_vec; use test_network::{ get_key_pair, wait_for_genesis_committed, Peer as TestPeer, PeerBuilder, TestRuntime, }; use tokio::runtime::Runtime; fn generate_genesis(num_domains: u32) -> RawGenesisBlock { - let mut builder = RawGenesisBlockBuilder::new(); + let mut builder = RawGenesisBlockBuilder::default(); let key_pair = get_key_pair(); for i in 0_u32..num_domains { @@ -29,18 +29,13 @@ fn generate_genesis(num_domains: u32) -> RawGenesisBlock { } builder - .validator( - construct_validator("../default_validator").expect("Failed to construct validator"), - ) + .executor(construct_executor("../default_executor").expect("Failed to construct executor")) .build() } fn main_genesis() { let mut peer = ::new().expect("Failed to create peer"); - let configuration = get_config( - std::iter::once(peer.id.clone()).collect(), - Some(get_key_pair()), - ); + let configuration = get_config(unique_vec![peer.id.clone()], Some(get_key_pair())); let rt = Runtime::test(); let genesis = GenesisNetwork::from_configuration( generate_genesis(1_000_000_u32), @@ -69,8 +64,8 @@ fn create_million_accounts_directly() { format!("bob-{i}").parse().expect("Valid"), domain_id.clone(), ); - let create_domain = RegisterBox::new(Domain::new(domain_id)); - let create_account = RegisterBox::new(Account::new(normal_account_id.clone(), [])); + let create_domain = RegisterExpr::new(Domain::new(domain_id)); + let create_account = RegisterExpr::new(Account::new(normal_account_id.clone(), [])); if test_client .submit_all([create_domain, create_account]) .is_err() diff --git a/client/examples/tutorial.rs b/client/examples/tutorial.rs index b18c9c424ce..f95229602ed 100644 --- a/client/examples/tutorial.rs +++ b/client/examples/tutorial.rs @@ -1,7 +1,5 @@ //! This file contains examples from the Rust tutorial. //! 
-#![allow(clippy::restriction, clippy::needless_borrow)] - use std::fs::File; use eyre::{Error, WrapErr}; @@ -42,7 +40,7 @@ fn json_config_client_test(config: &Configuration) -> Result<(), Error> { use iroha_client::client::Client; // Initialise a client with a provided config - let _current_client: Client = Client::new(&config)?; + let _current_client: Client = Client::new(config)?; Ok(()) } @@ -52,7 +50,7 @@ fn domain_registration_test(config: &Configuration) -> Result<(), Error> { use iroha_client::client::Client; use iroha_data_model::{ metadata::UnlimitedMetadata, - prelude::{Domain, DomainId, InstructionBox, RegisterBox}, + prelude::{Domain, DomainId, InstructionExpr, RegisterExpr}, }; // #endregion domain_register_example_crates @@ -63,18 +61,18 @@ fn domain_registration_test(config: &Configuration) -> Result<(), Error> { // #region domain_register_example_create_isi // Create an ISI - let create_looking_glass = RegisterBox::new(Domain::new(looking_glass)); + let create_looking_glass = RegisterExpr::new(Domain::new(looking_glass)); // #endregion domain_register_example_create_isi // #region rust_client_create // Create an Iroha client - let iroha_client: Client = Client::new(&config)?; + let iroha_client: Client = Client::new(config)?; // #endregion rust_client_create // #region domain_register_example_prepare_tx // Prepare a transaction let metadata = UnlimitedMetadata::default(); - let instructions: Vec = vec![create_looking_glass.into()]; + let instructions: Vec = vec![create_looking_glass.into()]; let tx = iroha_client .build_transaction(instructions, metadata) .wrap_err("Error building a domain registration transaction")?; @@ -117,12 +115,12 @@ fn account_registration_test(config: &Configuration) -> Result<(), Error> { use iroha_crypto::KeyPair; use iroha_data_model::{ metadata::UnlimitedMetadata, - prelude::{Account, AccountId, InstructionBox, RegisterBox}, + prelude::{Account, AccountId, InstructionExpr, RegisterExpr}, }; // #endregion register_account_crates // Create an Iroha client - let iroha_client: Client = Client::new(&config)?; + let iroha_client: Client = Client::new(config)?; // #region register_account_create // Create an AccountId instance by providing the account and domain name @@ -139,14 +137,14 @@ fn account_registration_test(config: &Configuration) -> Result<(), Error> { // #region register_account_generate // Generate a new account - let create_account = RegisterBox::new(Account::new(account_id, [public_key])); + let create_account = RegisterExpr::new(Account::new(account_id, [public_key])); // #endregion register_account_generate // #region register_account_prepare_tx // Prepare a transaction using the - // Account's RegisterBox + // Account's RegisterExpr let metadata = UnlimitedMetadata::new(); - let instructions: Vec = vec![create_account.into()]; + let instructions: Vec = vec![create_account.into()]; let tx = iroha_client.build_transaction(instructions, metadata)?; // #endregion register_account_prepare_tx @@ -165,12 +163,12 @@ fn asset_registration_test(config: &Configuration) -> Result<(), Error> { use iroha_client::client::Client; use iroha_data_model::prelude::{ - AccountId, AssetDefinition, AssetDefinitionId, AssetId, IdBox, MintBox, RegisterBox, + AccountId, AssetDefinition, AssetDefinitionId, AssetId, IdBox, MintExpr, RegisterExpr, }; // #endregion register_asset_crates // Create an Iroha client - let iroha_client: Client = Client::new(&config)?; + let iroha_client: Client = Client::new(config)?; // #region register_asset_create_asset // Create 
an asset @@ -181,7 +179,7 @@ fn asset_registration_test(config: &Configuration) -> Result<(), Error> { // #region register_asset_init_submit // Initialise the registration time let register_time = - RegisterBox::new(AssetDefinition::fixed(asset_def_id.clone()).mintable_once()); + RegisterExpr::new(AssetDefinition::fixed(asset_def_id.clone()).mintable_once()); // Submit a registration time iroha_client.submit(register_time)?; @@ -193,8 +191,8 @@ fn asset_registration_test(config: &Configuration) -> Result<(), Error> { .expect("Valid, because the string contains no whitespace, has a single '@' character and is not empty after"); // #region register_asset_mint_submit - // Create a MintBox using a previous asset and account - let mint = MintBox::new( + // Create a MintExpr using a previous asset and account + let mint = MintExpr::new( 12.34_f64.try_to_value()?, IdBox::AssetId(AssetId::new(asset_def_id, account_id)), ); @@ -213,13 +211,13 @@ fn asset_minting_test(config: &Configuration) -> Result<(), Error> { use iroha_client::client::Client; use iroha_data_model::{ - prelude::{AccountId, AssetDefinitionId, AssetId, MintBox, ToValue}, + prelude::{AccountId, AssetDefinitionId, AssetId, MintExpr, ToValue}, IdBox, }; // #endregion mint_asset_crates // Create an Iroha client - let iroha_client: Client = Client::new(&config)?; + let iroha_client: Client = Client::new(config)?; // Define the instances of an Asset and Account // #region mint_asset_define_asset_account @@ -231,7 +229,7 @@ fn asset_minting_test(config: &Configuration) -> Result<(), Error> { // Mint the Asset instance // #region mint_asset_mint - let mint_roses = MintBox::new( + let mint_roses = MintExpr::new( 42_u32.to_value(), IdBox::AssetId(AssetId::new(roses, alice)), ); @@ -249,7 +247,7 @@ fn asset_minting_test(config: &Configuration) -> Result<(), Error> { // or `roses.to_string() + "#" + alice.to_string()`. // The `##` is a short-hand for the rose `which belongs to the same domain as the account // to which it belongs to. - let mint_roses_alt = MintBox::new( + let mint_roses_alt = MintExpr::new( 10_u32.to_value(), IdBox::AssetId("rose##alice@wonderland".parse()?), ); @@ -271,13 +269,13 @@ fn asset_burning_test(config: &Configuration) -> Result<(), Error> { use iroha_client::client::Client; use iroha_data_model::{ - prelude::{AccountId, AssetDefinitionId, AssetId, BurnBox, ToValue}, + prelude::{AccountId, AssetDefinitionId, AssetId, BurnExpr, ToValue}, IdBox, }; // #endregion burn_asset_crates // Create an Iroha client - let iroha_client: Client = Client::new(&config)?; + let iroha_client: Client = Client::new(config)?; // #region burn_asset_define_asset_account // Define the instances of an Asset and Account @@ -289,7 +287,7 @@ fn asset_burning_test(config: &Configuration) -> Result<(), Error> { // #region burn_asset_burn // Burn the Asset instance - let burn_roses = BurnBox::new( + let burn_roses = BurnExpr::new( 10_u32.to_value(), IdBox::AssetId(AssetId::new(roses, alice)), ); @@ -307,7 +305,7 @@ fn asset_burning_test(config: &Configuration) -> Result<(), Error> { // or `roses.to_string() + "#" + alice.to_string()`. // The `##` is a short-hand for the rose `which belongs to the same domain as the account // to which it belongs to. 
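The `##` shorthand described in the comments above is worth spelling out. A small sketch of the equivalence, assuming the `name#domain#account@domain` grammar these examples already use:

```rust
use iroha_data_model::prelude::AssetId;

fn shorthand_equivalence() -> Result<(), Box<dyn std::error::Error>> {
    // Fully qualified: `name#definition-domain#account@account-domain`
    let full: AssetId = "rose#wonderland#alice@wonderland".parse()?;
    // `##` elides the definition domain when it matches the account's domain
    let short: AssetId = "rose##alice@wonderland".parse()?;
    assert_eq!(full, short);
    Ok(())
}
```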
- let burn_roses_alt = BurnBox::new( + let burn_roses_alt = BurnExpr::new( 10_u32.to_value(), IdBox::AssetId("rose##alice@wonderland".parse()?), ); diff --git a/client/src/client.rs b/client/src/client.rs index 492fbea2d50..0978ff49d58 100644 --- a/client/src/client.rs +++ b/client/src/client.rs @@ -1,10 +1,5 @@ //! Contains the end-point querying logic. This is where you need to //! add any custom end-point related logic. -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::{ collections::HashMap, fmt::Debug, @@ -21,14 +16,13 @@ use http_default::{AsyncWebSocketStream, WebSocketStream}; use iroha_config::{client::Configuration, torii::uri, GetConfiguration, PostConfiguration}; use iroha_crypto::{HashOf, KeyPair}; use iroha_data_model::{ - block::VersionedCommittedBlock, - http::VersionedBatchedResponse, + block::SignedBlock, isi::Instruction, predicate::PredicateBox, prelude::*, - query::{ForwardCursor, Pagination, Query, Sorting}, + query::{Pagination, Query, Sorting}, transaction::TransactionPayload, - ValidationFail, + BatchedResponse, ValidationFail, }; use iroha_logger::prelude::*; use iroha_telemetry::metrics::Status; @@ -42,6 +36,7 @@ use self::{blocks_api::AsyncBlockStream, events_api::AsyncEventStream}; use crate::{ http::{Method as HttpMethod, RequestBuilder, Response, StatusCode}, http_default::{self, DefaultRequestBuilder, WebSocketError, WebSocketMessage}, + query_builder::QueryRequestBuilder, }; const APPLICATION_JSON: &str = "application/json"; @@ -76,23 +71,23 @@ pub trait Sign { fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result; + ) -> Result; } impl Sign for TransactionBuilder { fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result { + ) -> Result { self.sign(key_pair) } } -impl Sign for VersionedSignedTransaction { +impl Sign for SignedTransaction { fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result { + ) -> Result { self.sign(key_pair) } } @@ -105,10 +100,10 @@ where // Separate-compilation friendly response handling fn _handle_query_response_base( resp: &Response>, - ) -> QueryResult> { + ) -> QueryResult> { match resp.status() { StatusCode::OK => { - let res = VersionedBatchedResponse::decode_all_versioned(resp.body()); + let res = BatchedResponse::decode_all_versioned(resp.body()); res.wrap_err( "Failed to decode response from Iroha. \ You are likely using a version of the client library \ @@ -143,16 +138,13 @@ where } } - let response = _handle_query_response_base(resp) - .map(|VersionedBatchedResponse::V1(response)| response)?; - - let (batch, cursor) = response.into(); + let (batch, cursor) = _handle_query_response_base(resp)?.into(); let value = R::try_from(batch) .map_err(Into::into) .wrap_err("Unexpected type")?; - self.query_request.query_cursor = cursor; + self.query_request.request = iroha_data_model::query::QueryRequest::Cursor(cursor); Ok(value) } } @@ -253,6 +245,15 @@ pub struct ResultSet { client_cursor: usize, } +impl ResultSet { + /// Get the length of the batch returned by Iroha. + /// + /// This is controlled by `fetch_size` parameter of the query. 
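`batch_len` reports only the batch currently buffered on the client; iterating the set keeps paging through the server-side cursor. A usage sketch, assuming a reachable peer and the `build_query` entry point added further down in this file:

```rust
use iroha_client::client::{asset, Client, QueryResult};
use iroha_data_model::prelude::AccountId;

// Count assets lazily: the first batch size is visible via `batch_len`,
// the remaining batches arrive as iteration follows the forward cursor.
fn count_assets(client: &Client, account: AccountId) -> QueryResult<usize> {
    let assets = client.build_query(asset::by_account_id(account)).execute()?;
    println!("first batch: {} assets", assets.batch_len());
    let mut total = 0;
    for asset in assets {
        let _asset = asset?; // each item can still fail while paging
        total += 1;
    }
    Ok(total)
}
```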
+ pub fn batch_len(&self) -> usize { + self.iter.len() + } +} + impl Iterator for ResultSet where Vec: QueryOutput, @@ -262,13 +263,12 @@ where fn next(&mut self) -> Option { if self.client_cursor >= self.iter.len() { - if self - .query_handler - .query_request - .query_cursor - .cursor() - .is_none() - { + let iroha_data_model::query::QueryRequest::Cursor(cursor) = + &self.query_handler.query_request.request + else { + return None; + }; + if cursor.cursor().is_none() { return None; } @@ -310,7 +310,7 @@ where } } -macro_rules! impl_query_result { +macro_rules! impl_query_output { ( $($ident:ty),+ $(,)? ) => { $( impl QueryOutput for $ident { type Target = Self; @@ -321,7 +321,7 @@ macro_rules! impl_query_result { } )+ }; } -impl_query_result! { +impl_query_output! { bool, iroha_data_model::Value, iroha_data_model::numeric::NumericValue, @@ -334,34 +334,32 @@ impl_query_result! { iroha_data_model::query::MetadataValue, iroha_data_model::query::TransactionQueryOutput, iroha_data_model::permission::PermissionTokenSchema, - iroha_data_model::trigger::Trigger, + iroha_data_model::trigger::Trigger, } /// Iroha client #[derive(Clone, DebugCustom, Display)] #[debug( - fmt = "Client {{ torii: {torii_url}, telemetry_url: {telemetry_url}, public_key: {} }}", + fmt = "Client {{ torii: {torii_url}, public_key: {} }}", "key_pair.public_key()" )] #[display(fmt = "{}@{torii_url}", "key_pair.public_key()")] pub struct Client { /// Url for accessing iroha node - torii_url: Url, - /// Url to report status for administration - telemetry_url: Url, + pub torii_url: Url, /// Accounts keypair - key_pair: KeyPair, + pub key_pair: KeyPair, /// Transaction time to live in milliseconds - transaction_ttl: Option, + pub transaction_ttl: Option, /// Transaction status timeout - transaction_status_timeout: Duration, + pub transaction_status_timeout: Duration, /// Current account - account_id: AccountId, + pub account_id: AccountId, /// Http headers which will be appended to each request - headers: HashMap, + pub headers: HashMap, /// If `true` add nonce, which makes different hashes for /// transactions which occur repeatedly and/or simultaneously - add_transaction_nonce: bool, + pub add_transaction_nonce: bool, } /// Query request @@ -369,10 +367,7 @@ pub struct Client { pub struct QueryRequest { torii_url: Url, headers: HashMap, - request: Vec, - sorting: Sorting, - pagination: Pagination, - query_cursor: ForwardCursor, + request: iroha_data_model::query::QueryRequest>, } impl QueryRequest { @@ -383,22 +378,34 @@ impl QueryRequest { Self { torii_url: format!("http://{torii_url}").parse().unwrap(), headers: HashMap::new(), - request: Vec::new(), - sorting: Sorting::default(), - pagination: Pagination::default(), - query_cursor: ForwardCursor::default(), + request: iroha_data_model::query::QueryRequest::Query( + iroha_data_model::query::QueryWithParameters { + query: Vec::default(), + sorting: Sorting::default(), + pagination: Pagination::default(), + fetch_size: FetchSize::default(), + }, + ), } } + fn assemble(self) -> DefaultRequestBuilder { - DefaultRequestBuilder::new( + let builder = DefaultRequestBuilder::new( HttpMethod::POST, self.torii_url.join(uri::QUERY).expect("Valid URI"), ) - .headers(self.headers) - .params(Vec::from(self.sorting)) - .params(Vec::from(self.pagination)) - .params(Vec::from(self.query_cursor)) - .body(self.request) + .headers(self.headers); + + match self.request { + iroha_data_model::query::QueryRequest::Query(query_with_params) => builder + 
.params(query_with_params.sorting().clone().into_query_parameters()) + .params(query_with_params.pagination().into_query_parameters()) + .params(query_with_params.fetch_size().into_query_parameters()) + .body(query_with_params.query().clone()), + iroha_data_model::query::QueryRequest::Cursor(cursor) => { + builder.params(Vec::from(cursor)) + } + } } } @@ -415,7 +422,7 @@ impl Client { /// Constructor for client from configuration and headers /// - /// *Authentication* header will be added, if `login` and `password` fields are presented + /// *Authorization* header will be added if `login` and `password` fields are present /// /// # Errors /// If configuration isn't valid (e.g public/private keys don't match) @@ -426,13 +433,13 @@ impl Client { ) -> Result { if let Some(basic_auth) = &configuration.basic_auth { let credentials = format!("{}:{}", basic_auth.web_login, basic_auth.password); - let encoded = base64::encode(credentials); + let engine = base64::engine::general_purpose::STANDARD; + let encoded = base64::engine::Engine::encode(&engine, credentials); headers.insert(String::from("Authorization"), format!("Basic {encoded}")); } Ok(Self { torii_url: configuration.torii_api_url.clone(), - telemetry_url: configuration.torii_telemetry_url.clone(), key_pair: KeyPair::new( configuration.public_key.clone(), configuration.private_key.clone(), @@ -457,7 +464,7 @@ impl Client { &self, instructions: impl Into, metadata: UnlimitedMetadata, - ) -> Result { + ) -> Result { let tx_builder = TransactionBuilder::new(self.account_id.clone()); let mut tx_builder = match instructions.into() { @@ -483,10 +490,7 @@ impl Client { /// /// # Errors /// Fails if signature generation fails - pub fn sign_transaction( - &self, - transaction: Tx, - ) -> Result { + pub fn sign_transaction(&self, transaction: Tx) -> Result { transaction .sign(self.key_pair.clone()) .wrap_err("Failed to sign transaction") @@ -496,7 +500,7 @@ impl Client { /// /// # Errors /// Fails if signature generation fails - pub fn sign_query(&self, query: QueryBuilder) -> Result { + pub fn sign_query(&self, query: QueryBuilder) -> Result { query .sign(self.key_pair.clone()) .wrap_err("Failed to sign query") @@ -559,7 +563,7 @@ impl Client { /// Fails if sending transaction to peer fails or if it response with error pub fn submit_transaction( &self, - transaction: &VersionedSignedTransaction, + transaction: &SignedTransaction, ) -> Result> { iroha_logger::trace!(tx=?transaction, "Submitting"); let (req, hash) = self.prepare_transaction_request::(transaction); @@ -578,7 +582,7 @@ impl Client { /// Fails if sending a transaction to a peer fails or there is an error in the response pub fn submit_transaction_blocking( &self, - transaction: &VersionedSignedTransaction, + transaction: &SignedTransaction, ) -> Result> { let (init_sender, init_receiver) = tokio::sync::oneshot::channel(); let hash = transaction.payload().hash(); @@ -654,7 +658,7 @@ impl Client { PipelineStatus::Rejected(ref reason) => { return Err(reason.clone().into()); } - PipelineStatus::Committed => return Ok(hash.transmute()), + PipelineStatus::Committed => return Ok(hash), } } } @@ -672,7 +676,7 @@ impl Client { /// For general usage example see [`Client::prepare_query_request`].
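The `base64` 0.21 API requires naming an engine explicitly, but the header it produces is unchanged. A self-contained check with made-up credentials:

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

// `Authorization: Basic base64("login:password")`, as assembled above.
fn basic_auth_header() {
    let credentials = format!("{}:{}", "mad_hatter", "ilovetea");
    let header = format!("Basic {}", STANDARD.encode(credentials));
    assert_eq!(header, "Basic bWFkX2hhdHRlcjppbG92ZXRlYQ==");
}
```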
fn prepare_transaction_request( &self, - transaction: &VersionedSignedTransaction, + transaction: &SignedTransaction, ) -> (B, HashOf) { let transaction_bytes: Vec = transaction.encode_versioned(); @@ -806,6 +810,7 @@ impl Client { filter: PredicateBox, pagination: Pagination, sorting: Sorting, + fetch_size: FetchSize, ) -> Result<(DefaultRequestBuilder, QueryResponseHandler)> where >::Error: Into, @@ -816,10 +821,11 @@ impl Client { let query_request = QueryRequest { torii_url: self.torii_url.clone(), headers: self.headers.clone(), - request, - sorting, - pagination, - query_cursor: ForwardCursor::default(), + request: iroha_data_model::query::QueryRequest::Query( + iroha_data_model::query::QueryWithParameters::new( + request, sorting, pagination, fetch_size, + ), + ), }; Ok(( @@ -832,10 +838,11 @@ impl Client { /// /// # Errors /// Fails if sending request fails - pub fn request_with_filter_and_pagination_and_sorting( + pub(crate) fn request_with_filter_and_pagination_and_sorting( &self, request: R, pagination: Pagination, + fetch_size: FetchSize, sorting: Sorting, filter: PredicateBox, ) -> QueryResult<::Target> @@ -845,7 +852,7 @@ impl Client { { iroha_logger::trace!(?request, %pagination, ?sorting, ?filter); let (req, mut resp_handler) = - self.prepare_query_request::(request, filter, pagination, sorting)?; + self.prepare_query_request::(request, filter, pagination, sorting, fetch_size)?; let response = req.build()?.send()?; let value = resp_handler.handle(&response)?; @@ -854,139 +861,60 @@ impl Client { Ok(output) } - /// Create a request with pagination and sorting. - /// - /// # Errors - /// Fails if sending request fails - pub fn request_with_pagination_and_sorting( - &self, - request: R, - pagination: Pagination, - sorting: Sorting, - ) -> QueryResult<::Target> - where - R::Output: QueryOutput, - >::Error: Into, - { - self.request_with_filter_and_pagination_and_sorting( - request, - pagination, - sorting, - PredicateBox::default(), - ) - } - - /// Create a request with pagination, sorting, and the given filter. - /// - /// # Errors - /// Fails if sending request fails - pub fn request_with_filter_and_pagination( - &self, - request: R, - pagination: Pagination, - filter: PredicateBox, - ) -> QueryResult<::Target> - where - R::Output: QueryOutput, - >::Error: Into, - { - self.request_with_filter_and_pagination_and_sorting( - request, - pagination, - Sorting::default(), - filter, - ) - } - - /// Create a request with sorting and the given filter. + /// Query API entry point. Shorthand for `self.build_query(r).execute()`. /// /// # Errors /// Fails if sending request fails - pub fn request_with_filter_and_sorting( - &self, - request: R, - sorting: Sorting, - filter: PredicateBox, - ) -> QueryResult<::Target> + pub fn request(&self, request: R) -> QueryResult<::Target> where + R: Query + Debug, R::Output: QueryOutput, >::Error: Into, { - self.request_with_filter_and_pagination_and_sorting( - request, - Pagination::default(), - sorting, - filter, - ) + self.build_query(request).execute() } - /// Query API entry point. Requests quieries from `Iroha` peers with filter. + /// Query API entry point using cursor. /// - /// Uses default blocking http-client. If you need some custom integration, look at - /// [`Self::prepare_query_request`]. + /// You should probably not use this function directly. 
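With the `request_with_*` family removed, every parameter combination goes through the one builder. A migration sketch; the `asset::all()` helper is assumed, and any other `Query` works the same way:

```rust
use iroha_client::client::{asset, Client, QueryResult};
use iroha_data_model::{
    predicate::PredicateBox,
    query::{sorting::Sorting, Pagination},
};

// Before: client.request_with_filter_and_pagination(q, pagination, filter)
// After: one chainable builder; unset knobs keep their `Default` values.
fn filtered_page(client: &Client) -> QueryResult<()> {
    let _assets = client
        .build_query(asset::all())
        .with_pagination(Pagination::default())
        .with_sorting(Sorting::default())
        .with_filter(PredicateBox::default())
        .execute()?;
    Ok(())
}
```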
/// /// # Errors /// Fails if sending request fails - pub fn request_with_filter( + #[cfg(debug_assertions)] + pub fn request_with_cursor( &self, - request: R, - filter: PredicateBox, - ) -> QueryResult<::Target> + cursor: iroha_data_model::query::cursor::ForwardCursor, + ) -> QueryResult where - R::Output: QueryOutput, - >::Error: Into, + O: QueryOutput, + >::Error: Into, { - self.request_with_filter_and_pagination(request, Pagination::default(), filter) - } + let request = QueryRequest { + torii_url: self.torii_url.clone(), + headers: self.headers.clone(), + request: iroha_data_model::query::QueryRequest::Cursor(cursor), + }; + let response = request.clone().assemble().build()?.send()?; - /// Query API entry point. Requests queries from `Iroha` peers with pagination. - /// - /// Uses default blocking http-client. If you need some custom integration, look at - /// [`Self::prepare_query_request`]. - /// - /// # Errors - /// Fails if sending request fails - pub fn request_with_pagination( - &self, - request: R, - pagination: Pagination, - ) -> QueryResult<::Target> - where - R::Output: QueryOutput, - >::Error: Into, - { - self.request_with_filter_and_pagination(request, pagination, PredicateBox::default()) - } + let mut resp_handler = QueryResponseHandler::::new(request); + let value = resp_handler.handle(&response)?; + let output = O::new(value, resp_handler); - /// Query API entry point. Requests queries from `Iroha` peers with sorting. - /// - /// # Errors - /// Fails if sending request fails - pub fn request_with_sorting( - &self, - request: R, - sorting: Sorting, - ) -> QueryResult<::Target> - where - R::Output: QueryOutput, - >::Error: Into, - { - self.request_with_pagination_and_sorting(request, Pagination::default(), sorting) + Ok(output) } - /// Query API entry point. Requests queries from `Iroha` peers. + /// Query API entry point. + /// Creates a [`QueryRequestBuilder`] which can be used to configure a query request to `Iroha` peers. /// /// # Errors /// Fails if sending request fails - pub fn request( - &self, - request: R, - ) -> QueryResult<::Target> + pub fn build_query(&self, request: R) -> QueryRequestBuilder<'_, R> where + R: Query + Debug, R::Output: QueryOutput, >::Error: Into, { - self.request_with_pagination(request, Pagination::default()) + QueryRequestBuilder::new(self, request) } /// Connect (through `WebSocket`) to listen for `Iroha` `pipeline` and `data` events. @@ -1036,7 +964,7 @@ impl Client { pub fn listen_for_blocks( &self, height: NonZeroU64, - ) -> Result>> { + ) -> Result>> { blocks_api::BlockIterator::new(self.blocks_handler(height)?)
} @@ -1081,12 +1009,12 @@ impl Client { /// - if subscribing to websocket fails pub fn get_original_transaction_with_pagination( &self, - transaction: &VersionedSignedTransaction, + transaction: &SignedTransaction, retry_count: u32, retry_in: Duration, pagination: Pagination, - ) -> Result> { - let pagination: Vec<_> = pagination.into(); + ) -> Result> { + let pagination = pagination.into_query_parameters(); for _ in 0..retry_count { let response = DefaultRequestBuilder::new( HttpMethod::GET, @@ -1100,7 +1028,7 @@ impl Client { .send()?; if response.status() == StatusCode::OK { - let pending_transactions: Vec = + let pending_transactions: Vec = DecodeAll::decode_all(&mut response.body().as_slice())?; let transaction = pending_transactions @@ -1133,10 +1061,10 @@ impl Client { /// - if sending request fails pub fn get_original_transaction( &self, - transaction: &VersionedSignedTransaction, + transaction: &SignedTransaction, retry_count: u32, retry_in: Duration, - ) -> Result> { + ) -> Result> { self.get_original_transaction_with_pagination( transaction, retry_count, @@ -1228,7 +1156,7 @@ impl Client { pub fn prepare_status_request(&self) -> B { B::new( HttpMethod::GET, - self.telemetry_url.join(uri::STATUS).expect("Valid URI"), + self.torii_url.join(uri::STATUS).expect("Valid URI"), ) .headers(self.headers.clone()) } @@ -1270,7 +1198,7 @@ pub mod stream_api { } = Init::::init(handler); let mut stream = req.build()?.connect()?; - stream.write_message(WebSocketMessage::Binary(first_message))?; + stream.send(WebSocketMessage::Binary(first_message))?; trace!("`SyncIterator` created successfully"); Ok(SyncIterator { @@ -1285,7 +1213,7 @@ pub mod stream_api { fn next(&mut self) -> Option { loop { - match self.stream.read_message() { + match self.stream.read() { Ok(WebSocketMessage::Binary(message)) => { return Some(self.handler.message(message)) } @@ -1303,7 +1231,7 @@ pub mod stream_api { fn drop(&mut self) { let mut close = || -> eyre::Result<()> { self.stream.close(None)?; - let msg = self.stream.read_message()?; + let msg = self.stream.read()?; if !msg.is_close() { return Err(eyre!( "Server hasn't sent `Close` message for websocket handshake" @@ -1448,10 +1376,7 @@ pub mod events_api { url, } = self; - let msg = - VersionedEventSubscriptionRequest::from(EventSubscriptionRequest::new(filter)) - .encode_versioned(); - + let msg = EventSubscriptionRequest::new(filter).encode(); InitData::new(R::new(HttpMethod::GET, url).headers(headers), msg, Events) } } @@ -1464,8 +1389,7 @@ pub mod events_api { type Event = iroha_data_model::prelude::Event; fn message(&self, message: Vec) -> Result { - let event_socket_message = - VersionedEventMessage::decode_all_versioned(&message)?.into_v1(); + let event_socket_message = EventMessage::decode_all(&mut message.as_slice())?; Ok(event_socket_message.into()) } } @@ -1532,10 +1456,7 @@ mod blocks_api { url, } = self; - let msg = - VersionedBlockSubscriptionRequest::from(BlockSubscriptionRequest::new(height)) - .encode_versioned(); - + let msg = BlockSubscriptionRequest::new(height).encode(); InitData::new(R::new(HttpMethod::GET, url).headers(headers), msg, Events) } } @@ -1545,11 +1466,10 @@ mod blocks_api { pub struct Events; impl FlowEvents for Events { - type Event = iroha_data_model::block::VersionedCommittedBlock; + type Event = iroha_data_model::block::SignedBlock; fn message(&self, message: Vec) -> Result { - let block_msg = VersionedBlockMessage::decode_all_versioned(&message)?.into_v1(); - Ok(block_msg.into()) + Ok(BlockMessage::decode_all(&mut 
message.as_slice()).map(Into::into)?) } } } @@ -1610,7 +1530,7 @@ pub mod asset { } /// Construct a query to get an asset by its id - pub fn by_id(asset_id: impl Into::Id>>) -> FindAssetById { + pub fn by_id(asset_id: impl Into>) -> FindAssetById { FindAssetById::new(asset_id) } } @@ -1632,7 +1552,7 @@ pub mod block { /// Construct a query to find block header by hash pub fn header_by_hash( - hash: impl Into>>, + hash: impl Into>>, ) -> FindBlockHeaderByHash { FindBlockHeaderByHash::new(hash) } @@ -1672,7 +1592,7 @@ pub mod transaction { /// Construct a query to retrieve transaction by hash pub fn by_hash( - hash: impl Into>>, + hash: impl Into>>, ) -> FindTransactionByHash { FindTransactionByHash::new(hash) } @@ -1743,12 +1663,11 @@ pub mod parameter { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] use std::str::FromStr; use iroha_config::{ client::{BasicAuth, ConfigurationProxy, WebLogin}, - torii::{uri::DEFAULT_API_ADDR, DEFAULT_TORII_TELEMETRY_ADDR}, + torii::uri::DEFAULT_API_ADDR, }; use iroha_primitives::small::SmallStr; @@ -1772,11 +1691,6 @@ mod tests { .expect("This account ID should be valid"), ), torii_api_url: Some(format!("http://{DEFAULT_API_ADDR}").parse().unwrap()), - torii_telemetry_url: Some( - format!("http://{DEFAULT_TORII_TELEMETRY_ADDR}") - .parse() - .unwrap(), - ), add_transaction_nonce: Some(true), ..ConfigurationProxy::default() } @@ -1786,21 +1700,20 @@ mod tests { let build_transaction = || { client - .build_transaction(Vec::::new(), UnlimitedMetadata::new()) + .build_transaction(Vec::::new(), UnlimitedMetadata::new()) .unwrap() }; let tx1 = build_transaction(); let mut tx2 = build_transaction(); assert_ne!(tx1.payload().hash(), tx2.payload().hash()); - let VersionedSignedTransaction::V1(tx2_ref) = &mut tx2; - tx2_ref.payload.creation_time_ms = tx1 + tx2.payload_mut().creation_time_ms = tx1 .payload() .creation_time() .as_millis() .try_into() .expect("Valid"); - tx2_ref.payload.nonce = tx1.payload().nonce; + tx2.payload_mut().nonce = tx1.payload().nonce; assert_eq!(tx1.payload().hash(), tx2.payload().hash()); } @@ -1819,7 +1732,7 @@ mod tests { ), private_key: Some(iroha_crypto::PrivateKey::from_hex( iroha_crypto::Algorithm::Ed25519, - "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0".as_ref() + "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" ).expect("Private key not hex encoded")), account_id: Some( "alice@wonderland" @@ -1827,7 +1740,6 @@ mod tests { .expect("This account ID should be valid"), ), torii_api_url: Some(format!("http://{DEFAULT_API_ADDR}").parse().unwrap()), - torii_telemetry_url: Some(format!("http://{DEFAULT_TORII_TELEMETRY_ADDR}").parse().unwrap()), basic_auth: Some(Some(basic_auth)), ..ConfigurationProxy::default() } diff --git a/client/src/http_default.rs b/client/src/http_default.rs index 2147c9b61ce..7229d35f768 100644 --- a/client/src/http_default.rs +++ b/client/src/http_default.rs @@ -1,9 +1,4 @@ //! Defaults for various items used in communication over http(s). 
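The hunk below fills in the HTTP upgrade headers by hand, presumably because a bare `http::Request` handed to the upgraded tungstenite no longer carries them. A sketch of driving such a prepared request through the synchronous handshake:

```rust
use std::net::TcpStream;

use http::Request;
use tokio_tungstenite::tungstenite::client::client;

// `req` must already carry Host/Connection/Upgrade/Sec-WebSocket-* headers,
// exactly what `DefaultWebSocketRequestBuilder::build` now guarantees.
fn ws_handshake(req: Request<()>, stream: TcpStream) -> eyre::Result<()> {
    let (_socket, _response) =
        client(req, stream).map_err(|e| eyre::eyre!("handshake failed: {e}"))?;
    Ok(())
}
```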
-#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::{net::TcpStream, str::FromStr}; use attohttpc::{ @@ -11,8 +6,8 @@ use attohttpc::{ }; use eyre::{eyre, Error, Result, WrapErr}; use http::header::HeaderName; -use tungstenite::{stream::MaybeTlsStream, WebSocket}; -pub use tungstenite::{Error as WebSocketError, Message as WebSocketMessage}; +use tokio_tungstenite::tungstenite::{stream::MaybeTlsStream, WebSocket}; +pub use tokio_tungstenite::tungstenite::{Error as WebSocketError, Message as WebSocketMessage}; use url::Url; use crate::http::{Method, RequestBuilder, Response}; @@ -122,9 +117,21 @@ impl DefaultWebSocketRequestBuilder { /// Consumes itself to build request. pub fn build(self) -> Result { - self.0 - .and_then(|b| b.body(()).map_err(Into::into)) - .map(DefaultWebSocketStreamRequest) + let mut req = self.0.and_then(|b| b.body(()).map_err(Into::into))?; + + let uri = req.uri().to_string(); + let headers = req.headers_mut(); + + headers.insert("Host", uri.parse()?); + headers.insert("Connection", "Upgrade".parse()?); + headers.insert("Upgrade", "websocket".parse()?); + headers.insert("Sec-WebSocket-Version", "13".parse()?); + headers.insert( + "Sec-WebSocket-Key", + tokio_tungstenite::tungstenite::handshake::client::generate_key().parse()?, + ); + + Ok(DefaultWebSocketStreamRequest(req)) } } diff --git a/client/src/lib.rs b/client/src/lib.rs index 37f5ee07156..02112f6400a 100644 --- a/client/src/lib.rs +++ b/client/src/lib.rs @@ -5,17 +5,17 @@ pub mod client; /// Module with general communication primitives like an HTTP request builder. pub mod http; mod http_default; +mod query_builder; /// Module containing sample configurations for tests and benchmarks. pub mod samples { use iroha_config::{ client::{Configuration, ConfigurationProxy}, - torii::{uri::DEFAULT_API_ADDR, DEFAULT_TORII_TELEMETRY_ADDR}, + torii::uri::DEFAULT_API_ADDR, }; use iroha_crypto::KeyPair; /// Get sample client configuration. 
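With the telemetry URL gone from the client configuration, a test client needs only the API address. A minimal sketch using `get_client_config`, defined just below:

```rust
use iroha_client::{client::Client, samples::get_client_config};
use iroha_crypto::KeyPair;

// Status and metrics now share the torii API address, so the sample
// configuration carries no separate telemetry URL.
fn sample_client() -> eyre::Result<Client> {
    let key_pair = KeyPair::generate().expect("Failed to generate KeyPair");
    Client::new(&get_client_config(&key_pair))
}
```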
- #[allow(clippy::expect_used)] pub fn get_client_config(key_pair: &KeyPair) -> Configuration { let (public_key, private_key) = key_pair.clone().into(); ConfigurationProxy { @@ -31,11 +31,6 @@ pub mod samples { .parse() .expect("Should be a valid url"), ), - torii_telemetry_url: Some( - format!("http://{DEFAULT_TORII_TELEMETRY_ADDR}") - .parse() - .expect("Should be a valid url"), - ), ..ConfigurationProxy::default() } .build() diff --git a/client/src/query_builder.rs b/client/src/query_builder.rs new file mode 100644 index 00000000000..71fc1878d7f --- /dev/null +++ b/client/src/query_builder.rs @@ -0,0 +1,66 @@ +use std::fmt::Debug; + +use iroha_data_model::{ + predicate::PredicateBox, + query::{sorting::Sorting, FetchSize, Pagination, Query}, + Value, +}; + +use crate::client::{Client, QueryOutput, QueryResult}; + +pub struct QueryRequestBuilder<'a, R> { + client: &'a Client, + request: R, + pagination: Pagination, + filter: PredicateBox, + sorting: Sorting, + fetch_size: FetchSize, +} + +impl<'a, R> QueryRequestBuilder<'a, R> +where + R: Query + Debug, + R::Output: QueryOutput, + >::Error: Into, +{ + pub(crate) fn new(client: &'a Client, request: R) -> Self { + Self { + client, + request, + pagination: Pagination::default(), + sorting: Sorting::default(), + filter: PredicateBox::default(), + fetch_size: FetchSize::default(), + } + } + + pub fn with_filter(mut self, filter: PredicateBox) -> Self { + self.filter = filter; + self + } + + pub fn with_sorting(mut self, sorting: Sorting) -> Self { + self.sorting = sorting; + self + } + + pub fn with_pagination(mut self, pagination: Pagination) -> Self { + self.pagination = pagination; + self + } + + pub fn with_fetch_size(mut self, fetch_size: FetchSize) -> Self { + self.fetch_size = fetch_size; + self + } + + pub fn execute(self) -> QueryResult<::Target> { + self.client.request_with_filter_and_pagination_and_sorting( + self.request, + self.pagination, + self.fetch_size, + self.sorting, + self.filter, + ) + } +} diff --git a/client/tests/integration/add_account.rs b/client/tests/integration/add_account.rs index d40fa4d6304..32d5462c2fe 100644 --- a/client/tests/integration/add_account.rs +++ b/client/tests/integration/add_account.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::thread; use eyre::Result; @@ -16,14 +14,14 @@ fn client_add_account_with_name_length_more_than_limit_should_not_commit_transac let pipeline_time = super::Configuration::pipeline_time(); let normal_account_id: AccountId = "bob@wonderland".parse().expect("Valid"); - let create_account = RegisterBox::new(Account::new(normal_account_id.clone(), [])); + let create_account = RegisterExpr::new(Account::new(normal_account_id.clone(), [])); test_client.submit(create_account)?; let too_long_account_name = "0".repeat(2_usize.pow(14)); let incorrect_account_id: AccountId = (too_long_account_name + "@wonderland") .parse() .expect("Valid"); - let create_account = RegisterBox::new(Account::new(incorrect_account_id.clone(), [])); + let create_account = RegisterExpr::new(Account::new(incorrect_account_id.clone(), [])); test_client.submit(create_account)?; thread::sleep(pipeline_time * 2); diff --git a/client/tests/integration/add_domain.rs b/client/tests/integration/add_domain.rs index b17acf319db..b10eaf4265b 100644 --- a/client/tests/integration/add_domain.rs +++ b/client/tests/integration/add_domain.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::thread; use eyre::Result; @@ -19,11 +17,11 @@ fn 
client_add_domain_with_name_length_more_than_limit_should_not_commit_transact // Given let normal_domain_id: DomainId = "sora".parse()?; - let create_domain = RegisterBox::new(Domain::new(normal_domain_id.clone())); + let create_domain = RegisterExpr::new(Domain::new(normal_domain_id.clone())); test_client.submit(create_domain)?; let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(14)).parse()?; - let create_domain = RegisterBox::new(Domain::new(too_long_domain_name.clone())); + let create_domain = RegisterExpr::new(Domain::new(too_long_domain_name.clone())); test_client.submit(create_domain)?; thread::sleep(pipeline_time * 2); diff --git a/client/tests/integration/asset.rs b/client/tests/integration/asset.rs index bd9cde52c1e..8f2059447b1 100644 --- a/client/tests/integration/asset.rs +++ b/client/tests/integration/asset.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, thread}; use eyre::Result; @@ -14,15 +12,15 @@ use super::Configuration; #[test] fn client_register_asset_should_add_asset_once_but_not_twice() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_620).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_620).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("test_asset#wonderland").expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); - let register_asset = RegisterBox::new(Asset::new( + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let register_asset = RegisterExpr::new(Asset::new( AssetId::new(asset_definition_id.clone(), account_id.clone()), AssetValue::Quantity(0), )); @@ -48,7 +46,7 @@ fn client_register_asset_should_add_asset_once_but_not_twice() -> Result<()> { #[test] fn unregister_asset_should_remove_asset_from_account() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_555).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_555).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given @@ -56,9 +54,9 @@ fn unregister_asset_should_remove_asset_from_account() -> Result<()> { let asset_definition_id = AssetDefinitionId::from_str("test_asset#wonderland").expect("Valid"); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); - let register_asset = RegisterBox::new(Asset::new(asset_id.clone(), AssetValue::Quantity(0))); - let unregister_asset = UnregisterBox::new(asset_id); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let register_asset = RegisterExpr::new(Asset::new(asset_id.clone(), AssetValue::Quantity(0))); + let unregister_asset = UnregisterExpr::new(asset_id); test_client.submit_all([create_asset, register_asset])?; @@ -87,24 +85,24 @@ fn unregister_asset_should_remove_asset_from_account() -> Result<()> { #[test] fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_000).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_000).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = 
AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let metadata = iroha_data_model::metadata::UnlimitedMetadata::default(); //When let quantity: u32 = 200; - let mint = MintBox::new( + let mint = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), account_id.clone(), )), ); - let instructions: [InstructionBox; 2] = [create_asset.into(), mint.into()]; + let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.into()]; let tx = test_client.build_transaction(instructions, metadata)?; test_client.submit_transaction(&tx)?; test_client.poll_request(client::asset::by_account_id(account_id), |result| { @@ -120,24 +118,25 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> #[test] fn client_add_big_asset_quantity_to_existing_asset_should_increase_asset_amount() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_510).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_510).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::big_quantity(asset_definition_id.clone())); + let create_asset = + RegisterExpr::new(AssetDefinition::big_quantity(asset_definition_id.clone())); let metadata = iroha_data_model::metadata::UnlimitedMetadata::default(); //When let quantity: u128 = 2_u128.pow(65); - let mint = MintBox::new( + let mint = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), account_id.clone(), )), ); - let instructions: [InstructionBox; 2] = [create_asset.into(), mint.into()]; + let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.into()]; let tx = test_client.build_transaction(instructions, metadata)?; test_client.submit_transaction(&tx)?; test_client.poll_request(client::asset::by_account_id(account_id), |result| { @@ -153,26 +152,26 @@ fn client_add_big_asset_quantity_to_existing_asset_should_increase_asset_amount( #[test] fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_515).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_515).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let identifiable_box = AssetDefinition::fixed(asset_definition_id.clone()); - let create_asset = RegisterBox::new(identifiable_box); + let create_asset = RegisterExpr::new(identifiable_box); let metadata = iroha_data_model::metadata::UnlimitedMetadata::default(); //When let quantity: Fixed = Fixed::try_from(123.456_f64).unwrap(); - let mint = MintBox::new( + let mint = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), account_id.clone(), )), ); - let instructions: [InstructionBox; 2] = [create_asset.into(), mint.into()]; + let instructions: 
[InstructionExpr; 2] = [create_asset.into(), mint.into()]; let tx = test_client.build_transaction(instructions, metadata)?; test_client.submit_transaction(&tx)?; test_client.poll_request(client::asset::by_account_id(account_id.clone()), |result| { @@ -186,7 +185,7 @@ fn client_add_asset_with_decimal_should_increase_asset_amount() -> Result<()> { // Add some fractional part let quantity2: Fixed = Fixed::try_from(0.55_f64).unwrap(); - let mint = MintBox::new( + let mint = MintExpr::new( quantity2.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), @@ -216,7 +215,7 @@ fn client_add_asset_with_name_length_more_than_limit_should_not_commit_transacti // Given let normal_asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity( + let create_asset = RegisterExpr::new(AssetDefinition::quantity( normal_asset_definition_id.clone(), )); test_client.submit(create_asset)?; @@ -225,7 +224,7 @@ fn client_add_asset_with_name_length_more_than_limit_should_not_commit_transacti let too_long_asset_name = "0".repeat(2_usize.pow(14)); let incorrect_asset_definition_id = AssetDefinitionId::from_str(&(too_long_asset_name + "#wonderland")).expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity( + let create_asset = RegisterExpr::new(AssetDefinition::quantity( incorrect_asset_definition_id.clone(), )); @@ -272,11 +271,11 @@ fn find_rate_and_make_exchange_isi_should_succeed() { let buyer_keypair = KeyPair::generate().expect("Failed to generate seller KeyPair."); let register_account = |account_id: AccountId, signature: PublicKey| { - RegisterBox::new(Account::new(account_id, [signature])) + RegisterExpr::new(Account::new(account_id, [signature])) }; let grant_alice_asset_transfer_permission = |asset_id: AssetId, owner_keypair: KeyPair| { - let allow_alice_to_transfer_asset = GrantBox::new( + let allow_alice_to_transfer_asset = GrantExpr::new( PermissionToken::new( "CanTransferUserAsset".parse().unwrap(), &json!({ "asset_id": asset_id }), @@ -303,7 +302,7 @@ fn find_rate_and_make_exchange_isi_should_succeed() { "exchange", account_id_new("dex", "exchange"), ); - let instructions: [InstructionBox; 12] = [ + let instructions: [InstructionExpr; 12] = [ register::domain("exchange").into(), register::domain("company").into(), register::domain("crypto").into(), @@ -313,17 +312,17 @@ fn find_rate_and_make_exchange_isi_should_succeed() { register::asset_definition("btc", "crypto").into(), register::asset_definition("eth", "crypto").into(), register::asset_definition("btc2eth_rate", "exchange").into(), - MintBox::new( + MintExpr::new( 200_u32.to_value(), IdBox::AssetId(asset_id_new("eth", "crypto", buyer_account_id.clone())), ) .into(), - MintBox::new( + MintExpr::new( 20_u32.to_value(), IdBox::AssetId(asset_id_new("btc", "crypto", seller_account_id.clone())), ) .into(), - MintBox::new(20_u32.to_value(), IdBox::AssetId(asset_id.clone())).into(), + MintExpr::new(20_u32.to_value(), IdBox::AssetId(asset_id.clone())).into(), ]; test_client .submit_all_blocking(instructions) @@ -333,15 +332,15 @@ fn find_rate_and_make_exchange_isi_should_succeed() { grant_alice_asset_transfer_permission(buyer_eth, buyer_keypair); test_client - .submit_all_blocking([Pair::new( - TransferBox::new( + .submit_all_blocking([PairExpr::new( + TransferExpr::new( IdBox::AssetId(asset_id_new("btc", "crypto", seller_account_id.clone())), EvaluatesTo::new_evaluates_to_value(Expression::Query( 
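// Reviewer note: the amount passed to `TransferExpr` below is not a literal —
// it wraps a query in `EvaluatesTo`, so the transferred quantity (the exchange
// rate) is resolved on-chain when the instruction executes. Isolated sketch,
// assuming the same data-model prelude; ids are whatever the caller supplies:
use iroha_data_model::prelude::*;

fn sketch_transfer_amount_from_query(
    source: AssetId,
    rate: AssetId,
    destination: AccountId,
) -> TransferExpr {
    TransferExpr::new(
        IdBox::AssetId(source),
        // Evaluated at execution time, not at submission time.
        EvaluatesTo::new_evaluates_to_value(Expression::Query(
            FindAssetQuantityById::new(rate).into(),
        )),
        IdBox::AccountId(destination),
    )
}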
FindAssetQuantityById::new(asset_id.clone()).into(), )), IdBox::AccountId(buyer_account_id.clone()), ), - TransferBox::new( + TransferExpr::new( IdBox::AssetId(asset_id_new("eth", "crypto", buyer_account_id)), EvaluatesTo::new_evaluates_to_value(Expression::Query( FindAssetQuantityById::new(asset_id).into(), @@ -392,6 +391,40 @@ fn find_rate_and_make_exchange_isi_should_succeed() { assert_eq!(expected_buyer_btc, buyer_btc_quantity); } +#[test] +fn transfer_asset_definition() { + let (_rt, _peer, test_client) = ::new().with_port(11_060).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let alice_id: AccountId = "alice@wonderland".parse().expect("Valid."); + let bob_id: AccountId = "bob@wonderland".parse().expect("Valid."); + let asset_definition_id: AssetDefinitionId = "asset#wonderland".parse().expect("Valid"); + + test_client + .submit_blocking(RegisterExpr::new(AssetDefinition::quantity( + asset_definition_id.clone(), + ))) + .expect("Failed to submit transaction"); + + let asset_definition = test_client + .request(FindAssetDefinitionById::new(asset_definition_id.clone())) + .expect("Failed to execute Iroha Query"); + assert_eq!(asset_definition.owned_by(), &alice_id); + + test_client + .submit_blocking(TransferExpr::new( + alice_id, + asset_definition_id.clone(), + bob_id.clone(), + )) + .expect("Failed to submit transaction"); + + let asset_definition = test_client + .request(FindAssetDefinitionById::new(asset_definition_id)) + .expect("Failed to execute Iroha Query"); + assert_eq!(asset_definition.owned_by(), &bob_id); +} + fn account_id_new(account_name: &str, account_domain: &str) -> AccountId { AccountId::new( account_name.parse().expect("Valid"), @@ -412,12 +445,12 @@ fn asset_id_new(definition_name: &str, definition_domain: &str, account_id: Acco mod register { use super::*; - pub fn domain(name: &str) -> RegisterBox { - RegisterBox::new(Domain::new(DomainId::from_str(name).expect("Valid"))) + pub fn domain(name: &str) -> RegisterExpr { + RegisterExpr::new(Domain::new(DomainId::from_str(name).expect("Valid"))) } - pub fn account(account_name: &str, domain_name: &str) -> RegisterBox { - RegisterBox::new(Account::new( + pub fn account(account_name: &str, domain_name: &str) -> RegisterExpr { + RegisterExpr::new(Account::new( AccountId::new( account_name.parse().expect("Valid"), domain_name.parse().expect("Valid"), @@ -426,8 +459,8 @@ mod register { )) } - pub fn asset_definition(asset_name: &str, domain_name: &str) -> RegisterBox { - RegisterBox::new(AssetDefinition::quantity(AssetDefinitionId::new( + pub fn asset_definition(asset_name: &str, domain_name: &str) -> RegisterExpr { + RegisterExpr::new(AssetDefinition::quantity(AssetDefinitionId::new( asset_name.parse().expect("Valid"), domain_name.parse().expect("Valid"), ))) diff --git a/client/tests/integration/asset_propagation.rs b/client/tests/integration/asset_propagation.rs index ddd34ee81b3..50e7152a81e 100644 --- a/client/tests/integration/asset_propagation.rs +++ b/client/tests/integration/asset_propagation.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, thread}; use eyre::Result; @@ -27,17 +25,17 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount_on_a .into_set_parameters(), )?; - let create_domain = RegisterBox::new(Domain::new(DomainId::from_str("domain")?)); + let create_domain = RegisterExpr::new(Domain::new(DomainId::from_str("domain")?)); let account_id = AccountId::from_str("account@domain")?; let (public_key, _) = 
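// Reviewer note: the new `transfer_asset_definition` test above introduces
// ownership transfer for definitions: `TransferExpr::new(owner, definition_id,
// new_owner)`, checked via `FindAssetDefinitionById(..).owned_by()`. Minimal
// sketch (harness assumptions as elsewhere; port illustrative):
fn sketch_transfer_definition_ownership() -> eyre::Result<()> {
    use iroha_data_model::prelude::*;
    use test_network::*;

    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_902).start_with_runtime();
    wait_for_genesis_committed(&[test_client.clone()], 0);

    let alice_id: AccountId = "alice@wonderland".parse()?;
    let bob_id: AccountId = "bob@wonderland".parse()?;
    let definition_id: AssetDefinitionId = "sketch#wonderland".parse()?;

    // Registering as alice makes her the initial owner.
    test_client.submit_blocking(RegisterExpr::new(AssetDefinition::quantity(
        definition_id.clone(),
    )))?;
    test_client.submit_blocking(TransferExpr::new(
        alice_id,
        definition_id.clone(),
        bob_id.clone(),
    ))?;

    let definition = test_client.request(FindAssetDefinitionById::new(definition_id))?;
    assert_eq!(definition.owned_by(), &bob_id);
    Ok(())
}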
KeyPair::generate()?.into(); - let create_account = RegisterBox::new(Account::new(account_id.clone(), [public_key])); + let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); let asset_definition_id = AssetDefinitionId::from_str("xor#domain")?; - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); client.submit_all([create_domain, create_account, create_asset])?; thread::sleep(pipeline_time * 3); //When let quantity: u32 = 200; - client.submit(MintBox::new( + client.submit(MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), @@ -48,7 +46,7 @@ fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount_on_a //Then let peer = network.peers.values().last().unwrap(); - client::Client::test(&peer.api_address, &peer.telemetry_address).poll_request( + client::Client::test(&peer.api_address).poll_request( client::asset::by_account_id(account_id), |result| { let assets = result.collect::>>().expect("Valid"); diff --git a/client/tests/integration/burn_public_keys.rs b/client/tests/integration/burn_public_keys.rs index aece78e66ce..c69618ffa30 100644 --- a/client/tests/integration/burn_public_keys.rs +++ b/client/tests/integration/burn_public_keys.rs @@ -1,15 +1,16 @@ -#![allow(clippy::pedantic, clippy::restriction)] - use iroha_client::client::{account, transaction, Client}; -use iroha_crypto::{KeyPair, PublicKey}; -use iroha_data_model::{isi::Instruction, prelude::*}; +use iroha_crypto::{HashOf, KeyPair, PublicKey}; +use iroha_data_model::{isi::Instruction, prelude::*, transaction::TransactionPayload}; use test_network::*; -fn submit_and_get( - client: &mut Client, +fn submit( + client: &Client, instructions: impl IntoIterator, submitter: Option<(AccountId, KeyPair)>, -) -> TransactionValue { +) -> ( + HashOf, + eyre::Result>, +) { let tx = if let Some((account_id, keypair)) = submitter { TransactionBuilder::new(account_id) .with_instructions(instructions) @@ -22,16 +23,17 @@ fn submit_and_get( client.sign_transaction(tx).unwrap() }; - let hash = tx.hash(); - let _ = client.submit_transaction_blocking(&tx); + (tx.hash(), client.submit_transaction_blocking(&tx)) +} +fn get(client: &Client, hash: HashOf) -> TransactionValue { client .request(transaction::by_hash(hash)) .unwrap() .transaction } -fn account_keys_count(client: &mut Client, account_id: AccountId) -> usize { +fn account_keys_count(client: &Client, account_id: AccountId) -> usize { let account = client.request(account::by_id(account_id)).unwrap(); let signatories = account.signatories(); signatories.len() @@ -41,60 +43,68 @@ fn account_keys_count(client: &mut Client, account_id: AccountId) -> usize { fn public_keys_cannot_be_burned_to_nothing() { const KEYS_COUNT: usize = 3; let charlie_id: AccountId = "charlie@wonderland".parse().expect("Valid"); - let charlie_keys_count = |client: &mut Client| account_keys_count(client, charlie_id.clone()); + let charlie_keys_count = |client: &Client| account_keys_count(client, charlie_id.clone()); - let (_rt, _peer, mut client) = ::new().with_port(10_045).start_with_runtime(); + let (_rt, _peer, client) = ::new().with_port(10_045).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); let charlie_initial_keypair = KeyPair::generate().unwrap(); - let register_charlie = RegisterBox::new(Account::new( + let register_charlie = RegisterExpr::new(Account::new( 
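// Reviewer note: `submit_and_get` is split into `submit`, which returns the
// payload hash together with the submission `Result`, and `get`, which fetches
// the committed `TransactionValue` by that hash. The point of the split: a
// rejected transaction can now be asserted on AND still inspected on-chain.
// Usage pattern, as exercised at the end of this test:
//
//     let (tx_hash, res) = submit(&client, [burn_the_last_key], Some((id, keypair)));
//     assert!(res.is_err());                  // submission is rejected...
//     let committed_txn = get(&client, tx_hash);
//     assert!(committed_txn.error.is_some()); // ...yet stored with its error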
charlie_id.clone(), [charlie_initial_keypair.public_key().clone()], )); - let _unused = submit_and_get(&mut client, [register_charlie], None); - let mut keys_count = charlie_keys_count(&mut client); + let (tx_hash, res) = submit(&client, [register_charlie], None); + res.unwrap(); + get(&client, tx_hash); + let mut keys_count = charlie_keys_count(&client); assert_eq!(keys_count, 1); let mint_keys = (0..KEYS_COUNT - 1).map(|_| { let (public_key, _) = KeyPair::generate().unwrap().into(); - MintBox::new(public_key, charlie_id.clone()) + MintExpr::new(public_key, charlie_id.clone()) }); - let _unused = submit_and_get( - &mut client, + let (tx_hash, res) = submit( + &client, mint_keys, Some((charlie_id.clone(), charlie_initial_keypair.clone())), ); - keys_count = charlie_keys_count(&mut client); + res.unwrap(); + get(&client, tx_hash); + keys_count = charlie_keys_count(&client); assert_eq!(keys_count, KEYS_COUNT); let charlie = client.request(account::by_id(charlie_id.clone())).unwrap(); let mut keys = charlie.signatories(); - let burn = |key: PublicKey| InstructionBox::from(BurnBox::new(key, charlie_id.clone())); + let burn = |key: PublicKey| InstructionExpr::from(BurnExpr::new(key, charlie_id.clone())); let burn_keys_leaving_one = keys .by_ref() .filter(|pub_key| pub_key != &charlie_initial_keypair.public_key()) .cloned() .map(burn); - let mut committed_txn = submit_and_get( - &mut client, + let (tx_hash, res) = submit( + &client, burn_keys_leaving_one, Some((charlie_id.clone(), charlie_initial_keypair.clone())), ); - keys_count = charlie_keys_count(&mut client); + res.unwrap(); + let committed_txn = get(&client, tx_hash); + keys_count = charlie_keys_count(&client); assert_eq!(keys_count, 1); assert!(committed_txn.error.is_none()); let burn_the_last_key = burn(charlie_initial_keypair.public_key().clone()); - committed_txn = submit_and_get( - &mut client, + let (tx_hash, res) = submit( + &client, std::iter::once(burn_the_last_key), Some((charlie_id.clone(), charlie_initial_keypair)), ); - keys_count = charlie_keys_count(&mut client); + assert!(res.is_err()); + let committed_txn = get(&client, tx_hash); + keys_count = charlie_keys_count(&client); assert_eq!(keys_count, 1); assert!(committed_txn.error.is_some()); } diff --git a/client/tests/integration/config.rs b/client/tests/integration/config.rs index 381f8173c3c..7a6470a9087 100644 --- a/client/tests/integration/config.rs +++ b/client/tests/integration/config.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use test_network::*; use super::{Builder, Configuration, ConfigurationProxy}; @@ -24,6 +22,6 @@ fn get_config() { assert_eq!(cfg_proxy.network.unwrap().build().unwrap(), test.network); assert_eq!( cfg_proxy.telemetry.unwrap().build().unwrap(), - test.telemetry + *test.telemetry ); } diff --git a/client/tests/integration/connected_peers.rs b/client/tests/integration/connected_peers.rs index 7977967113d..c649b2da531 100644 --- a/client/tests/integration/connected_peers.rs +++ b/client/tests/integration/connected_peers.rs @@ -1,5 +1,3 @@ -#![allow(clippy::pedantic, clippy::restriction)] - use std::thread; use eyre::{Context, Result}; @@ -28,7 +26,6 @@ fn connected_peers_with_f_1_0_1() -> Result<()> { fn connected_peers_with_f(faults: u64, start_port: Option) -> Result<()> { let n_peers = 3 * faults + 1; - #[allow(clippy::expect_used)] let (_rt, network, client) = ::start_test_with_runtime( (n_peers) .try_into() @@ -52,8 +49,8 @@ fn connected_peers_with_f(faults: u64, start_port: Option) -> Result<()> { // Unregister a peer: committed with 
f = `faults` // then `status.peers` decrements let peer = network.peers.values().last().unwrap(); - let peer_client = Client::test(&peer.api_address, &peer.telemetry_address); - let unregister_peer = UnregisterBox::new(IdBox::PeerId(peer.id.clone())); + let peer_client = Client::test(&peer.api_address); + let unregister_peer = UnregisterExpr::new(IdBox::PeerId(peer.id.clone())); client.submit_blocking(unregister_peer)?; thread::sleep(pipeline_time * 2); // Wait for some time to allow peers to connect status = client.get_status()?; @@ -64,7 +61,7 @@ fn connected_peers_with_f(faults: u64, start_port: Option) -> Result<()> { // Re-register the peer: committed with f = `faults` - 1 then // `status.peers` increments - let register_peer = RegisterBox::new(DataModelPeer::new(peer.id.clone())); + let register_peer = RegisterExpr::new(DataModelPeer::new(peer.id.clone())); client.submit_blocking(register_peer)?; thread::sleep(pipeline_time * 4); // Wait for some time to allow peers to connect status = client.get_status()?; diff --git a/client/tests/integration/domain_owner.rs b/client/tests/integration/domain_owner.rs new file mode 100644 index 00000000000..ab3f48e23b8 --- /dev/null +++ b/client/tests/integration/domain_owner.rs @@ -0,0 +1,310 @@ +use eyre::Result; +use iroha_crypto::KeyPair; +use iroha_data_model::{account::SignatureCheckCondition, prelude::*}; +use serde_json::json; +use test_network::*; + +#[test] +fn domain_owner_domain_permissions() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_080).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let kingdom_id: DomainId = "kingdom".parse()?; + + // "alice@wonderland" is owner of "kingdom" domain + let kingdom = Domain::new(kingdom_id.clone()); + test_client.submit_blocking(RegisterExpr::new(kingdom))?; + + // check that "alice@wonderland" as owner of domain can edit metadata in her domain + let key: Name = "key".parse()?; + let value: Name = "value".parse()?; + test_client.submit_blocking(SetKeyValueExpr::new(kingdom_id.clone(), key.clone(), value))?; + test_client.submit_blocking(RemoveKeyValueExpr::new(kingdom_id.clone(), key))?; + + // check that "alice@wonderland" as owner of domain can grant and revoke domain related permission tokens + let bob_id: AccountId = "bob@wonderland".parse()?; + let token = PermissionToken::new( + "CanUnregisterDomain".parse().unwrap(), + &json!({ "domain_id": kingdom_id }), + ); + test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?; + test_client.submit_blocking(RevokeExpr::new(token, bob_id))?; + + // check that "alice@wonderland" as owner of domain can unregister her domain + test_client.submit_blocking(UnregisterExpr::new(kingdom_id))?; + + Ok(()) +} + +#[test] +fn domain_owner_account_permissions() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_075).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let kingdom_id: DomainId = "kingdom".parse()?; + let mad_hatter_id: AccountId = "mad_hatter@kingdom".parse()?; + + // "alice@wonderland" is owner of "kingdom" domain + let kingdom = Domain::new(kingdom_id); + test_client.submit_blocking(RegisterExpr::new(kingdom))?; + + let mad_hatter_keypair = KeyPair::generate()?; + let mad_hatter = Account::new( + mad_hatter_id.clone(), + [mad_hatter_keypair.public_key().clone()], + ); + test_client.submit_blocking(RegisterExpr::new(mad_hatter))?; + + // check that "alice@wonderland" as owner of domain can burn and mint public keys for 
accounts in her domain + let mad_hatter_new_keypair = KeyPair::generate()?; + test_client.submit_blocking(MintExpr::new( + mad_hatter_new_keypair.public_key().clone(), + mad_hatter_id.clone(), + ))?; + test_client.submit_blocking(BurnExpr::new( + mad_hatter_new_keypair.public_key().clone(), + mad_hatter_id.clone(), + ))?; + + // check that "alice@wonderland" as owner of domain can change signature check condition for accounts in her domain + test_client.submit_blocking(MintExpr::new( + SignatureCheckCondition::AnyAccountSignatureOr(Vec::new().into()), + mad_hatter_id.clone(), + ))?; + + // check that "alice@wonderland" as owner of domain can edit metadata of account in her domain + let key: Name = "key".parse()?; + let value: Name = "value".parse()?; + test_client.submit_blocking(SetKeyValueExpr::new( + mad_hatter_id.clone(), + key.clone(), + value, + ))?; + test_client.submit_blocking(RemoveKeyValueExpr::new(mad_hatter_id.clone(), key))?; + + // check that "alice@wonderland" as owner of domain can grant and revoke account related permission tokens in her domain + let bob_id: AccountId = "bob@wonderland".parse()?; + let token = PermissionToken::new( + "CanUnregisterAccount".parse().unwrap(), + &json!({ "account_id": mad_hatter_id }), + ); + test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?; + test_client.submit_blocking(RevokeExpr::new(token, bob_id))?; + + // check that "alice@wonderland" as owner of domain can unregister accounts in her domain + test_client.submit_blocking(UnregisterExpr::new(mad_hatter_id))?; + + Ok(()) +} + +#[test] +fn domain_owner_asset_definition_permissions() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_085).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let kingdom_id: DomainId = "kingdom".parse()?; + let bob_id: AccountId = "bob@kingdom".parse()?; + let rabbit_id: AccountId = "rabbit@kingdom".parse()?; + let coin_id: AssetDefinitionId = "coin#kingdom".parse()?; + + // "alice@wonderland" is owner of "kingdom" domain + let kingdom = Domain::new(kingdom_id); + test_client.submit_blocking(RegisterExpr::new(kingdom))?; + + let bob_keypair = KeyPair::generate()?; + let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]); + test_client.submit_blocking(RegisterExpr::new(bob))?; + + let rabbit = Account::new(rabbit_id.clone(), []); + test_client.submit_blocking(RegisterExpr::new(rabbit))?; + + // register asset definitions by "bob@kingdom" so he is owner of it + let coin = AssetDefinition::quantity(coin_id.clone()); + let transaction = TransactionBuilder::new(bob_id.clone()) + .with_instructions([RegisterExpr::new(coin)]) + .sign(bob_keypair)?; + test_client.submit_transaction_blocking(&transaction)?; + + // check that "alice@wonderland" as owner of domain can transfer asset definitions in her domain + test_client.submit_blocking(TransferExpr::new(bob_id, coin_id.clone(), rabbit_id))?; + + // check that "alice@wonderland" as owner of domain can edit metadata of asset definition in her domain + let key: Name = "key".parse()?; + let value: Name = "value".parse()?; + test_client.submit_blocking(SetKeyValueExpr::new(coin_id.clone(), key.clone(), value))?; + test_client.submit_blocking(RemoveKeyValueExpr::new(coin_id.clone(), key))?; + + // check that "alice@wonderland" as owner of domain can grant and revoke asset definition related permission tokens in her domain + let bob_id: AccountId = "bob@wonderland".parse()?; + let token = PermissionToken::new( + 
"CanUnregisterAssetDefinition".parse().unwrap(), + &json!({ "asset_definition_id": coin_id }), + ); + test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?; + test_client.submit_blocking(RevokeExpr::new(token, bob_id))?; + + // check that "alice@wonderland" as owner of domain can unregister asset definitions in her domain + test_client.submit_blocking(UnregisterExpr::new(coin_id))?; + + Ok(()) +} + +#[test] +fn domain_owner_asset_permissions() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_090).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let alice_id: AccountId = "alice@wonderland".parse()?; + let kingdom_id: DomainId = "kingdom".parse()?; + let bob_id: AccountId = "bob@kingdom".parse()?; + let coin_id: AssetDefinitionId = "coin#kingdom".parse()?; + let store_id: AssetDefinitionId = "store#kingdom".parse()?; + + // "alice@wonderland" is owner of "kingdom" domain + let kingdom = Domain::new(kingdom_id); + test_client.submit_blocking(RegisterExpr::new(kingdom))?; + + let bob_keypair = KeyPair::generate()?; + let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]); + test_client.submit_blocking(RegisterExpr::new(bob))?; + + // register asset definitions by "bob@kingdom" so he is owner of it + let coin = AssetDefinition::quantity(coin_id.clone()); + let store = AssetDefinition::store(store_id.clone()); + let transaction = TransactionBuilder::new(bob_id.clone()) + .with_instructions([RegisterExpr::new(coin), RegisterExpr::new(store)]) + .sign(bob_keypair)?; + test_client.submit_transaction_blocking(&transaction)?; + + // check that "alice@wonderland" as owner of domain can register and unregister assets in her domain + let bob_coin_id = AssetId::new(coin_id, bob_id.clone()); + let bob_coin = Asset::new(bob_coin_id.clone(), 30u32); + test_client.submit_blocking(RegisterExpr::new(bob_coin))?; + test_client.submit_blocking(UnregisterExpr::new(bob_coin_id.clone()))?; + + // check that "alice@wonderland" as owner of domain can burn, mint and transfer assets in her domain + test_client.submit_blocking(MintExpr::new(10u32.to_value(), bob_coin_id.clone()))?; + test_client.submit_blocking(BurnExpr::new(5u32.to_value(), bob_coin_id.clone()))?; + test_client.submit_blocking(TransferExpr::new(bob_coin_id, 5u32.to_value(), alice_id))?; + + // check that "alice@wonderland" as owner of domain can edit metadata of store asset in her domain + let key: Name = "key".parse()?; + let value: Name = "value".parse()?; + let bob_store_id = AssetId::new(store_id, bob_id); + test_client.submit_blocking(SetKeyValueExpr::new( + bob_store_id.clone(), + key.clone(), + value, + ))?; + test_client.submit_blocking(RemoveKeyValueExpr::new(bob_store_id.clone(), key))?; + + // check that "alice@wonderland" as owner of domain can grant and revoke asset related permission tokens in her domain + let bob_id: AccountId = "bob@wonderland".parse()?; + let token = PermissionToken::new( + "CanUnregisterUserAsset".parse().unwrap(), + &json!({ "asset_id": bob_store_id }), + ); + test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?; + test_client.submit_blocking(RevokeExpr::new(token, bob_id))?; + + Ok(()) +} + +#[test] +fn domain_owner_trigger_permissions() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_095).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let alice_id: AccountId = "alice@wonderland".parse()?; + let kingdom_id: DomainId = "kingdom".parse()?; + 
let bob_id: AccountId = "bob@kingdom".parse()?; + + // "alice@wonderland" is owner of "kingdom" domain + let kingdom = Domain::new(kingdom_id); + test_client.submit_blocking(RegisterExpr::new(kingdom))?; + + let bob_keypair = KeyPair::generate()?; + let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]); + test_client.submit_blocking(RegisterExpr::new(bob))?; + + let asset_definition_id = "rose#wonderland".parse()?; + let asset_id = AssetId::new(asset_definition_id, alice_id.clone()); + let trigger_id: TriggerId = "trigger$kingdom".parse()?; + + let trigger_instructions = vec![MintExpr::new(1_u32, asset_id)]; + let register_trigger = RegisterExpr::new(Trigger::new( + trigger_id.clone(), + Action::new( + trigger_instructions, + Repeats::from(2_u32), + bob_id, + // FIXME: due to restriction in `ExecuteTriggerEventFilter` it's impossible to execute trigger on behalf of multiple users + TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( + trigger_id.clone(), + alice_id, + )), + ), + )); + test_client.submit_blocking(register_trigger)?; + + // check that "alice@wonderland" as owner of domain can edit repetitions of triggers in her domain + test_client.submit_blocking(MintExpr::new(1_u32, trigger_id.clone()))?; + test_client.submit_blocking(BurnExpr::new(1_u32, trigger_id.clone()))?; + + // check that "alice@wonderland" as owner of domain can call triggers in her domain + let execute_trigger = ExecuteTriggerExpr::new(trigger_id.clone()); + let _result = test_client.submit_blocking(execute_trigger)?; + + // check that "alice@wonderland" as owner of domain can grant and revoke trigger related permission tokens in her domain + let bob_id: AccountId = "bob@wonderland".parse()?; + let token = PermissionToken::new( + "CanUnregisterUserTrigger".parse().unwrap(), + &json!({ "trigger_id": trigger_id }), + ); + test_client.submit_blocking(GrantExpr::new(token.clone(), bob_id.clone()))?; + test_client.submit_blocking(RevokeExpr::new(token, bob_id))?; + + // check that "alice@wonderland" as owner of domain can unregister triggers in her domain + test_client.submit_blocking(UnregisterExpr::new(trigger_id))?; + + Ok(()) +} + +#[deprecated( + since = "2.0.0-pre-rc.20", + note = "This test suite is deprecated, use test_transfer_domains.py instead" +)] +#[ignore = "migrated to client cli python tests"] +#[test] +fn domain_owner_transfer() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_100).start_with_runtime(); + wait_for_genesis_committed(&[test_client.clone()], 0); + + let alice_id: AccountId = "alice@wonderland".parse()?; + let kingdom_id: DomainId = "kingdom".parse()?; + let bob_id: AccountId = "bob@kingdom".parse()?; + + // "alice@wonderland" is owner of "kingdom" domain + let kingdom = Domain::new(kingdom_id.clone()); + test_client.submit_blocking(RegisterExpr::new(kingdom))?; + + let bob_keypair = KeyPair::generate()?; + let bob = Account::new(bob_id.clone(), [bob_keypair.public_key().clone()]); + test_client.submit_blocking(RegisterExpr::new(bob))?; + + let domain = test_client.request(FindDomainById::new(kingdom_id.clone()))?; + assert_eq!(domain.owned_by(), &alice_id); + + test_client + .submit_blocking(TransferExpr::new( + alice_id, + kingdom_id.clone(), + bob_id.clone(), + )) + .expect("Failed to submit transaction"); + + let asset_definition = test_client.request(FindDomainById::new(kingdom_id))?; + assert_eq!(asset_definition.owned_by(), &bob_id); + + Ok(()) +} diff --git a/client/tests/integration/events/data.rs 
b/client/tests/integration/events/data.rs index 51c2f245f9c..e4c71bb6fad 100644 --- a/client/tests/integration/events/data.rs +++ b/client/tests/integration/events/data.rs @@ -1,4 +1,3 @@ -#![allow(clippy::restriction)] use std::{fmt::Write as _, str::FromStr, sync::mpsc, thread}; use eyre::Result; @@ -9,14 +8,14 @@ use test_network::*; use crate::wasm::utils::wasm_template; -fn produce_instructions() -> Vec { +fn produce_instructions() -> Vec { let domains = (0..4) .map(|domain_index: usize| Domain::new(domain_index.to_string().parse().expect("Valid"))); - let registers: [InstructionBox; 4] = domains + let registers: [InstructionExpr; 4] = domains .into_iter() - .map(RegisterBox::new) - .map(InstructionBox::from) + .map(RegisterExpr::new) + .map(InstructionExpr::from) .collect::>() .try_into() .unwrap(); @@ -30,12 +29,12 @@ fn produce_instructions() -> Vec { // domain "2" // domain "3" registers[0].clone(), - Pair::new::( + PairExpr::new( registers[1].clone(), - Conditional::with_otherwise( + ConditionalExpr::with_otherwise( false, - FailBox::new("unreachable"), - SequenceBox::new([registers[2].clone(), registers[3].clone()]), + Fail::new("unreachable"), + SequenceExpr::new([registers[2].clone(), registers[3].clone()]), ), ) .into(), @@ -82,7 +81,7 @@ fn wasm_execution_should_produce_events() -> Result<()> { (func (export "{main_fn_name}") (param) {isi_calls})) "#, - main_fn_name = iroha_data_model::wasm::export::fn_names::SMART_CONTRACT_MAIN, + main_fn_name = "_iroha_smart_contract_main", wasm_template = wasm_template(&isi_hex.concat()), isi_calls = isi_calls ); @@ -162,8 +161,8 @@ fn produce_multiple_events() -> Result<()> { init_receiver.recv()?; // Registering role - let alice_id = ::Id::from_str("alice@wonderland")?; - let role_id = ::Id::from_str("TEST_ROLE")?; + let alice_id = AccountId::from_str("alice@wonderland")?; + let role_id = RoleId::from_str("TEST_ROLE")?; let token_1 = PermissionToken::new( "CanRemoveKeyValueInUserAccount".parse()?, &json!({ "account_id": alice_id }), @@ -175,16 +174,16 @@ fn produce_multiple_events() -> Result<()> { let role = iroha_data_model::role::Role::new(role_id.clone()) .add_permission(token_1.clone()) .add_permission(token_2.clone()); - let instructions = [RegisterBox::new(role.clone())]; + let instructions = [RegisterExpr::new(role.clone())]; client.submit_all_blocking(instructions)?; // Grants role to Bob - let bob_id = ::Id::from_str("bob@wonderland")?; - let grant_role = GrantBox::new(role_id.clone(), bob_id.clone()); + let bob_id = AccountId::from_str("bob@wonderland")?; + let grant_role = GrantExpr::new(role_id.clone(), bob_id.clone()); client.submit_blocking(grant_role)?; // Unregister role - let unregister_role = UnregisterBox::new(role_id.clone()); + let unregister_role = UnregisterExpr::new(role_id.clone()); client.submit_blocking(unregister_role)?; // Inspect produced events diff --git a/client/tests/integration/events/notification.rs b/client/tests/integration/events/notification.rs index 12858a02581..89b9a53a7ea 100644 --- a/client/tests/integration/events/notification.rs +++ b/client/tests/integration/events/notification.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, sync::mpsc, thread, time::Duration}; use eyre::{eyre, Result, WrapErr}; @@ -16,11 +14,11 @@ fn trigger_completion_success_should_produce_event() -> Result<()> { let asset_id = AssetId::new(asset_definition_id, account_id); let trigger_id = TriggerId::from_str("mint_rose")?; - let instruction = MintBox::new(1_u32, asset_id.clone()); - 
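// Reviewer note: trigger registration keeps its shape under the rename —
// `RegisterExpr(Trigger::new(id, Action::new(instructions, repeats, authority,
// filter)))`, fired via `ExecuteTriggerExpr`. Condensed sketch of the pattern
// used by the notification tests below (harness assumptions as elsewhere):
fn sketch_register_and_fire_trigger() -> eyre::Result<()> {
    use std::str::FromStr as _;

    use iroha_data_model::prelude::*;
    use test_network::*;

    let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_903).start_with_runtime();
    wait_for_genesis_committed(&[test_client.clone()], 0);

    let account_id: AccountId = "alice@wonderland".parse()?;
    let asset_id = AssetId::new("rose#wonderland".parse()?, account_id.clone());
    let trigger_id = TriggerId::from_str("sketch_mint_rose")?;

    let register_trigger = RegisterExpr::new(Trigger::new(
        trigger_id.clone(),
        Action::new(
            vec![InstructionExpr::from(MintExpr::new(1_u32, asset_id))],
            Repeats::Indefinitely,
            account_id.clone(),
            TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new(
                trigger_id.clone(),
                account_id,
            )),
        ),
    ));
    test_client.submit_blocking(register_trigger)?;
    test_client.submit_blocking(ExecuteTriggerExpr::new(trigger_id))?;
    Ok(())
}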
let register_trigger = RegisterBox::new(Trigger::new( + let instruction = MintExpr::new(1_u32, asset_id.clone()); + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id.clone(), Action::new( - vec![InstructionBox::from(instruction)], + vec![InstructionExpr::from(instruction)], Repeats::Indefinitely, asset_id.account_id.clone(), TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( @@ -31,7 +29,7 @@ fn trigger_completion_success_should_produce_event() -> Result<()> { )); test_client.submit_blocking(register_trigger)?; - let call_trigger = ExecuteTriggerBox::new(trigger_id.clone()); + let call_trigger = ExecuteTriggerExpr::new(trigger_id.clone()); let thread_client = test_client.clone(); let (sender, receiver) = mpsc::channel(); @@ -65,11 +63,11 @@ fn trigger_completion_failure_should_produce_event() -> Result<()> { let account_id: AccountId = "alice@wonderland".parse()?; let trigger_id = TriggerId::from_str("fail_box")?; - let instruction = FailBox::new("Fail box"); - let register_trigger = RegisterBox::new(Trigger::new( + let instruction = Fail::new("Fail box"); + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id.clone(), Action::new( - vec![InstructionBox::from(instruction)], + vec![InstructionExpr::from(instruction)], Repeats::Indefinitely, account_id.clone(), TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( @@ -80,7 +78,7 @@ fn trigger_completion_failure_should_produce_event() -> Result<()> { )); test_client.submit_blocking(register_trigger)?; - let call_trigger = ExecuteTriggerBox::new(trigger_id.clone()); + let call_trigger = ExecuteTriggerExpr::new(trigger_id.clone()); let thread_client = test_client.clone(); let (sender, receiver) = mpsc::channel(); diff --git a/client/tests/integration/events/pipeline.rs b/client/tests/integration/events/pipeline.rs index b359077c41f..0d41bbb9f4a 100644 --- a/client/tests/integration/events/pipeline.rs +++ b/client/tests/integration/events/pipeline.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::thread::{self, JoinHandle}; use eyre::Result; @@ -26,7 +24,7 @@ fn transaction_with_no_instructions_should_be_committed() -> Result<()> { // #[ignore = "Experiment"] #[test] fn transaction_with_fail_instruction_should_be_rejected() -> Result<()> { - let fail = FailBox::new("Should be rejected"); + let fail = Fail::new("Should be rejected"); test_with_instruction_and_status_and_port( Some(fail.into()), PipelineStatusKind::Rejected, @@ -36,7 +34,7 @@ fn transaction_with_fail_instruction_should_be_rejected() -> Result<()> { #[allow(dead_code, clippy::needless_range_loop, clippy::needless_pass_by_value)] fn test_with_instruction_and_status_and_port( - instruction: Option, + instruction: Option, should_be: PipelineStatusKind, port: u16, ) -> Result<()> { @@ -112,7 +110,7 @@ fn committed_block_must_be_available_in_kura() { .expect("Failed to subscribe for events"); client - .submit(FailBox::new("Dummy instruction")) + .submit(Fail::new("Dummy instruction")) .expect("Failed to submit transaction"); let event = event_iter.next().expect("Block must be committed"); diff --git a/client/tests/integration/mod.rs b/client/tests/integration/mod.rs index c1dc262a0de..5bb44460bb0 100644 --- a/client/tests/integration/mod.rs +++ b/client/tests/integration/mod.rs @@ -10,6 +10,7 @@ mod asset_propagation; mod burn_public_keys; mod config; mod connected_peers; +mod domain_owner; mod events; mod multiple_blocks_created; mod multisignature_account; diff --git 
a/client/tests/integration/multiple_blocks_created.rs b/client/tests/integration/multiple_blocks_created.rs index 0cbb55fdffc..7f7c4066369 100644 --- a/client/tests/integration/multiple_blocks_created.rs +++ b/client/tests/integration/multiple_blocks_created.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::thread; use eyre::Result; @@ -29,12 +27,12 @@ fn long_multiple_blocks_created() -> Result<()> { .into_set_parameters(), )?; - let create_domain = RegisterBox::new(Domain::new("domain".parse()?)); + let create_domain = RegisterExpr::new(Domain::new("domain".parse()?)); let account_id: AccountId = "account@domain".parse()?; let (public_key, _) = KeyPair::generate()?.into(); - let create_account = RegisterBox::new(Account::new(account_id.clone(), [public_key])); + let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?; - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); client.submit_all([create_domain, create_account, create_asset])?; @@ -44,7 +42,7 @@ fn long_multiple_blocks_created() -> Result<()> { //When for _ in 0..N_BLOCKS { let quantity: u32 = 1; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), @@ -60,7 +58,7 @@ fn long_multiple_blocks_created() -> Result<()> { //Then let peer = network.peers().last().unwrap(); - Client::test(&peer.api_address, &peer.telemetry_address).poll_request( + Client::test(&peer.api_address).poll_request( client::asset::by_account_id(account_id), |result| { let assets = result.collect::>>().expect("Valid"); diff --git a/client/tests/integration/multisignature_account.rs b/client/tests/integration/multisignature_account.rs index dd5f1454328..dfba65c7379 100644 --- a/client/tests/integration/multisignature_account.rs +++ b/client/tests/integration/multisignature_account.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::thread; use eyre::Result; @@ -19,26 +17,26 @@ fn transaction_signed_by_new_signatory_of_account_should_pass() -> Result<()> { // Given let account_id: AccountId = "alice@wonderland".parse().expect("Valid"); let asset_definition_id: AssetDefinitionId = "xor#wonderland".parse().expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let key_pair = KeyPair::generate()?; - let add_signatory = MintBox::new( + let add_signatory = MintExpr::new( key_pair.public_key().clone(), IdBox::AccountId(account_id.clone()), ); - let instructions: [InstructionBox; 2] = [create_asset.into(), add_signatory.into()]; + let instructions: [InstructionExpr; 2] = [create_asset.into(), add_signatory.into()]; client.submit_all(instructions)?; thread::sleep(pipeline_time * 2); //When let quantity: u32 = 200; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), account_id.clone(), )), ); - Client::test_with_key(&peer.api_address, &peer.telemetry_address, key_pair).submit_till( + Client::test_with_key(&peer.api_address, key_pair).submit_till( mint_asset, client::asset::by_account_id(account_id), |result| { diff --git a/client/tests/integration/multisignature_transaction.rs 
b/client/tests/integration/multisignature_transaction.rs index cb2abe8ad8a..abbe63bab8a 100644 --- a/client/tests/integration/multisignature_transaction.rs +++ b/client/tests/integration/multisignature_transaction.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, thread, time::Duration}; use eyre::Result; @@ -7,10 +5,8 @@ use iroha_client::client::{self, Client, QueryResult}; use iroha_config::client::Configuration as ClientConfiguration; use iroha_crypto::KeyPair; use iroha_data_model::{ - account::TRANSACTION_SIGNATORIES_VALUE, parameter::{default::MAX_TRANSACTIONS_IN_BLOCK, ParametersBuilder}, prelude::*, - val_vec, }; use test_network::*; @@ -33,32 +29,23 @@ fn multisignature_transactions_should_wait_for_all_signatures() -> Result<()> { let alice_key_pair = get_key_pair(); let key_pair_2 = KeyPair::generate()?; let asset_definition_id = AssetDefinitionId::from_str("camomile#wonderland")?; - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); - let set_signature_condition = MintBox::new( - SignatureCheckCondition::new(EvaluatesTo::new_unchecked(ContainsAll::new( - EvaluatesTo::new_unchecked(ContextValue::new(Name::from_str( - TRANSACTION_SIGNATORIES_VALUE, - )?)), - val_vec![ - alice_key_pair.public_key().clone(), - key_pair_2.public_key().clone(), - ], - ))), + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let set_signature_condition = MintExpr::new( + SignatureCheckCondition::AllAccountSignaturesAnd( + vec![key_pair_2.public_key().clone()].into(), + ), IdBox::AccountId(alice_id.clone()), ); - let mut client_configuration = ClientConfiguration::test( - &network.genesis.api_address, - &network.genesis.telemetry_address, - ); + let mut client_configuration = ClientConfiguration::test(&network.genesis.api_address); let client = Client::new(&client_configuration)?; - let instructions: [InstructionBox; 2] = [create_asset.into(), set_signature_condition.into()]; + let instructions: [InstructionExpr; 2] = [create_asset.into(), set_signature_condition.into()]; client.submit_all_blocking(instructions)?; //When let quantity: u32 = 200; let asset_id = AssetId::new(asset_definition_id, alice_id.clone()); - let mint_asset = MintBox::new(quantity.to_value(), IdBox::AssetId(asset_id.clone())); + let mint_asset = MintExpr::new(quantity.to_value(), IdBox::AssetId(asset_id.clone())); let (public_key1, private_key1) = alice_key_pair.into(); client_configuration.account_id = alice_id.clone(); @@ -84,7 +71,8 @@ fn multisignature_transactions_should_wait_for_all_signatures() -> Result<()> { .collect::>>()?; assert_eq!( assets.len(), - 2 // Alice has roses and cabbage from Genesis + 2, // Alice has roses and cabbage from Genesis, but doesn't yet have camomile + "Multisignature transaction was committed before all required signatures were added" ); let (public_key2, private_key2) = key_pair_2.into(); client_configuration.public_key = public_key2; diff --git a/client/tests/integration/non_mintable.rs b/client/tests/integration/non_mintable.rs index 404ad8d3192..cee51a0bf86 100644 --- a/client/tests/integration/non_mintable.rs +++ b/client/tests/integration/non_mintable.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::str::FromStr as _; use eyre::Result; @@ -9,18 +7,18 @@ use test_network::*; #[test] fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_625).start_with_runtime(); + let (_rt, _peer, 
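// Reviewer note: the hand-rolled `ContainsAll`/`ContextValue` signature
// expression is replaced by the dedicated `SignatureCheckCondition` enum;
// `AllAccountSignaturesAnd(extra)` requires every account signatory plus the
// listed extra keys (`AnyAccountSignatureOr` is the permissive counterpart
// seen in the domain-owner tests). Sketch of attaching the condition:
use iroha_crypto::PublicKey;
use iroha_data_model::prelude::*;

fn sketch_require_extra_signature(account_id: AccountId, extra_key: PublicKey) -> MintExpr {
    MintExpr::new(
        SignatureCheckCondition::AllAccountSignaturesAnd(vec![extra_key].into()),
        IdBox::AccountId(account_id),
    )
}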
test_client) = ::new().with_port(10_625).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset = - RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once()); + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once()); let metadata = UnlimitedMetadata::default(); - let mint = MintBox::new( + let mint = MintExpr::new( 200_u32.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), @@ -28,7 +26,7 @@ fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> { )), ); - let instructions: [InstructionBox; 2] = [create_asset.into(), mint.clone().into()]; + let instructions: [InstructionExpr; 2] = [create_asset.into(), mint.clone().into()]; let tx = test_client.build_transaction(instructions, metadata)?; // We can register and mint the non-mintable token @@ -59,17 +57,17 @@ fn non_mintable_asset_can_be_minted_once_but_not_twice() -> Result<()> { #[test] fn non_mintable_asset_cannot_be_minted_if_registered_with_non_zero_value() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_610).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_610).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset = - RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once()); + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once()); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); - let register_asset = RegisterBox::new(Asset::new(asset_id.clone(), 1_u32)); + let register_asset = RegisterExpr::new(Asset::new(asset_id.clone(), 1_u32)); // We can register the non-mintable token test_client.submit_all([create_asset, register_asset.clone()])?; @@ -85,7 +83,7 @@ fn non_mintable_asset_cannot_be_minted_if_registered_with_non_zero_value() -> Re assert!(test_client.submit_blocking(register_asset).is_err()); // And can't be minted - let mint = MintBox::new(1_u32.to_value(), IdBox::AssetId(asset_id)); + let mint = MintExpr::new(1_u32.to_value(), IdBox::AssetId(asset_id)); assert!(test_client.submit_blocking(mint).is_err()); Ok(()) @@ -93,21 +91,21 @@ fn non_mintable_asset_cannot_be_minted_if_registered_with_non_zero_value() -> Re #[test] fn non_mintable_asset_can_be_minted_if_registered_with_zero_value() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_630).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_630).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); // Given let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset = - RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once()); + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone()).mintable_once()); let asset_id = AssetId::new(asset_definition_id.clone(), account_id.clone()); - let register_asset = RegisterBox::new(Asset::new(asset_id.clone(), 0_u32)); - let mint = 
MintBox::new(1_u32.to_value(), IdBox::AssetId(asset_id)); + let register_asset = RegisterExpr::new(Asset::new(asset_id.clone(), 0_u32)); + let mint = MintExpr::new(1_u32.to_value(), IdBox::AssetId(asset_id)); // We can register the non-mintable token wih zero value and then mint it - let instructions: [InstructionBox; 3] = + let instructions: [InstructionExpr; 3] = [create_asset.into(), register_asset.into(), mint.into()]; test_client.submit_all(instructions)?; test_client.poll_request(client::asset::by_account_id(account_id), |result| { diff --git a/client/tests/integration/offline_peers.rs b/client/tests/integration/offline_peers.rs index ee20b58ca4e..5896aed3b20 100644 --- a/client/tests/integration/offline_peers.rs +++ b/client/tests/integration/offline_peers.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use eyre::Result; use iroha_client::client::{self, QueryResult}; use iroha_data_model::{ diff --git a/client/tests/integration/pagination.rs b/client/tests/integration/pagination.rs index e50251eb980..7a7f4141096 100644 --- a/client/tests/integration/pagination.rs +++ b/client/tests/integration/pagination.rs @@ -1,35 +1,56 @@ -#![allow(clippy::restriction)] - use std::num::{NonZeroU32, NonZeroU64}; use eyre::Result; -use iroha_client::client::{asset, QueryResult}; +use iroha_client::client::{asset, Client, QueryResult}; use iroha_data_model::{asset::AssetDefinition, prelude::*, query::Pagination}; use test_network::*; #[test] -fn client_add_asset_quantity_to_existing_asset_should_increase_asset_amount() -> Result<()> { +fn limits_should_work() -> Result<()> { let (_rt, _peer, client) = ::new().with_port(10_690).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); - let register: Vec = ('a'..='z') // This is a subtle mistake, I'm glad we can lint it now. + register_assets(&client)?; + + let vec = &client + .build_query(asset::all_definitions()) + .with_pagination(Pagination { + limit: NonZeroU32::new(5), + start: NonZeroU64::new(5), + }) + .execute()? + .collect::>>()?; + assert_eq!(vec.len(), 5); + Ok(()) +} + +#[test] +fn fetch_size_should_work() -> Result<()> { + let (_rt, _peer, client) = ::new().with_port(11_120).start_with_runtime(); + wait_for_genesis_committed(&vec![client.clone()], 0); + + register_assets(&client)?; + + let iter = client + .build_query(asset::all_definitions()) + .with_pagination(Pagination { + limit: NonZeroU32::new(20), + start: NonZeroU64::new(0), + }) + .with_fetch_size(FetchSize::new(Some(NonZeroU32::new(12).expect("Valid")))) + .execute()?; + assert_eq!(iter.batch_len(), 12); + Ok(()) +} + +fn register_assets(client: &Client) -> Result<()> { + let register: Vec = ('a'..='z') .map(|c| c.to_string()) .map(|name| (name + "#wonderland").parse().expect("Valid")) .map(|asset_definition_id| { - RegisterBox::new(AssetDefinition::quantity(asset_definition_id)).into() + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)).into() }) .collect(); - client.submit_all_blocking(register)?; - - let vec = client - .request_with_pagination( - asset::all_definitions(), - Pagination { - limit: NonZeroU32::new(5), - start: NonZeroU64::new(5), - }, - )? 
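// Reviewer note: `request_with_pagination` gives way to the builder chain
// `build_query(..).with_pagination(..).with_fetch_size(..).execute()`. `limit`
// caps the total result count; `fetch_size` only sizes each batch of the lazy
// cursor. Sketch (harness assumptions as elsewhere; numbers illustrative):
fn sketch_paginated_query(client: &iroha_client::client::Client) -> eyre::Result<()> {
    use std::num::{NonZeroU32, NonZeroU64};

    use iroha_client::client::{asset, QueryResult};
    use iroha_data_model::{prelude::*, query::Pagination};

    let definitions = client
        .build_query(asset::all_definitions())
        .with_pagination(Pagination {
            limit: NonZeroU32::new(5),
            start: NonZeroU64::new(5),
        })
        .with_fetch_size(FetchSize::new(Some(NonZeroU32::new(3).expect("Valid"))))
        .execute()?
        .collect::<QueryResult<Vec<_>>>()?;
    assert!(definitions.len() <= 5);
    Ok(())
}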
- .collect::>>()?; - assert_eq!(vec.len(), 5); + let _ = client.submit_all_blocking(register)?; Ok(()) } diff --git a/client/tests/integration/permissions.rs b/client/tests/integration/permissions.rs index 033ab3e1030..7fa9e93cd03 100644 --- a/client/tests/integration/permissions.rs +++ b/client/tests/integration/permissions.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, thread, time::Duration}; use eyre::Result; @@ -18,13 +16,13 @@ fn genesis_transactions_are_validated() { let mut genesis = GenesisNetwork::test(true).expect("Expected genesis"); - let grant_invalid_token = GrantBox::new( + let grant_invalid_token = GrantExpr::new( PermissionToken::new("InvalidToken".parse().unwrap(), &json!(null)), AccountId::from_str("alice@wonderland").unwrap(), ); - let VersionedSignedTransaction::V1(tx_ref) = &mut genesis.transactions.last_mut().unwrap().0; - match &mut tx_ref.payload.instructions { + let tx_ref = &mut genesis.transactions.last_mut().unwrap().0; + match &mut tx_ref.payload_mut().instructions { Executable::Instructions(instructions) => { instructions.push(grant_invalid_token.into()); } @@ -34,7 +32,7 @@ fn genesis_transactions_are_validated() { // Starting peer let (_rt, _peer, test_client) = ::new() .with_genesis(genesis) - .with_port(11_100) + .with_port(11_110) .start_with_runtime(); // Checking that peer contains no blocks multiple times @@ -78,7 +76,7 @@ fn permissions_disallow_asset_transfer() { let bob_id: AccountId = "bob@wonderland".parse().expect("Valid"); let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid"); let asset_definition_id: AssetDefinitionId = "xor#wonderland".parse().expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair."); let alice_start_assets = get_assets(&iroha_client, &alice_id); @@ -87,7 +85,7 @@ fn permissions_disallow_asset_transfer() { .expect("Failed to prepare state."); let quantity: u32 = 200; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id.clone(), bob_id.clone())), ); @@ -96,7 +94,7 @@ fn permissions_disallow_asset_transfer() { .expect("Failed to create asset."); //When - let transfer_asset = TransferBox::new( + let transfer_asset = TransferExpr::new( IdBox::AssetId(AssetId::new(asset_definition_id, bob_id)), quantity.to_value(), IdBox::AccountId(alice_id.clone()), @@ -131,7 +129,7 @@ fn permissions_disallow_asset_burn() { let bob_id: AccountId = "bob@wonderland".parse().expect("Valid"); let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid"); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair."); let alice_start_assets = get_assets(&iroha_client, &alice_id); @@ -141,14 +139,14 @@ fn permissions_disallow_asset_burn() { .expect("Failed to prepare state."); let quantity: u32 = 200; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id.clone(), bob_id)), ); iroha_client 
.submit_blocking(mint_asset) .expect("Failed to create asset."); - let burn_asset = BurnBox::new( + let burn_asset = BurnExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id, mouse_id.clone())), ); @@ -184,7 +182,7 @@ fn account_can_query_only_its_own_domain() -> Result<()> { // Given let domain_id: DomainId = "wonderland".parse()?; let new_domain_id: DomainId = "wonderland2".parse()?; - let register_domain = RegisterBox::new(Domain::new(new_domain_id.clone())); + let register_domain = RegisterExpr::new(Domain::new(new_domain_id.clone())); client.submit_blocking(register_domain)?; @@ -213,20 +211,20 @@ fn permissions_differ_not_only_by_names() { let new_shoes_definition = AssetDefinition::store(shoes_definition_id.clone()); client .submit_all_blocking([ - RegisterBox::new(new_hat_definition), - RegisterBox::new(new_shoes_definition), + RegisterExpr::new(new_hat_definition), + RegisterExpr::new(new_shoes_definition), ]) .expect("Failed to register new asset definitions"); // Registering mouse let new_mouse_account = Account::new(mouse_id.clone(), [mouse_keypair.public_key().clone()]); client - .submit_blocking(RegisterBox::new(new_mouse_account)) + .submit_blocking(RegisterExpr::new(new_mouse_account)) .expect("Failed to register mouse"); // Granting permission to Alice to modify metadata in Mouse's hats let mouse_hat_id = AssetId::new(hat_definition_id, mouse_id.clone()); - let allow_alice_to_set_key_value_in_hats = GrantBox::new( + let allow_alice_to_set_key_value_in_hats = GrantExpr::new( PermissionToken::new( "CanSetKeyValueInUserAsset".parse().unwrap(), &json!({ "asset_id": mouse_hat_id }), @@ -244,7 +242,7 @@ fn permissions_differ_not_only_by_names() { // Checking that Alice can modify Mouse's hats ... client - .submit_blocking(SetKeyValueBox::new( + .submit_blocking(SetKeyValueExpr::new( mouse_hat_id, Name::from_str("color").expect("Valid"), "red".to_owned(), @@ -253,7 +251,7 @@ fn permissions_differ_not_only_by_names() { // ... 
but not shoes let mouse_shoes_id = AssetId::new(shoes_definition_id, mouse_id.clone()); - let set_shoes_color = SetKeyValueBox::new( + let set_shoes_color = SetKeyValueExpr::new( mouse_shoes_id.clone(), Name::from_str("color").expect("Valid"), "yellow".to_owned(), @@ -263,7 +261,7 @@ fn permissions_differ_not_only_by_names() { .expect_err("Expected Alice to fail to modify Mouse's shoes"); // Granting permission to Alice to modify metadata in Mouse's shoes - let allow_alice_to_set_key_value_in_shoes = GrantBox::new( + let allow_alice_to_set_key_value_in_shoes = GrantExpr::new( PermissionToken::new( "CanSetKeyValueInUserAsset".parse().unwrap(), &json!({ "asset_id": mouse_shoes_id }), @@ -296,12 +294,12 @@ fn stored_vs_granted_token_payload() -> Result<()> { // Registering mouse and asset definition let asset_definition_id: AssetDefinitionId = "xor#wonderland".parse().expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::store(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::store(asset_definition_id.clone())); let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid"); let mouse_keypair = iroha_crypto::KeyPair::generate().expect("Failed to generate KeyPair."); let new_mouse_account = Account::new(mouse_id.clone(), [mouse_keypair.public_key().clone()]); - let instructions: [InstructionBox; 2] = [ - RegisterBox::new(new_mouse_account).into(), + let instructions: [InstructionExpr; 2] = [ + RegisterExpr::new(new_mouse_account).into(), create_asset.into(), ]; iroha_client @@ -310,7 +308,7 @@ fn stored_vs_granted_token_payload() -> Result<()> { // Allow alice to mint mouse asset and mint initial value let mouse_asset = AssetId::new(asset_definition_id, mouse_id.clone()); - let allow_alice_to_set_key_value_in_mouse_asset = GrantBox::new( + let allow_alice_to_set_key_value_in_mouse_asset = GrantExpr::new( PermissionToken::from_str_unchecked( "CanSetKeyValueInUserAsset".parse().unwrap(), // NOTE: Introduced additional whitespaces in the serialized form @@ -329,7 +327,7 @@ fn stored_vs_granted_token_payload() -> Result<()> { // Check that alice can indeed mint mouse asset let set_key_value = - SetKeyValueBox::new(mouse_asset, Name::from_str("color")?, "red".to_owned()); + SetKeyValueExpr::new(mouse_asset, Name::from_str("color")?, "red".to_owned()); iroha_client .submit_blocking(set_key_value) .expect("Failed to mint asset for mouse."); diff --git a/client/tests/integration/queries/account.rs b/client/tests/integration/queries/account.rs index f9a07b84560..8698eb77c3f 100644 --- a/client/tests/integration/queries/account.rs +++ b/client/tests/integration/queries/account.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{collections::HashSet, str::FromStr as _}; use eyre::Result; @@ -13,10 +11,9 @@ fn find_accounts_with_asset() -> Result<()> { wait_for_genesis_committed(&[test_client.clone()], 0); // Registering new asset definition - let definition_id = - ::Id::from_str("test_coin#wonderland").expect("Valid"); + let definition_id = AssetDefinitionId::from_str("test_coin#wonderland").expect("Valid"); let asset_definition = AssetDefinition::quantity(definition_id.clone()); - test_client.submit_blocking(RegisterBox::new(asset_definition.clone()))?; + test_client.submit_blocking(RegisterExpr::new(asset_definition.clone()))?; // Checking results before all let received_asset_definition = @@ -41,15 +38,15 @@ fn find_accounts_with_asset() -> Result<()> { .iter() .skip(1) // Alice has already been registered in genesis .cloned() - 
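// Reviewer note: the metadata instructions follow the same rename
// (`SetKeyValueBox` -> `SetKeyValueExpr`, `RemoveKeyValueExpr` likewise). The
// point these permission tests make: a token's JSON payload pins it to one
// concrete `asset_id`, so a grant for "hats" says nothing about "shoes".
// Metadata round-trip sketch:
fn sketch_asset_metadata_roundtrip(
    client: &iroha_client::client::Client,
    asset_id: iroha_data_model::asset::AssetId,
) -> eyre::Result<()> {
    use std::str::FromStr as _;

    use iroha_data_model::prelude::*;

    let key = Name::from_str("color")?;
    client.submit_blocking(SetKeyValueExpr::new(
        asset_id.clone(),
        key.clone(),
        "red".to_owned(),
    ))?;
    client.submit_blocking(RemoveKeyValueExpr::new(asset_id, key))?;
    Ok(())
}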
.map(|account_id| RegisterBox::new(Account::new(account_id, []))) + .map(|account_id| RegisterExpr::new(Account::new(account_id, []))) .collect::<Vec<_>>(); test_client.submit_all_blocking(register_accounts)?; let mint_asset = accounts .iter() .cloned() - .map(|account_id| <Asset as Identifiable>::Id::new(definition_id.clone(), account_id)) - .map(|asset_id| MintBox::new(1_u32, asset_id)) + .map(|account_id| AssetId::new(definition_id.clone(), account_id)) + .map(|asset_id| MintExpr::new(1_u32, asset_id)) .collect::<Vec<_>>(); test_client.submit_all_blocking(mint_asset)?; diff --git a/client/tests/integration/queries/asset.rs b/client/tests/integration/queries/asset.rs index 79b94d3dc38..e833c801972 100644 --- a/client/tests/integration/queries/asset.rs +++ b/client/tests/integration/queries/asset.rs @@ -1,4 +1,3 @@ -#![allow(clippy::restriction)] use eyre::Result; use iroha_client::client::ClientQueryError; use iroha_crypto::KeyPair; @@ -18,7 +17,7 @@ fn find_asset_total_quantity() -> Result<()> { // Register new domain let domain_id: DomainId = "looking_glass".parse()?; let domain = Domain::new(domain_id); - test_client.submit_blocking(RegisterBox::new(domain))?; + test_client.submit_blocking(RegisterExpr::new(domain))?; let accounts: [AccountId; 5] = [ "alice@wonderland".parse()?, @@ -39,7 +38,7 @@ fn find_asset_total_quantity() -> Result<()> { .skip(1) // Alice has already been registered in genesis .cloned() .zip(keys.iter().map(KeyPair::public_key).cloned()) - .map(|(account_id, public_key)| RegisterBox::new(Account::new(account_id, [public_key]))) + .map(|(account_id, public_key)| RegisterExpr::new(Account::new(account_id, [public_key]))) .collect::<Vec<_>>(); test_client.submit_all_blocking(register_accounts)?; @@ -78,15 +77,15 @@ fn find_asset_total_quantity() -> Result<()> { ), ] { // Registering new asset definition - let definition_id: <AssetDefinition as Identifiable>::Id = + let definition_id: AssetDefinitionId = definition.parse().expect("Failed to parse `definition_id`"); let asset_definition = AssetDefinition::new(definition_id.clone(), asset_value_type); - test_client.submit_blocking(RegisterBox::new(asset_definition.clone()))?; + test_client.submit_blocking(RegisterExpr::new(asset_definition.clone()))?; let asset_ids = accounts .iter() .cloned() - .map(|account_id| <Asset as Identifiable>::Id::new(definition_id.clone(), account_id)) + .map(|account_id| AssetId::new(definition_id.clone(), account_id)) .collect::<Vec<_>>(); // Assert that initial total quantity before any burns and mints is zero @@ -99,20 +98,20 @@ fn find_asset_total_quantity() -> Result<()> { .iter() .cloned() .map(|asset_id| Asset::new(asset_id, initial_value.clone())) - .map(RegisterBox::new) + .map(RegisterExpr::new) .collect::<Vec<_>>(); test_client.submit_all_blocking(register_asset)?; let mint_asset = asset_ids .iter() .cloned() - .map(|asset_id| MintBox::new(to_mint.clone(), asset_id)); + .map(|asset_id| MintExpr::new(to_mint.clone(), asset_id)); test_client.submit_all_blocking(mint_asset)?; let burn_asset = asset_ids .iter() .cloned() - .map(|asset_id| BurnBox::new(to_burn.clone(), asset_id)) + .map(|asset_id| BurnExpr::new(to_burn.clone(), asset_id)) .collect::<Vec<_>>(); test_client.submit_all_blocking(burn_asset)?; @@ -125,7 +124,7 @@ fn find_asset_total_quantity() -> Result<()> { let unregister_asset = asset_ids .iter() .cloned() - .map(UnregisterBox::new) + .map(UnregisterExpr::new) .collect::<Vec<_>>(); test_client.submit_all_blocking(unregister_asset)?; @@ -136,7 +135,7 @@ fn find_asset_total_quantity() -> Result<()> { assert!(total_asset_quantity.is_zero_value()); // Unregister asset definition -
test_client.submit_blocking(UnregisterBox::new(definition_id.clone()))?; + test_client.submit_blocking(UnregisterExpr::new(definition_id.clone()))?; // Assert that total asset quantity cleared with unregistering of asset definition let result = test_client.request(FindTotalAssetQuantityByAssetDefinitionId::new( @@ -151,15 +150,14 @@ } // Test for `Store` asset value type - let definition_id: <AssetDefinition as Identifiable>::Id = - "store#wonderland".parse().expect("Valid"); + let definition_id: AssetDefinitionId = "store#wonderland".parse().expect("Valid"); let asset_definition = AssetDefinition::store(definition_id.clone()); - test_client.submit_blocking(RegisterBox::new(asset_definition))?; + test_client.submit_blocking(RegisterExpr::new(asset_definition))?; let asset_ids = accounts .iter() .cloned() - .map(|account_id| <Asset as Identifiable>::Id::new(definition_id.clone(), account_id)) + .map(|account_id| AssetId::new(definition_id.clone(), account_id)) .collect::<Vec<_>>(); // Assert that initial total quantity before any registrations and unregistrations is zero @@ -172,7 +170,7 @@ fn find_asset_total_quantity() -> Result<()> { .iter() .cloned() .map(|asset_id| Asset::new(asset_id, Metadata::default())) - .map(RegisterBox::new) + .map(RegisterExpr::new) .collect::<Vec<_>>(); test_client.submit_all_blocking(register_asset)?; @@ -185,7 +183,7 @@ fn find_asset_total_quantity() -> Result<()> { let unregister_asset = asset_ids .iter() .cloned() - .map(UnregisterBox::new) + .map(UnregisterExpr::new) .collect::<Vec<_>>(); test_client.submit_all_blocking(unregister_asset)?; @@ -196,7 +194,7 @@ fn find_asset_total_quantity() -> Result<()> { assert!(total_asset_quantity.is_zero_value()); // Unregister asset definition - test_client.submit_blocking(UnregisterBox::new(definition_id.clone()))?; + test_client.submit_blocking(UnregisterExpr::new(definition_id.clone()))?; // Assert that total asset quantity cleared with unregistering of asset definition let result = test_client.request(FindTotalAssetQuantityByAssetDefinitionId::new( diff --git a/client/tests/integration/queries/mod.rs b/client/tests/integration/queries/mod.rs index a3805003c80..01c266191c7 100644 --- a/client/tests/integration/queries/mod.rs +++ b/client/tests/integration/queries/mod.rs @@ -1,3 +1,74 @@ +use std::str::FromStr as _; + +use eyre::{bail, Result}; +use iroha_client::client::{self, ClientQueryError}; +use iroha_data_model::{ + prelude::*, + query::{cursor::ForwardCursor, error::QueryExecutionFail, MAX_FETCH_SIZE}, +}; +use test_network::*; + mod account; mod asset; mod role; + +#[test] +fn too_big_fetch_size_is_not_allowed() { + let (_rt, _peer, client) = <PeerBuilder>::new().with_port(11_130).start_with_runtime(); + wait_for_genesis_committed(&[client.clone()], 0); + + let err = client + .build_query(client::asset::all()) + .with_fetch_size(FetchSize::new(Some(MAX_FETCH_SIZE.checked_add(1).unwrap()))) + .execute() + .expect_err("Should fail"); + + assert!(matches!( + err, + ClientQueryError::Validation(ValidationFail::QueryFailed( + QueryExecutionFail::FetchSizeTooBig + )) + )); +} + +#[test] +fn live_query_is_dropped_after_smart_contract_end() -> Result<()> { + let (_rt, _peer, client) = <PeerBuilder>::new().with_port(11_140).start_with_runtime(); + wait_for_genesis_committed(&[client.clone()], 0); + + let wasm = iroha_wasm_builder::Builder::new( + "tests/integration/smartcontracts/query_assets_and_save_cursor", + ) + .show_output() + .build()? + .optimize()?
+ .into_bytes()?; + + let transaction = client.build_transaction( + WasmSmartContract::from_compiled(wasm), + UnlimitedMetadata::default(), + )?; + client.submit_transaction_blocking(&transaction)?; + + let metadata_value = client.request(FindAccountKeyValueByIdAndKey::new( + client.account_id.clone(), + Name::from_str("cursor").unwrap(), + ))?; + let Value::String(cursor) = metadata_value.0 else { + bail!("Expected `Value::String`, got {:?}", metadata_value.0); + }; + let asset_cursor = serde_json::from_str::<ForwardCursor>(&cursor)?; + + let err = client + .request_with_cursor::<Vec<Asset>>(asset_cursor) + .expect_err("Request with cursor from smart contract should fail"); + + assert!(matches!( + err, + ClientQueryError::Validation(ValidationFail::QueryFailed( + QueryExecutionFail::UnknownCursor + )) + )); + + Ok(()) +} diff --git a/client/tests/integration/queries/role.rs b/client/tests/integration/queries/role.rs index 36f42004123..2151b809f34 100644 --- a/client/tests/integration/queries/role.rs +++ b/client/tests/integration/queries/role.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::collections::HashSet; use eyre::Result; @@ -8,7 +6,7 @@ use iroha_data_model::{prelude::*, query::error::QueryExecutionFail}; use serde_json::json; use test_network::*; -fn create_role_ids() -> [<Role as Identifiable>::Id; 5] { +fn create_role_ids() -> [RoleId; 5] { [ "a".parse().expect("Valid"), "b".parse().expect("Valid"), @@ -29,7 +27,7 @@ fn find_roles() -> Result<()> { let register_roles = role_ids .iter() .cloned() - .map(|role_id| RegisterBox::new(Role::new(role_id))) + .map(|role_id| RegisterExpr::new(Role::new(role_id))) .collect::<Vec<_>>(); test_client.submit_all_blocking(register_roles)?; @@ -61,7 +59,7 @@ fn find_role_ids() -> Result<()> { let register_roles = role_ids .iter() .cloned() - .map(|role_id| RegisterBox::new(Role::new(role_id))) + .map(|role_id| RegisterExpr::new(Role::new(role_id))) .collect::<Vec<_>>(); test_client.submit_all_blocking(register_roles)?; @@ -83,11 +81,11 @@ fn find_role_by_id() -> Result<()> { let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_535).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); - let role_id: <Role as Identifiable>::Id = "root".parse().expect("Valid"); + let role_id: RoleId = "root".parse().expect("Valid"); let new_role = Role::new(role_id.clone()); // Registering role - let register_role = RegisterBox::new(new_role.clone()); + let register_role = RegisterExpr::new(new_role.clone()); test_client.submit_blocking(register_role)?; let found_role = test_client.request(client::role::by_id(role_id))?; @@ -103,7 +101,7 @@ fn find_unregistered_role_by_id() { let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_540).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); - let role_id: <Role as Identifiable>::Id = "root".parse().expect("Valid"); + let role_id: RoleId = "root".parse().expect("Valid"); let found_role = test_client.request(client::role::by_id(role_id)); @@ -123,14 +121,14 @@ fn find_roles_by_account_id() -> Result<()> { wait_for_genesis_committed(&[test_client.clone()], 0); let role_ids = create_role_ids(); - let alice_id: <Account as Identifiable>::Id = "alice@wonderland".parse().expect("Valid"); + let alice_id: AccountId = "alice@wonderland".parse().expect("Valid"); // Registering roles let register_roles = role_ids .iter() .cloned() .map(|role_id| { - RegisterBox::new(Role::new(role_id).add_permission(PermissionToken::new( + RegisterExpr::new(Role::new(role_id).add_permission(PermissionToken::new( "CanSetKeyValueInUserAccount".parse().unwrap(), &json!({ "account_id": alice_id }), ))) }) @@ -142,7 +140,7
@@ fn find_roles_by_account_id() -> Result<()> { let grant_roles = role_ids .iter() .cloned() - .map(|role_id| GrantBox::new(role_id, alice_id.clone())) + .map(|role_id| GrantExpr::new(role_id, alice_id.clone())) .collect::<Vec<_>>(); test_client.submit_all_blocking(grant_roles)?; diff --git a/client/tests/integration/query_errors.rs b/client/tests/integration/query_errors.rs index 636048cb75b..1d307b69e4a 100644 --- a/client/tests/integration/query_errors.rs +++ b/client/tests/integration/query_errors.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::str::FromStr; use iroha_client::client::{self, ClientQueryError}; diff --git a/client/tests/integration/restart_peer.rs b/client/tests/integration/restart_peer.rs index dbc4f2cd082..d3de54b4aaf 100644 --- a/client/tests/integration/restart_peer.rs +++ b/client/tests/integration/restart_peer.rs @@ -1,10 +1,9 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr, sync::Arc}; use eyre::Result; use iroha_client::client::{self, QueryResult}; use iroha_data_model::prelude::*; +use iroha_primitives::unique_vec; use tempfile::TempDir; use test_network::*; use tokio::runtime::Runtime; @@ -17,14 +16,14 @@ fn restarted_peer_should_have_the_same_asset_amount() -> Result<()> { let mut configuration = Configuration::test(); let mut peer = <PeerBuilder>::new().with_port(10_000).build()?; - configuration.sumeragi.trusted_peers.peers = std::iter::once(peer.id.clone()).collect(); + configuration.sumeragi.trusted_peers.peers = unique_vec![peer.id.clone()]; let account_id = AccountId::from_str("alice@wonderland").unwrap(); let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").unwrap(); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let quantity: u32 = 200; - let mut iroha_client = client::Client::test(&peer.api_address, &peer.telemetry_address); + let iroha_client = client::Client::test(&peer.api_address); { let rt = Runtime::test(); @@ -37,7 +36,7 @@ fn restarted_peer_should_have_the_same_asset_amount() -> Result<()> { wait_for_genesis_committed(&vec![iroha_client.clone()], 0); iroha_client.submit_blocking(create_asset)?; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), diff --git a/client/tests/integration/roles.rs b/client/tests/integration/roles.rs index 7a030a76053..326487c3fb7 100644 --- a/client/tests/integration/roles.rs +++ b/client/tests/integration/roles.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::str::FromStr as _; use eyre::Result; @@ -14,7 +12,7 @@ fn register_empty_role() -> Result<()> { wait_for_genesis_committed(&vec![test_client.clone()], 0); let role_id = "root".parse().expect("Valid"); - let register_role = RegisterBox::new(Role::new(role_id)); + let register_role = RegisterExpr::new(Role::new(role_id)); test_client.submit(register_role)?; Ok(()) @@ -29,7 +27,7 @@ fn register_role_with_empty_token_params() -> Result<()> { let token = PermissionToken::new("token".parse()?, &json!(null)); let role = Role::new(role_id).add_permission(token); - test_client.submit(RegisterBox::new(role))?; + test_client.submit(RegisterExpr::new(role))?; Ok(()) } @@ -48,19 +46,19 @@ fn register_and_grant_role_for_metadata_access() -> Result<()> { let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_700).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let
alice_id = <Account as Identifiable>::Id::from_str("alice@wonderland")?; - let mouse_id = <Account as Identifiable>::Id::from_str("mouse@wonderland")?; + let alice_id = AccountId::from_str("alice@wonderland")?; + let mouse_id = AccountId::from_str("mouse@wonderland")?; // Registering Mouse let mouse_key_pair = iroha_crypto::KeyPair::generate()?; - let register_mouse = RegisterBox::new(Account::new( + let register_mouse = RegisterExpr::new(Account::new( mouse_id.clone(), [mouse_key_pair.public_key().clone()], )); test_client.submit_blocking(register_mouse)?; // Registering role - let role_id = <Role as Identifiable>::Id::from_str("ACCESS_TO_MOUSE_METADATA")?; + let role_id = RoleId::from_str("ACCESS_TO_MOUSE_METADATA")?; let role = Role::new(role_id.clone()) .add_permission(PermissionToken::new( "CanSetKeyValueInUserAccount".parse()?, @@ -70,18 +68,18 @@ fn register_and_grant_role_for_metadata_access() -> Result<()> { "CanRemoveKeyValueInUserAccount".parse()?, &json!({ "account_id": mouse_id }), )); - let register_role = RegisterBox::new(role); + let register_role = RegisterExpr::new(role); test_client.submit_blocking(register_role)?; // Mouse grants role to Alice - let grant_role = GrantBox::new(role_id.clone(), alice_id.clone()); + let grant_role = GrantExpr::new(role_id.clone(), alice_id.clone()); let grant_role_tx = TransactionBuilder::new(mouse_id.clone()) .with_instructions([grant_role]) .sign(mouse_key_pair)?; test_client.submit_transaction_blocking(&grant_role_tx)?; // Alice modifies Mouse's metadata - let set_key_value = SetKeyValueBox::new( + let set_key_value = SetKeyValueExpr::new( mouse_id, Name::from_str("key").expect("Valid"), Value::String("value".to_owned()), @@ -102,16 +100,16 @@ fn unregistered_role_removed_from_account() -> Result<()> { let (_rt, _peer, test_client) = <PeerBuilder>::new().with_port(10_705).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let role_id: <Role as Identifiable>::Id = "root".parse().expect("Valid"); - let alice_id: <Account as Identifiable>::Id = "alice@wonderland".parse().expect("Valid"); - let mouse_id: <Account as Identifiable>::Id = "mouse@wonderland".parse().expect("Valid"); + let role_id: RoleId = "root".parse().expect("Valid"); + let alice_id: AccountId = "alice@wonderland".parse().expect("Valid"); + let mouse_id: AccountId = "mouse@wonderland".parse().expect("Valid"); // Registering Mouse - let register_mouse = RegisterBox::new(Account::new(mouse_id.clone(), [])); + let register_mouse = RegisterExpr::new(Account::new(mouse_id.clone(), [])); test_client.submit_blocking(register_mouse)?; // Register root role - let register_role = RegisterBox::new(Role::new(role_id.clone()).add_permission( + let register_role = RegisterExpr::new(Role::new(role_id.clone()).add_permission( PermissionToken::new( "CanSetKeyValueInUserAccount".parse()?, &json!({ "account_id": alice_id }), @@ -120,7 +118,7 @@ fn unregistered_role_removed_from_account() -> Result<()> { test_client.submit_blocking(register_role)?; // Grant root role to Mouse - let grant_role = GrantBox::new(role_id.clone(), mouse_id.clone()); + let grant_role = GrantExpr::new(role_id.clone(), mouse_id.clone()); test_client.submit_blocking(grant_role)?; // Check that Mouse has root role @@ -130,7 +128,7 @@ fn unregistered_role_removed_from_account() -> Result<()> { assert!(found_mouse_roles.contains(&role_id)); // Unregister root role - let unregister_role = UnregisterBox::new(role_id.clone()); + let unregister_role = UnregisterExpr::new(role_id.clone()); test_client.submit_blocking(unregister_role)?; // Check that Mouse doesn't have the root role @@ -155,7 +153,7 @@ fn role_with_invalid_permissions_is_not_accepted()
-> Result<()> { )); let err = test_client - .submit_blocking(RegisterBox::new(role)) + .submit_blocking(RegisterExpr::new(role)) .expect_err("Submitting role with invalid permission token should fail"); let rejection_reason = err diff --git a/client/tests/integration/set_parameter.rs b/client/tests/integration/set_parameter.rs index a9533f5f541..c31563db662 100644 --- a/client/tests/integration/set_parameter.rs +++ b/client/tests/integration/set_parameter.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::str::FromStr; use eyre::Result; @@ -14,7 +12,7 @@ fn can_change_parameter_value() -> Result<()> { let parameter = Parameter::from_str("?BlockTime=4000")?; let parameter_id = ParameterId::from_str("BlockTime")?; - let param_box = SetParameterBox::new(parameter); + let param_box = SetParameterExpr::new(parameter); let old_params = test_client .request(client::parameter::all())? @@ -46,13 +44,13 @@ fn parameter_propagated() -> Result<()> { wait_for_genesis_committed(&vec![test_client.clone()], 0); let too_long_domain_name: DomainId = "0".repeat(2_usize.pow(8)).parse()?; - let create_domain = RegisterBox::new(Domain::new(too_long_domain_name)); + let create_domain = RegisterExpr::new(Domain::new(too_long_domain_name)); let _ = test_client .submit_blocking(create_domain.clone()) .expect_err("Should fail before ident length limits update"); let parameter = Parameter::from_str("?WSVIdentLengthLimits=1,256_LL")?; - let param_box = SetParameterBox::new(parameter); + let param_box = SetParameterExpr::new(parameter); test_client.submit_blocking(param_box)?; test_client diff --git a/client/tests/integration/smartcontracts/.cargo/config.toml b/client/tests/integration/smartcontracts/.cargo/config.toml new file mode 100644 index 00000000000..f4e8c002fc2 --- /dev/null +++ b/client/tests/integration/smartcontracts/.cargo/config.toml @@ -0,0 +1,2 @@ +[build] +target = "wasm32-unknown-unknown" diff --git a/client/tests/integration/smartcontracts/Cargo.toml b/client/tests/integration/smartcontracts/Cargo.toml index ae766717c1b..1ab1801377d 100644 --- a/client/tests/integration/smartcontracts/Cargo.toml +++ b/client/tests/integration/smartcontracts/Cargo.toml @@ -1,6 +1,6 @@ [workspace.package] edition = "2021" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" # TODO: teams are being deprecated update the authors URL authors = ["Iroha 2 team <https://github.com/orgs/soramitsu/teams/iroha2>"] @@ -11,9 +11,10 @@ resolver = "2" members = [ "create_nft_for_every_user_trigger", "mint_rose_trigger", - "validator_with_admin", - "validator_with_custom_token", - "validator_with_migration_fail", + "executor_with_admin", + "executor_with_custom_token", + "executor_with_migration_fail", + "query_assets_and_save_cursor", ] [profile.dev] @@ -27,13 +28,15 @@ opt-level = "z" # Optimize for size vs speed with "s"/"z"(removes vectorization) codegen-units = 1 # Further reduces binary size but increases compilation time [workspace.dependencies] -iroha_wasm = { version = "=2.0.0-pre-rc.19", path = "../../../../wasm", features = ["debug"]} -iroha_trigger = { version = "=2.0.0-pre-rc.19", path = "../../../../wasm/trigger", features = ["debug"]} -iroha_validator = { version = "=2.0.0-pre-rc.19", path = "../../../../wasm/validator" } -iroha_schema = { version = "=2.0.0-pre-rc.19", path = "../../../../schema" } +iroha_smart_contract = { version = "=2.0.0-pre-rc.20", path = "../../../../smart_contract", features = ["debug"]} +iroha_trigger = { version = "=2.0.0-pre-rc.20", path = "../../../../smart_contract/trigger", features = ["debug"]} +iroha_executor = { version
= "=2.0.0-pre-rc.20", path = "../../../../smart_contract/executor" } +iroha_schema = { version = "=2.0.0-pre-rc.20", path = "../../../../schema" } parity-scale-codec = { version = "3.2.1", default-features = false } -panic-halt = "0.2.0" anyhow = { version = "1.0.71", default-features = false } serde = { version = "1.0.151", default-features = false } serde_json = { version = "1.0.91", default-features = false } + +lol_alloc = "0.4.0" +panic-halt = "0.2.0" diff --git a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/Cargo.toml b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/Cargo.toml index b13b19b0d81..a01eeabcb9e 100644 --- a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/Cargo.toml +++ b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/Cargo.toml @@ -12,4 +12,6 @@ crate-type = ['cdylib'] [dependencies] iroha_trigger.workspace = true + panic-halt.workspace = true +lol_alloc.workspace = true diff --git a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs index 5a01c663f5f..5fa18856b48 100644 --- a/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs +++ b/client/tests/integration/smartcontracts/create_nft_for_every_user_trigger/src/lib.rs @@ -1,5 +1,4 @@ //! Smartcontract which creates new nft for every user - #![no_std] extern crate alloc; @@ -9,16 +8,22 @@ extern crate panic_halt; use alloc::{format, string::ToString}; use iroha_trigger::prelude::*; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new()); #[iroha_trigger::main] fn main(_owner: AccountId, _event: Event) { - iroha_trigger::info!("Executing trigger"); + iroha_trigger::log::info!("Executing trigger"); - let accounts = FindAllAccounts.execute().dbg_unwrap(); + let accounts_cursor = FindAllAccounts.execute().dbg_unwrap(); let limits = MetadataLimits::new(256, 256); - for account in accounts { + for account in accounts_cursor { + let account = account.dbg_unwrap(); + let mut metadata = Metadata::new(); let name = format!( "nft_for_{}_in_{}", @@ -35,28 +40,29 @@ fn main(_owner: AccountId, _event: Event) { let nft_definition = AssetDefinition::store(nft_id.clone()) .mintable_once() .with_metadata(metadata); - let account_nft_id = <Asset as Identifiable>::Id::new(nft_id, account.id().clone()); + let account_nft_id = AssetId::new(nft_id, account.id().clone()); let account_nft = Asset::new(account_nft_id, Metadata::new()); - RegisterBox::new(nft_definition).execute().dbg_unwrap(); - RegisterBox::new(account_nft).execute().dbg_unwrap(); + RegisterExpr::new(nft_definition).execute().dbg_unwrap(); + RegisterExpr::new(account_nft).execute().dbg_unwrap(); } - iroha_trigger::info!("Smart contract executed successfully"); + iroha_trigger::log::info!("Smart contract executed successfully"); } -fn generate_new_nft_id(account_id: &<Account as Identifiable>::Id) -> AssetDefinitionId { +fn generate_new_nft_id(account_id: &AccountId) -> AssetDefinitionId { let assets = FindAssetsByAccountId::new(account_id.clone()) .execute() .dbg_unwrap(); let new_number = assets .into_iter() + .map(|res| res.dbg_unwrap()) .filter(|asset| asset.id().definition_id().to_string().starts_with("nft_")) .count() .checked_add(1) .dbg_unwrap(); - iroha_trigger::debug!(&format!("New number: {}", new_number)); + iroha_trigger::log::debug!(&format!("New number:
{}", new_number)); format!( "nft_number_{}_for_{}#{}", diff --git a/client/tests/integration/smartcontracts/validator_with_admin/Cargo.toml b/client/tests/integration/smartcontracts/executor_with_admin/Cargo.toml similarity index 71% rename from client/tests/integration/smartcontracts/validator_with_admin/Cargo.toml rename to client/tests/integration/smartcontracts/executor_with_admin/Cargo.toml index 1b465081974..c48ea913d35 100644 --- a/client/tests/integration/smartcontracts/validator_with_admin/Cargo.toml +++ b/client/tests/integration/smartcontracts/executor_with_admin/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "validator_with_admin" +name = "executor_with_admin" edition.workspace = true version.workspace = true @@ -11,7 +11,8 @@ license.workspace = true crate-type = ['cdylib'] [dependencies] -iroha_validator.workspace = true +iroha_executor.workspace = true iroha_schema.workspace = true panic-halt.workspace = true +lol_alloc.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs new file mode 100644 index 00000000000..1b3b6443ef8 --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_admin/src/lib.rs @@ -0,0 +1,34 @@ +//! Runtime Executor which allows any instruction executed by `admin@admin` account. +//! If authority is not `admin@admin` then default validation is used as a backup. + +#![no_std] + +#[cfg(not(test))] +extern crate panic_halt; + +use iroha_executor::{parse, prelude::*, smart_contract}; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new()); + +#[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)] +#[visit(custom(visit_instruction))] +struct Executor { + verdict: Result, + block_height: u64, + host: smart_contract::Host, +} + +fn visit_instruction(executor: &mut Executor, authority: &AccountId, isi: &InstructionExpr) { + if parse!("admin@admin" as AccountId) == *authority { + pass!(executor); + } + + iroha_executor::default::visit_instruction(executor, authority, isi); +} + +#[entrypoint] +pub fn migrate(_block_height: u64) -> MigrationResult { + Ok(()) +} diff --git a/client/tests/integration/smartcontracts/validator_with_custom_token/Cargo.toml b/client/tests/integration/smartcontracts/executor_with_custom_token/Cargo.toml similarity index 77% rename from client/tests/integration/smartcontracts/validator_with_custom_token/Cargo.toml rename to client/tests/integration/smartcontracts/executor_with_custom_token/Cargo.toml index 431eae19798..1b305798354 100644 --- a/client/tests/integration/smartcontracts/validator_with_custom_token/Cargo.toml +++ b/client/tests/integration/smartcontracts/executor_with_custom_token/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "validator_with_custom_token" +name = "executor_with_custom_token" edition.workspace = true version.workspace = true @@ -11,11 +11,13 @@ license.workspace = true crate-type = ['cdylib'] [dependencies] -iroha_validator.workspace = true +iroha_executor.workspace = true iroha_schema.workspace = true parity-scale-codec.workspace = true -panic-halt.workspace = true anyhow.workspace = true serde_json.workspace = true serde.workspace = true + +panic-halt.workspace = true +lol_alloc.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs
b/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs new file mode 100644 index 00000000000..f75d0e43fed --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_custom_token/src/lib.rs @@ -0,0 +1,217 @@ +//! Runtime Executor which allows domain (un-)registration only for users who own +//! [`token::CanControlDomainLives`] permission token. +//! +//! This executor should be applied on top of the blockchain with default validation. +//! +//! It also doesn't have [`iroha_executor::default::tokens::domain::CanUnregisterDomain`]. +//! +//! In migration it replaces [`iroha_executor::default::tokens::domain::CanUnregisterDomain`] +//! with [`token::CanControlDomainLives`] for all accounts. +//! So it doesn't matter which domain user was able to unregister before migration, they will +//! get access to control all domains. Remember that this is just a test example. + +#![no_std] + +extern crate alloc; +#[cfg(not(test))] +extern crate panic_halt; + +use alloc::{borrow::ToOwned, string::String}; + +use anyhow::anyhow; +use iroha_executor::{ + default::default_permission_token_schema, permission::Token as _, prelude::*, smart_contract, +}; +use iroha_schema::IntoSchema; +use lol_alloc::{FreeListAllocator, LockedAllocator}; +use parity_scale_codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; +use serde_json::json; + +#[global_allocator] +static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new()); + +use alloc::format; + +mod token { + //! Module with custom token. + + use super::*; + + /// Token to identify if user can (un-)register domains. + #[derive( + PartialEq, + Eq, + Token, + ValidateGrantRevoke, + Decode, + Encode, + IntoSchema, + Serialize, + Deserialize, + )] + #[validate(iroha_executor::permission::OnlyGenesis)] + pub struct CanControlDomainLives; +} + +#[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)] +#[visit(custom(visit_register_domain, visit_unregister_domain))] +struct Executor { + verdict: Result, + block_height: u64, + host: smart_contract::Host, +} + +impl Executor { + fn get_all_accounts_with_can_unregister_domain_permission( + ) -> Result<Vec<(Account, DomainId)>, MigrationError> { + let accounts = FindAllAccounts.execute().map_err(|error| { + format!("{:?}", anyhow!(error).context("Failed to get all accounts")) + })?; + + let mut found_accounts = Vec::new(); + + for account in accounts { + let account = account.map_err(|error| { + format!("{:?}", anyhow!(error).context("Failed to get account")) + })?; + let permission_tokens = FindPermissionTokensByAccountId::new(account.id().clone()) + .execute() + .map_err(|error| { + format!( + "{:?}", + anyhow!(error).context(format!( + "Failed to get permissions for account `{}`", + account.id() + )) + ) + })?; + + for token in permission_tokens { + let token = token.map_err(|error| { + format!( + "{:?}", + anyhow!(error).context("Failed to get permission token") + ) + })?; + + if let Ok(can_unregister_domain_token) = + iroha_executor::default::tokens::domain::CanUnregisterDomain::try_from(token) + { + found_accounts.push((account, can_unregister_domain_token.domain_id)); + break; + } + } + } + + Ok(found_accounts) + } + + fn replace_token(accounts: &[(Account, DomainId)]) -> MigrationResult { + let can_unregister_domain_definition_id = PermissionTokenId::try_from( + iroha_executor::default::tokens::domain::CanUnregisterDomain::type_name(), + ) + .unwrap(); + + let can_control_domain_lives_definition_id = +
PermissionTokenId::try_from(token::CanControlDomainLives::type_name()).unwrap(); + + accounts + .iter() + .try_for_each(|(account, domain_id)| { + RevokeExpr::new( + PermissionToken::new( + can_unregister_domain_definition_id.clone(), + &json!({ "domain_id": domain_id }), + ), + account.id().clone(), + ) + .execute() + .map_err(|error| { + format!( + "{:?}", + anyhow!(error).context(format!( + "Failed to revoke `{}` token from account `{}`", + can_unregister_domain_definition_id, + account.id() + )) + ) + })?; + + GrantExpr::new( + PermissionToken::new( + can_control_domain_lives_definition_id.clone(), + &json!(null), + ), + account.id().clone(), + ) + .execute() + .map_err(|error| { + format!( + "{:?}", + anyhow!(error).context(format!( + "Failed to grant `{}` token from account `{}`", + can_control_domain_lives_definition_id, + account.id() + )) + ) + }) + }) + .map_err(|error| { + iroha_executor::log::error!(&error); + format!( + "{:?}", + anyhow!(error).context(format!( + "Failed to replace `{}` token with `{}` for accounts", + can_unregister_domain_definition_id, can_control_domain_lives_definition_id, + )) + ) + }) + } +} + +// TODO (#4049): Fix unused `visit_register_domain()` +fn visit_register_domain(executor: &mut Executor, authority: &AccountId, _isi: Register<Domain>) { + if executor.block_height() == 0 { + pass!(executor) + } + if token::CanControlDomainLives.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "You don't have permission to register a new domain" + ); +} + +fn visit_unregister_domain( + executor: &mut Executor, + authority: &AccountId, + _isi: Unregister<Domain>, +) { + if executor.block_height() == 0 { + pass!(executor); + } + if token::CanControlDomainLives.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "You don't have permission to unregister domain"); +} + +#[entrypoint] +pub fn migrate(_block_height: u64) -> MigrationResult { + let accounts = Executor::get_all_accounts_with_can_unregister_domain_permission()?; + + let mut schema = default_permission_token_schema(); + schema.remove::<iroha_executor::default::tokens::domain::CanUnregisterDomain>(); + schema.insert::<token::CanControlDomainLives>(); + + let (token_ids, schema_str) = schema.serialize(); + iroha_executor::set_permission_token_schema( + &iroha_executor::data_model::permission::PermissionTokenSchema::new(token_ids, schema_str), + ); + + Executor::replace_token(&accounts) +} diff --git a/client/tests/integration/smartcontracts/validator_with_migration_fail/Cargo.toml b/client/tests/integration/smartcontracts/executor_with_migration_fail/Cargo.toml similarity index 68% rename from client/tests/integration/smartcontracts/validator_with_migration_fail/Cargo.toml rename to client/tests/integration/smartcontracts/executor_with_migration_fail/Cargo.toml index abb13de0a70..813a5d74ef2 100644 --- a/client/tests/integration/smartcontracts/validator_with_migration_fail/Cargo.toml +++ b/client/tests/integration/smartcontracts/executor_with_migration_fail/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "validator_with_migration_fail" +name = "executor_with_migration_fail" edition.workspace = true version.workspace = true @@ -11,6 +11,8 @@ license.workspace = true crate-type = ['cdylib'] [dependencies] -iroha_validator.workspace = true -panic-halt.workspace = true +iroha_executor.workspace = true anyhow.workspace = true + +panic-halt.workspace = true +lol_alloc.workspace = true diff --git a/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs new file mode 100644 index
00000000000..437e01b9bdd --- /dev/null +++ b/client/tests/integration/smartcontracts/executor_with_migration_fail/src/lib.rs @@ -0,0 +1,41 @@ +//! Runtime Executor which copies default validation logic but forbids any queries and fails to migrate. + +#![no_std] + +extern crate alloc; +#[cfg(not(test))] +extern crate panic_halt; + +use alloc::{borrow::ToOwned as _, format}; + +use anyhow::anyhow; +use iroha_executor::{parse, prelude::*, smart_contract}; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new()); + +#[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)] +struct Executor { + verdict: Result, + block_height: u64, + host: smart_contract::Host, +} + +#[entrypoint] +pub fn migrate(_block_height: u64) -> MigrationResult { + // Performing side-effects to check in the test that it won't be applied after failure + + // Registering a new domain (using ISI) + let domain_id = parse!("failed_migration_test_domain" as DomainId); + RegisterExpr::new(Domain::new(domain_id)) + .execute() + .map_err(|error| { + format!( + "{:?}", + anyhow!(error).context("Failed to register test domain") + ) + })?; + + Err("This executor always fails to migrate".to_owned()) +} diff --git a/client/tests/integration/smartcontracts/mint_rose_trigger/Cargo.toml b/client/tests/integration/smartcontracts/mint_rose_trigger/Cargo.toml index 98c7c9992bd..cf5deba651d 100644 --- a/client/tests/integration/smartcontracts/mint_rose_trigger/Cargo.toml +++ b/client/tests/integration/smartcontracts/mint_rose_trigger/Cargo.toml @@ -12,4 +12,6 @@ crate-type = ['cdylib'] [dependencies] iroha_trigger.workspace = true + panic-halt.workspace = true +lol_alloc.workspace = true diff --git a/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs b/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs index c49a6136f71..7dd2d5c7c0d 100644 --- a/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs +++ b/client/tests/integration/smartcontracts/mint_rose_trigger/src/lib.rs @@ -8,6 +8,10 @@ extern crate panic_halt; use core::str::FromStr as _; use iroha_trigger::prelude::*; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new()); /// Mint 1 rose for owner #[iroha_trigger::main] @@ -16,7 +20,7 @@ fn main(owner: AccountId, _event: Event) { .dbg_expect("Failed to parse `rose#wonderland` asset definition id"); let rose_id = AssetId::new(rose_definition_id, owner); - MintBox::new(1_u32, rose_id) + MintExpr::new(1_u32, rose_id) .execute() .dbg_expect("Failed to mint rose"); } diff --git a/client/tests/integration/smartcontracts/query_assets_and_save_cursor/Cargo.toml b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/Cargo.toml new file mode 100644 index 00000000000..bc012a36958 --- /dev/null +++ b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "query_assets_and_save_cursor" + +edition.workspace = true +version.workspace = true +authors.workspace = true + +license.workspace = true + +[lib] +crate-type = ['cdylib'] + +[dependencies] +iroha_smart_contract.workspace = true + +panic-halt.workspace = true +lol_alloc.workspace = true +serde_json = { version = "1.0.108", default-features = false } diff --git a/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs
b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs new file mode 100644 index 00000000000..c86e452e693 --- /dev/null +++ b/client/tests/integration/smartcontracts/query_assets_and_save_cursor/src/lib.rs @@ -0,0 +1,38 @@ +//! Smart contract which executes [`FindAllAssets`] and saves cursor to the owner's metadata. + +#![no_std] + +#[cfg(not(test))] +extern crate panic_halt; + +extern crate alloc; + +use alloc::string::ToString as _; +use core::num::NonZeroU32; + +use iroha_smart_contract::{parse, prelude::*}; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator<FreeListAllocator> = LockedAllocator::new(FreeListAllocator::new()); + +/// Execute [`FindAllAssets`] and save cursor to the owner's metadata. +#[iroha_smart_contract::main] +fn main(owner: AccountId) { + let asset_cursor = FindAllAssets + .fetch_size(FetchSize::new(Some(NonZeroU32::try_from(1).dbg_unwrap()))) + .execute() + .dbg_unwrap(); + + let (_batch, cursor) = asset_cursor.into_raw_parts(); + + SetKeyValueExpr::new( + owner, + parse!("cursor" as Name), + serde_json::to_value(cursor) + .dbg_expect("Failed to convert cursor to JSON") + .to_string(), + ) + .execute() + .dbg_expect("Failed to save cursor to the owner's metadata"); +} diff --git a/client/tests/integration/smartcontracts/validator_with_admin/src/lib.rs b/client/tests/integration/smartcontracts/validator_with_admin/src/lib.rs deleted file mode 100644 index cbc6d280537..00000000000 --- a/client/tests/integration/smartcontracts/validator_with_admin/src/lib.rs +++ /dev/null @@ -1,166 +0,0 @@ -//! Runtime Validator which allows any instruction executed by `admin@admin` account. -//! If authority is not `admin@admin` then default validation is used as a backup. - -#![no_std] -#![allow(missing_docs, clippy::missing_errors_doc)] - -use iroha_validator::{ - data_model::evaluate::{EvaluationError, ExpressionEvaluator}, - iroha_wasm, parse, - prelude::*, -}; - -#[cfg(not(test))] -extern crate panic_halt; - -struct Validator { - verdict: Result, - block_height: u64, - host: iroha_wasm::Host, -} - -impl Validator { - /// Construct [`Self`] - pub fn new(block_height: u64) -> Self { - Self { - verdict: Ok(()), - block_height, - host: iroha_wasm::Host, - } - } -} - -macro_rules! defaults { - ( $($validator:ident $(<$param:ident $(: $bound:path)?>)?($operation:ty)),+ $(,)? ) => { $( - fn $validator $(<$param $(: $bound)?>)?(&mut self, authority: &AccountId, operation: $operation) { - iroha_validator::default::$validator(self, authority, operation) - } )+ - }; -} - -impl Visit for Validator { - fn visit_instruction(&mut self, authority: &AccountId, isi: &InstructionBox) { - if parse!("admin@admin" as AccountId) == *authority { - pass!(self); - } - - iroha_validator::default::visit_instruction(self, authority, isi); - } - - defaults!
{ - visit_unsupported<T: Debug>(T), - - visit_transaction(&VersionedSignedTransaction), - visit_expression<V>(&EvaluatesTo<V>), - visit_sequence(&SequenceBox), - visit_if(&Conditional), - visit_pair(&Pair), - - // Peer validation - visit_unregister_peer(Unregister<Peer>), - - // Domain validation - visit_unregister_domain(Unregister<Domain>), - visit_set_domain_key_value(SetKeyValue<Domain>), - visit_remove_domain_key_value(RemoveKeyValue<Domain>), - - // Account validation - visit_unregister_account(Unregister<Account>), - visit_mint_account_public_key(Mint<Account, PublicKey>), - visit_burn_account_public_key(Burn<Account, PublicKey>), - visit_mint_account_signature_check_condition(Mint<Account, SignatureCheckCondition>), - visit_set_account_key_value(SetKeyValue<Account>), - visit_remove_account_key_value(RemoveKeyValue<Account>), - - // Asset validation - visit_register_asset(Register<Asset>), - visit_unregister_asset(Unregister<Asset>), - visit_mint_asset(Mint<Asset, NumericValue>), - visit_burn_asset(Burn<Asset, NumericValue>), - visit_transfer_asset(Transfer<Asset, NumericValue, Account>), - visit_set_asset_key_value(SetKeyValue<Asset>), - visit_remove_asset_key_value(RemoveKeyValue<Asset>), - - // AssetDefinition validation - visit_unregister_asset_definition(Unregister<AssetDefinition>), - visit_transfer_asset_definition(Transfer<Account, AssetDefinition, Account>), - visit_set_asset_definition_key_value(SetKeyValue<AssetDefinition>), - visit_remove_asset_definition_key_value(RemoveKeyValue<AssetDefinition>), - - // Permission validation - visit_grant_account_permission(Grant<Account, PermissionToken>), - visit_revoke_account_permission(Revoke<Account, PermissionToken>), - - // Role validation - visit_register_role(Register<Role>), - visit_unregister_role(Unregister<Role>), - visit_grant_account_role(Grant<Account, RoleId>), - visit_revoke_account_role(Revoke<Account, RoleId>), - - // Trigger validation - visit_unregister_trigger(Unregister<Trigger<TriggeringFilterBox, Executable>>), - visit_mint_trigger_repetitions(Mint<Trigger<TriggeringFilterBox, Executable>, u32>), - visit_execute_trigger(ExecuteTrigger), - - // Parameter validation - visit_set_parameter(SetParameter), - visit_new_parameter(NewParameter), - - // Upgrade validation - visit_upgrade_validator(Upgrade<Validator>), - } -} - -impl Validate for Validator { - fn verdict(&self) -> &Result { - &self.verdict - } - - fn block_height(&self) -> u64 { - self.block_height - } - - fn deny(&mut self, reason: ValidationFail) { - self.verdict = Err(reason); - } -} - -impl ExpressionEvaluator for Validator { - fn evaluate<E: Evaluate>(&self, expression: &E) -> Result<E::Value, EvaluationError> { - self.host.evaluate(expression) - } -} - -#[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { - Ok(()) -} - -#[entrypoint] -pub fn validate_transaction( - authority: AccountId, - transaction: VersionedSignedTransaction, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_transaction(&authority, &transaction); - validator.verdict -} - -#[entrypoint] -pub fn validate_instruction( - authority: AccountId, - instruction: InstructionBox, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_instruction(&authority, &instruction); - validator.verdict -} - -#[entrypoint] -pub fn validate_query(authority: AccountId, query: QueryBox, block_height: u64) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_query(&authority, &query); - validator.verdict -} diff --git a/client/tests/integration/smartcontracts/validator_with_custom_token/src/lib.rs b/client/tests/integration/smartcontracts/validator_with_custom_token/src/lib.rs deleted file mode 100644 index 660a5d076db..00000000000 --- a/client/tests/integration/smartcontracts/validator_with_custom_token/src/lib.rs +++ /dev/null @@ -1,330 +0,0 @@ -//! Runtime Validator which allows domain (un-)registration only for users who own -//! [`token::CanControlDomainLives`] permission token. -//! -//!
This validator should be applied on top of the blockchain with default validation. -//! -//! It also doesn't have [`iroha_validator::default::domain::tokens::CanUnregisterDomain`]. -//! -//! In migration it replaces [`iroha_validator::default::domain::tokens::CanUnregisterDomain`] -//! with [`token::CanControlDomainLives`] for all accounts. -//! So it doesn't matter which domain user was able to unregister before migration, they will -//! get access to control all domains. Remember that this is just a test example. - -#![no_std] -#![allow(missing_docs, clippy::missing_errors_doc)] - -extern crate alloc; - -use alloc::string::String; - -use anyhow::anyhow; -use iroha_schema::IntoSchema; -use iroha_validator::{ - data_model::evaluate::{EvaluationError, ExpressionEvaluator}, - default::default_permission_token_schema, - iroha_wasm, - permission::Token as _, - prelude::*, -}; -use parity_scale_codec::{Decode, Encode}; -use serde::{Deserialize, Serialize}; -use serde_json::json; - -#[cfg(not(test))] -extern crate panic_halt; - -use alloc::format; - -mod token { - //! Module with custom token. - - use super::*; - - /// Token to identify if user can (un-)register domains. - #[derive( - PartialEq, - Eq, - Token, - ValidateGrantRevoke, - Decode, - Encode, - IntoSchema, - Serialize, - Deserialize, - )] - #[validate(iroha_validator::permission::OnlyGenesis)] - pub struct CanControlDomainLives; -} - -struct Validator { - verdict: Result, - block_height: u64, - host: iroha_wasm::Host, -} - -impl Validator { - /// Construct [`Self`] - pub fn new(block_height: u64) -> Self { - Self { - verdict: Ok(()), - block_height, - host: iroha_wasm::Host, - } - } - - fn get_all_accounts_with_can_unregister_domain_permission( - ) -> Result<Vec<(Account, DomainId)>, MigrationError> { - let accounts = FindAllAccounts.execute().map_err(|error| { - format!("{:?}", anyhow!(error).context("Failed to get all accounts")) - })?; - - let mut found_accounts = Vec::new(); - - for account in accounts { - let permission_tokens = FindPermissionTokensByAccountId::new(account.id().clone()) - .execute() - .map_err(|error| { - format!( - "{:?}", - anyhow!(error).context(format!( - "Failed to get permissions for account `{}`", - account.id() - )) - ) - })?; - - for token in permission_tokens { - if let Ok(can_unregister_domain_token) = - iroha_validator::default::domain::tokens::CanUnregisterDomain::try_from(token) - { - found_accounts.push((account, can_unregister_domain_token.domain_id)); - break; - } - } - } - - Ok(found_accounts) - } - - fn replace_token(accounts: &[(Account, DomainId)]) -> MigrationResult { - let can_unregister_domain_definition_id = PermissionTokenId::try_from( - iroha_validator::default::domain::tokens::CanUnregisterDomain::type_name(), - ) - .unwrap(); - - let can_control_domain_lives_definition_id = - PermissionTokenId::try_from(token::CanControlDomainLives::type_name()).unwrap(); - - accounts - .iter() - .try_for_each(|(account, domain_id)| { - RevokeBox::new( - PermissionToken::new( - can_unregister_domain_definition_id.clone(), - &json!({ "domain_id": domain_id }), - ), - account.id().clone(), - ) - .execute() - .map_err(|error| { - format!( - "{:?}", - anyhow!(error).context(format!( - "Failed to revoke `{}` token from account `{}`", - can_unregister_domain_definition_id, - account.id() - )) - ) - })?; - - GrantBox::new( - PermissionToken::new( - can_control_domain_lives_definition_id.clone(), - &json!(null), - ), - account.id().clone(), - ) - .execute() - .map_err(|error| { - format!( - "{:?}", - anyhow!(error).context(format!( -
"Failed to grant `{}` token from account `{}`", - can_control_domain_lives_definition_id, - account.id() - )) - ) - }) - }) - .map_err(|error| { - iroha_validator::iroha_wasm::error!(&error); - format!( - "{:?}", - anyhow!(error).context(format!( - "Failed to replace `{}` token with `{}` for accounts", - can_unregister_domain_definition_id, can_control_domain_lives_definition_id, - )) - ) - }) - } -} - -macro_rules! defaults { - ( $($validator:ident $(<$param:ident $(: $bound:path)?>)?($operation:ty)),+ $(,)? ) => { $( - fn $validator $(<$param $(: $bound)?>)?(&mut self, authority: &AccountId, operation: $operation) { - iroha_validator::default::$validator(self, authority, operation) - } )+ - }; -} - -impl Visit for Validator { - fn visit_register_domain(&mut self, authority: &AccountId, _isi: Register<Domain>) { - if self.block_height() == 0 { - pass!(self); - } - if token::CanControlDomainLives.is_owned_by(authority) { - pass!(self); - } - - deny!(self, "You don't have permission to register a new domain"); - } - - fn visit_unregister_domain(&mut self, authority: &AccountId, _isi: Unregister<Domain>) { - if self.block_height() == 0 { - pass!(self); - } - if token::CanControlDomainLives.is_owned_by(authority) { - pass!(self); - } - - deny!(self, "You don't have permission to unregister domain"); - } - - defaults! { - visit_unsupported<T: Debug>(T), - - visit_transaction(&VersionedSignedTransaction), - visit_instruction(&InstructionBox), - visit_expression<V>(&EvaluatesTo<V>), - visit_sequence(&SequenceBox), - visit_if(&Conditional), - visit_pair(&Pair), - - // Peer validation - visit_unregister_peer(Unregister<Peer>), - - // Domain validation - visit_set_domain_key_value(SetKeyValue<Domain>), - visit_remove_domain_key_value(RemoveKeyValue<Domain>), - - // Account validation - visit_unregister_account(Unregister<Account>), - visit_mint_account_public_key(Mint<Account, PublicKey>), - visit_burn_account_public_key(Burn<Account, PublicKey>), - visit_mint_account_signature_check_condition(Mint<Account, SignatureCheckCondition>), - visit_set_account_key_value(SetKeyValue<Account>), - visit_remove_account_key_value(RemoveKeyValue<Account>), - - // Asset validation - visit_register_asset(Register<Asset>), - visit_unregister_asset(Unregister<Asset>), - visit_mint_asset(Mint<Asset, NumericValue>), - visit_burn_asset(Burn<Asset, NumericValue>), - visit_transfer_asset(Transfer<Asset, NumericValue, Account>), - visit_set_asset_key_value(SetKeyValue<Asset>), - visit_remove_asset_key_value(RemoveKeyValue<Asset>), - - // AssetDefinition validation - visit_unregister_asset_definition(Unregister<AssetDefinition>), - visit_transfer_asset_definition(Transfer<Account, AssetDefinition, Account>), - visit_set_asset_definition_key_value(SetKeyValue<AssetDefinition>), - visit_remove_asset_definition_key_value(RemoveKeyValue<AssetDefinition>), - - // Permission validation - visit_grant_account_permission(Grant<Account, PermissionToken>), - visit_revoke_account_permission(Revoke<Account, PermissionToken>), - - // Role validation - visit_register_role(Register<Role>), - visit_unregister_role(Unregister<Role>), - visit_grant_account_role(Grant<Account, RoleId>), - visit_revoke_account_role(Revoke<Account, RoleId>), - - // Trigger validation - visit_unregister_trigger(Unregister<Trigger<TriggeringFilterBox, Executable>>), - visit_mint_trigger_repetitions(Mint<Trigger<TriggeringFilterBox, Executable>, u32>), - visit_execute_trigger(ExecuteTrigger), - - // Parameter validation - visit_set_parameter(SetParameter), - visit_new_parameter(NewParameter), - - // Upgrade validation - visit_upgrade_validator(Upgrade<Validator>), - } -} - -impl Validate for Validator { - fn verdict(&self) -> &Result { - &self.verdict - } - - fn block_height(&self) -> u64 { - self.block_height - } - - fn deny(&mut self, reason: ValidationFail) { - self.verdict = Err(reason); - } -} - -impl ExpressionEvaluator for Validator { - fn evaluate<E: Evaluate>(&self, expression: &E) -> Result<E::Value, EvaluationError> { - self.host.evaluate(expression) - } -} - -#[entrypoint] -pub fn migrate(_block_height: u64) ->
MigrationResult { - let accounts = Validator::get_all_accounts_with_can_unregister_domain_permission()?; - - let mut schema = default_permission_token_schema(); - schema.remove::<iroha_validator::default::domain::tokens::CanUnregisterDomain>(); - schema.insert::<token::CanControlDomainLives>(); - - let (token_ids, schema_str) = schema.serialize(); - iroha_validator::iroha_wasm::set_permission_token_schema( - &iroha_validator::data_model::permission::PermissionTokenSchema::new(token_ids, schema_str), - ); - - Validator::replace_token(&accounts) -} - -#[entrypoint] -pub fn validate_transaction( - authority: AccountId, - transaction: VersionedSignedTransaction, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_transaction(&authority, &transaction); - validator.verdict -} - -#[entrypoint] -pub fn validate_instruction( - authority: AccountId, - instruction: InstructionBox, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_instruction(&authority, &instruction); - validator.verdict -} - -#[entrypoint] -pub fn validate_query(authority: AccountId, query: QueryBox, block_height: u64) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_query(&authority, &query); - validator.verdict -} diff --git a/client/tests/integration/smartcontracts/validator_with_migration_fail/src/lib.rs b/client/tests/integration/smartcontracts/validator_with_migration_fail/src/lib.rs deleted file mode 100644 index 859dc13b801..00000000000 --- a/client/tests/integration/smartcontracts/validator_with_migration_fail/src/lib.rs +++ /dev/null @@ -1,184 +0,0 @@ -//! Runtime Validator which copies default validation logic but forbids any queries and fails to migrate. - -#![no_std] -#![allow(missing_docs, clippy::missing_errors_doc)] - -extern crate alloc; -#[cfg(not(test))] -extern crate panic_halt; - -use alloc::{borrow::ToOwned as _, format}; - -use anyhow::anyhow; -use iroha_validator::{ - data_model::{ - evaluate::{EvaluationError, ExpressionEvaluator}, - ValidationFail, - }, - iroha_wasm, parse, - prelude::*, -}; - -struct Validator { - verdict: Result, - block_height: u64, - host: iroha_wasm::Host, -} - -impl Validator { - /// Construct [`Self`] - pub fn new(block_height: u64) -> Self { - Self { - verdict: Ok(()), - block_height, - host: iroha_wasm::Host, - } - } -} - -macro_rules! defaults { - ( $($validator:ident $(<$param:ident $(: $bound:path)?>)?($operation:ty)),+ $(,)? ) => { $( - fn $validator $(<$param $(: $bound)?>)?(&mut self, authority: &AccountId, operation: $operation) { - iroha_validator::default::$validator(self, authority, operation) - } )+ - }; -} - -impl Visit for Validator { - fn visit_query(&mut self, _authority: &AccountId, _query: &QueryBox) { - self.deny(ValidationFail::NotPermitted( - "All queries are forbidden".to_owned(), - )); - } - - defaults!
{ - visit_unsupported<T: Debug>(T), - - visit_transaction(&VersionedSignedTransaction), - visit_instruction(&InstructionBox), - visit_expression<V>(&EvaluatesTo<V>), - visit_sequence(&SequenceBox), - visit_if(&Conditional), - visit_pair(&Pair), - - // Peer validation - visit_unregister_peer(Unregister<Peer>), - - // Domain validation - visit_unregister_domain(Unregister<Domain>), - visit_set_domain_key_value(SetKeyValue<Domain>), - visit_remove_domain_key_value(RemoveKeyValue<Domain>), - - // Account validation - visit_unregister_account(Unregister<Account>), - visit_mint_account_public_key(Mint<Account, PublicKey>), - visit_burn_account_public_key(Burn<Account, PublicKey>), - visit_mint_account_signature_check_condition(Mint<Account, SignatureCheckCondition>), - visit_set_account_key_value(SetKeyValue<Account>), - visit_remove_account_key_value(RemoveKeyValue<Account>), - - // Asset validation - visit_register_asset(Register<Asset>), - visit_unregister_asset(Unregister<Asset>), - visit_mint_asset(Mint<Asset, NumericValue>), - visit_burn_asset(Burn<Asset, NumericValue>), - visit_transfer_asset(Transfer<Asset, NumericValue, Account>), - visit_set_asset_key_value(SetKeyValue<Asset>), - visit_remove_asset_key_value(RemoveKeyValue<Asset>), - - // AssetDefinition validation - visit_unregister_asset_definition(Unregister<AssetDefinition>), - visit_transfer_asset_definition(Transfer<Account, AssetDefinition, Account>), - visit_set_asset_definition_key_value(SetKeyValue<AssetDefinition>), - visit_remove_asset_definition_key_value(RemoveKeyValue<AssetDefinition>), - - // Permission validation - visit_grant_account_permission(Grant<Account, PermissionToken>), - visit_revoke_account_permission(Revoke<Account, PermissionToken>), - - // Role validation - visit_register_role(Register<Role>), - visit_unregister_role(Unregister<Role>), - visit_grant_account_role(Grant<Account, RoleId>), - visit_revoke_account_role(Revoke<Account, RoleId>), - - // Trigger validation - visit_unregister_trigger(Unregister<Trigger<TriggeringFilterBox, Executable>>), - visit_mint_trigger_repetitions(Mint<Trigger<TriggeringFilterBox, Executable>, u32>), - visit_execute_trigger(ExecuteTrigger), - - // Parameter validation - visit_set_parameter(SetParameter), - visit_new_parameter(NewParameter), - - // Upgrade validation - visit_upgrade_validator(Upgrade<Validator>), - } -} - -impl Validate for Validator { - fn verdict(&self) -> &Result { - &self.verdict - } - - fn block_height(&self) -> u64 { - self.block_height - } - - fn deny(&mut self, reason: ValidationFail) { - self.verdict = Err(reason); - } -} - -impl ExpressionEvaluator for Validator { - fn evaluate<E: Evaluate>(&self, expression: &E) -> Result<E::Value, EvaluationError> { - self.host.evaluate(expression) - } -} - -#[entrypoint] -pub fn migrate(_block_height: u64) -> MigrationResult { - // Performing side-effects to check in the test that it won't be applied after failure - - // Registering a new domain (using ISI) - let domain_id = parse!("failed_migration_test_domain" as DomainId); - RegisterBox::new(Domain::new(domain_id)) - .execute() - .map_err(|error| { - format!( - "{:?}", - anyhow!(error).context("Failed to register test domain") - ) - })?; - - Err("This validator always fails to migrate".to_owned()) -} - -#[entrypoint] -pub fn validate_transaction( - authority: AccountId, - transaction: VersionedSignedTransaction, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_transaction(&authority, &transaction); - validator.verdict -} - -#[entrypoint] -pub fn validate_instruction( - authority: AccountId, - instruction: InstructionBox, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_instruction(&authority, &instruction); - validator.verdict -} - -#[entrypoint] -pub fn validate_query(authority: AccountId, query: QueryBox, block_height: u64) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_query(&authority, &query); - validator.verdict -} diff --git
a/client/tests/integration/sorting.rs b/client/tests/integration/sorting.rs index bed460847a9..a4452c3e241 100644 --- a/client/tests/integration/sorting.rs +++ b/client/tests/integration/sorting.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction, clippy::pedantic)] - use std::{ collections::HashSet, num::{NonZeroU32, NonZeroU64}, @@ -46,8 +44,8 @@ fn correct_pagination_assets_after_creating_new_one() { assets.push(asset.clone()); - let create_asset_definition = RegisterBox::new(asset_definition); - let create_asset = RegisterBox::new(asset); + let create_asset_definition = RegisterExpr::new(asset_definition); + let create_asset = RegisterExpr::new(asset); instructions.push(create_asset_definition); instructions.push(create_asset); @@ -60,14 +58,13 @@ fn correct_pagination_assets_after_creating_new_one() { let sorting = Sorting::by_metadata_key(sort_by_metadata_key.clone()); let res = test_client - .request_with_pagination_and_sorting( - client::asset::by_account_id(account_id.clone()), - Pagination { - limit: NonZeroU32::new(5), - start: None, - }, - sorting.clone(), - ) + .build_query(client::asset::by_account_id(account_id.clone())) + .with_pagination(Pagination { + limit: NonZeroU32::new(5), + start: None, + }) + .with_sorting(sorting.clone()) + .execute() .expect("Valid") .collect::>>() .expect("Valid"); @@ -95,22 +92,21 @@ fn correct_pagination_assets_after_creating_new_one() { AssetValue::Store(new_asset_metadata), ); - let create_asset_definition = RegisterBox::new(new_asset_definition); - let create_asset = RegisterBox::new(new_asset.clone()); + let create_asset_definition = RegisterExpr::new(new_asset_definition); + let create_asset = RegisterExpr::new(new_asset.clone()); test_client .submit_all_blocking([create_asset_definition, create_asset]) .expect("Valid"); let res = test_client - .request_with_pagination_and_sorting( - client::asset::by_account_id(account_id), - Pagination { - limit: NonZeroU32::new(13), - start: NonZeroU64::new(8), - }, - sorting, - ) + .build_query(client::asset::by_account_id(account_id)) + .with_pagination(Pagination { + limit: NonZeroU32::new(13), + start: NonZeroU64::new(8), + }) + .with_sorting(sorting) + .execute() .expect("Valid") .collect::>>() .expect("Valid"); @@ -126,6 +122,7 @@ fn correct_pagination_assets_after_creating_new_one() { } #[test] +#[allow(clippy::too_many_lines)] fn correct_sorting_of_entities() { let (_rt, _peer, test_client) = ::new().with_port(10_640).start_with_runtime(); @@ -134,7 +131,7 @@ fn correct_sorting_of_entities() { // Test sorting asset definitions let mut asset_definitions = vec![]; - let mut assets_metadata = vec![]; + let mut metadata_of_assets = vec![]; let mut instructions = vec![]; let n = 10u128; for i in 0..n { @@ -151,10 +148,10 @@ fn correct_sorting_of_entities() { let asset_definition = AssetDefinition::quantity(asset_definition_id.clone()) .with_metadata(asset_metadata.clone()); - assets_metadata.push(asset_metadata); + metadata_of_assets.push(asset_metadata); asset_definitions.push(asset_definition_id); - let create_asset_definition = RegisterBox::new(asset_definition); + let create_asset_definition = RegisterExpr::new(asset_definition); instructions.push(create_asset_definition); } @@ -163,13 +160,12 @@ fn correct_sorting_of_entities() { .expect("Valid"); let res = test_client - .request_with_filter_and_sorting( - client::asset::all_definitions(), - Sorting::by_metadata_key(sort_by_metadata_key.clone()), - PredicateBox::new(value::ValuePredicate::Identifiable( - 
string::StringPredicate::starts_with("xor_"), - )), - ) + .build_query(client::asset::all_definitions()) + .with_sorting(Sorting::by_metadata_key(sort_by_metadata_key.clone())) + .with_filter(PredicateBox::new(value::ValuePredicate::Identifiable( + string::StringPredicate::starts_with("xor_"), + ))) + .execute() .expect("Valid") .collect::>>() .expect("Valid"); @@ -180,13 +176,13 @@ fn correct_sorting_of_entities() { .eq(asset_definitions.iter().rev())); assert!(res .iter() - .map(|asset_definition| asset_definition.metadata()) - .eq(assets_metadata.iter().rev())); + .map(AssetDefinition::metadata) + .eq(metadata_of_assets.iter().rev())); // Test sorting accounts let mut accounts = vec![]; - let mut accounts_metadata = vec![]; + let mut metadata_of_accounts = vec![]; let mut instructions = vec![]; let n = 10u32; @@ -203,9 +199,9 @@ fn correct_sorting_of_entities() { let account = Account::new(account_id.clone(), []).with_metadata(account_metadata.clone()); accounts.push(account_id); - accounts_metadata.push(account_metadata); + metadata_of_accounts.push(account_metadata); - let create_account = RegisterBox::new(account); + let create_account = RegisterExpr::new(account); instructions.push(create_account); } @@ -214,13 +210,12 @@ fn correct_sorting_of_entities() { .expect("Valid"); let res = test_client - .request_with_filter_and_sorting( - client::account::all(), - Sorting::by_metadata_key(sort_by_metadata_key.clone()), - PredicateBox::new(value::ValuePredicate::Identifiable( - string::StringPredicate::starts_with("charlie"), - )), - ) + .build_query(client::account::all()) + .with_sorting(Sorting::by_metadata_key(sort_by_metadata_key.clone())) + .with_filter(PredicateBox::new(value::ValuePredicate::Identifiable( + string::StringPredicate::starts_with("charlie"), + ))) + .execute() .expect("Valid") .collect::>>() .expect("Valid"); @@ -228,13 +223,13 @@ fn correct_sorting_of_entities() { assert!(res.iter().map(Identifiable::id).eq(accounts.iter().rev())); assert!(res .iter() - .map(|account| account.metadata()) - .eq(accounts_metadata.iter().rev())); + .map(Account::metadata) + .eq(metadata_of_accounts.iter().rev())); // Test sorting domains let mut domains = vec![]; - let mut domains_metadata = vec![]; + let mut metadata_of_domains = vec![]; let mut instructions = vec![]; let n = 10u32; for i in 0..n { @@ -250,9 +245,9 @@ fn correct_sorting_of_entities() { let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); domains.push(domain_id); - domains_metadata.push(domain_metadata); + metadata_of_domains.push(domain_metadata); - let create_account = RegisterBox::new(domain); + let create_account = RegisterExpr::new(domain); instructions.push(create_account); } @@ -261,14 +256,12 @@ fn correct_sorting_of_entities() { .expect("Valid"); let res = test_client - .request_with_filter_and_pagination_and_sorting( - client::domain::all(), - Pagination::default(), - Sorting::by_metadata_key(sort_by_metadata_key.clone()), - PredicateBox::new(value::ValuePredicate::Identifiable( - string::StringPredicate::starts_with("neverland"), - )), - ) + .build_query(client::domain::all()) + .with_sorting(Sorting::by_metadata_key(sort_by_metadata_key.clone())) + .with_filter(PredicateBox::new(value::ValuePredicate::Identifiable( + string::StringPredicate::starts_with("neverland"), + ))) + .execute() .expect("Valid") .collect::>>() .expect("Valid"); @@ -276,13 +269,13 @@ fn correct_sorting_of_entities() { assert!(res.iter().map(Identifiable::id).eq(domains.iter().rev())); assert!(res .iter() 
- .map(|domain| domain.metadata()) - .eq(domains_metadata.iter().rev())); + .map(Domain::metadata) + .eq(metadata_of_domains.iter().rev())); // Naive test sorting of domains let input = [(0i32, 1u128), (2, 0), (1, 2)]; let mut domains = vec![]; - let mut domains_metadata = vec![]; + let mut metadata_of_domains = vec![]; let mut instructions = vec![]; for (idx, val) in input { let domain_id = DomainId::from_str(&format!("neverland_{idx}")).expect("Valid"); @@ -297,9 +290,9 @@ fn correct_sorting_of_entities() { let domain = Domain::new(domain_id.clone()).with_metadata(domain_metadata.clone()); domains.push(domain_id); - domains_metadata.push(domain_metadata); + metadata_of_domains.push(domain_metadata); - let create_account = RegisterBox::new(domain); + let create_account = RegisterExpr::new(domain); instructions.push(create_account); } test_client @@ -310,12 +303,10 @@ fn correct_sorting_of_entities() { string::StringPredicate::starts_with("neverland_"), )); let res = test_client - .request_with_filter_and_pagination_and_sorting( - client::domain::all(), - Pagination::default(), - Sorting::by_metadata_key(sort_by_metadata_key), - filter, - ) + .build_query(client::domain::all()) + .with_sorting(Sorting::by_metadata_key(sort_by_metadata_key)) + .with_filter(filter) + .execute() .expect("Valid") .collect::>>() .expect("Valid"); @@ -323,9 +314,9 @@ fn correct_sorting_of_entities() { assert_eq!(res[0].id(), &domains[1]); assert_eq!(res[1].id(), &domains[0]); assert_eq!(res[2].id(), &domains[2]); - assert_eq!(res[0].metadata(), &domains_metadata[1]); - assert_eq!(res[1].metadata(), &domains_metadata[0]); - assert_eq!(res[2].metadata(), &domains_metadata[2]); + assert_eq!(res[0].metadata(), &metadata_of_domains[1]); + assert_eq!(res[1].metadata(), &metadata_of_domains[0]); + assert_eq!(res[2].metadata(), &metadata_of_domains[2]); } #[test] @@ -345,7 +336,11 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> { let n = 10u32; for i in 0..n { let account_id = AccountId::from_str(&format!("charlie{i}@wonderland")).expect("Valid"); - let account = if !skip_set.contains(&i) { + let account = if skip_set.contains(&i) { + let account = Account::new(account_id.clone(), []); + accounts_b.push(account_id); + account + } else { let mut account_metadata = Metadata::new(); account_metadata .insert_with_limits( @@ -357,13 +352,9 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> { let account = Account::new(account_id.clone(), []).with_metadata(account_metadata); accounts_a.push(account_id); account - } else { - let account = Account::new(account_id.clone(), []); - accounts_b.push(account_id); - account }; - let create_account = RegisterBox::new(account); + let create_account = RegisterExpr::new(account); instructions.push(create_account); } @@ -372,13 +363,12 @@ fn sort_only_elements_which_have_sorting_key() -> Result<()> { .wrap_err("Failed to register accounts")?; let res = test_client - .request_with_filter_and_sorting( - client::account::all(), - Sorting::by_metadata_key(sort_by_metadata_key), - PredicateBox::new(value::ValuePredicate::Identifiable( - string::StringPredicate::starts_with("charlie"), - )), - ) + .build_query(client::account::all()) + .with_sorting(Sorting::by_metadata_key(sort_by_metadata_key)) + .with_filter(PredicateBox::new(value::ValuePredicate::Identifiable( + string::StringPredicate::starts_with("charlie"), + ))) + .execute() .wrap_err("Failed to submit request")? 
.collect::>>()?; diff --git a/client/tests/integration/transfer_asset.rs b/client/tests/integration/transfer_asset.rs index 674a1fa405e..3eb2ae5883e 100644 --- a/client/tests/integration/transfer_asset.rs +++ b/client/tests/integration/transfer_asset.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction, clippy::pedantic)] - use iroha_client::client::{self, QueryResult}; use iroha_crypto::KeyPair; use iroha_data_model::{prelude::*, Registered}; @@ -8,19 +6,19 @@ use test_network::*; #[test] fn simulate_transfer_quantity() { - simulate_transfer(200_u32, 20_u32, AssetDefinition::quantity, 10_710) + simulate_transfer(200_u32, &20_u32, AssetDefinition::quantity, 10_710) } #[test] fn simulate_transfer_big_quantity() { - simulate_transfer(200_u128, 20_u128, AssetDefinition::big_quantity, 10_785) + simulate_transfer(200_u128, &20_u128, AssetDefinition::big_quantity, 10_785) } #[test] fn simulate_transfer_fixed() { simulate_transfer( Fixed::try_from(200_f64).expect("Valid"), - Fixed::try_from(20_f64).expect("Valid"), + &Fixed::try_from(20_f64).expect("Valid"), AssetDefinition::fixed, 10_790, ) @@ -32,7 +30,7 @@ fn simulate_transfer_fixed() { fn simulate_insufficient_funds() { simulate_transfer( Fixed::try_from(20_f64).expect("Valid"), - Fixed::try_from(200_f64).expect("Valid"), + &Fixed::try_from(200_f64).expect("Valid"), AssetDefinition::fixed, 10_800, ) @@ -45,13 +43,13 @@ fn simulate_transfer< D: FnOnce(AssetDefinitionId) -> ::With, >( starting_amount: T, - amount_to_transfer: T, + amount_to_transfer: &T, value_type: D, port_number: u16, ) where Value: From, { - let (_rt, _peer, mut iroha_client) = ::new() + let (_rt, _peer, iroha_client) = ::new() .with_port(port_number) .start_with_runtime(); wait_for_genesis_committed(&[iroha_client.clone()], 0); @@ -61,15 +59,15 @@ fn simulate_transfer< let (bob_public_key, _) = KeyPair::generate() .expect("Failed to generate KeyPair") .into(); - let create_mouse = RegisterBox::new(Account::new(mouse_id.clone(), [bob_public_key])); + let create_mouse = RegisterExpr::new(Account::new(mouse_id.clone(), [bob_public_key])); let asset_definition_id: AssetDefinitionId = "camomile#wonderland".parse().expect("Valid"); - let create_asset = RegisterBox::new(value_type(asset_definition_id.clone())); - let mint_asset = MintBox::new( + let create_asset = RegisterExpr::new(value_type(asset_definition_id.clone())); + let mint_asset = MintExpr::new( starting_amount.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id.clone(), alice_id.clone())), ); - let instructions: [InstructionBox; 3] = [ + let instructions: [InstructionExpr; 3] = [ // create_alice.into(), We don't need to register Alice, because she is created in genesis create_mouse.into(), create_asset.into(), @@ -80,7 +78,7 @@ fn simulate_transfer< .expect("Failed to prepare state."); //When - let transfer_asset = TransferBox::new( + let transfer_asset = TransferExpr::new( IdBox::AssetId(AssetId::new(asset_definition_id.clone(), alice_id)), amount_to_transfer.clone().to_value(), IdBox::AccountId(mouse_id.clone()), diff --git a/client/tests/integration/triggers/by_call_trigger.rs b/client/tests/integration/triggers/by_call_trigger.rs index 0608c82764b..d7602a54cdb 100644 --- a/client/tests/integration/triggers/by_call_trigger.rs +++ b/client/tests/integration/triggers/by_call_trigger.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, sync::mpsc, thread, time::Duration}; use eyre::{eyre, Result, WrapErr}; @@ -7,7 +5,7 @@ use iroha_client::client::{self, Client}; use iroha_data_model::{ 
prelude::*, query::error::{FindError, QueryExecutionFail}, - trigger::OptimizedExecutable, + transaction::Executable, }; use iroha_genesis::GenesisNetwork; use iroha_logger::info; @@ -25,12 +23,12 @@ fn call_execute_trigger() -> Result<()> { let asset_id = AssetId::new(asset_definition_id, account_id); let prev_value = get_asset_value(&mut test_client, asset_id.clone())?; - let instruction = MintBox::new(1_u32, asset_id.clone()); + let instruction = MintExpr::new(1_u32, asset_id.clone()); let register_trigger = build_register_trigger_isi(asset_id.clone(), vec![instruction.into()]); test_client.submit_blocking(register_trigger)?; let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; - let call_trigger = ExecuteTriggerBox::new(trigger_id); + let call_trigger = ExecuteTriggerExpr::new(trigger_id); test_client.submit_blocking(call_trigger)?; let new_value = get_asset_value(&mut test_client, asset_id)?; @@ -48,12 +46,12 @@ fn execute_trigger_should_produce_event() -> Result<()> { let account_id: AccountId = "alice@wonderland".parse()?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let instruction = MintBox::new(1_u32, asset_id.clone()); + let instruction = MintExpr::new(1_u32, asset_id.clone()); let register_trigger = build_register_trigger_isi(asset_id, vec![instruction.into()]); test_client.submit_blocking(register_trigger)?; let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; - let call_trigger = ExecuteTriggerBox::new(trigger_id.clone()); + let call_trigger = ExecuteTriggerExpr::new(trigger_id.clone()); let thread_client = test_client.clone(); let (sender, receiver) = mpsc::channel(); @@ -83,11 +81,11 @@ fn infinite_recursion_should_produce_one_call_per_block() -> Result<()> { let account_id = "alice@wonderland".parse()?; let asset_id = AssetId::new(asset_definition_id, account_id); let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; - let call_trigger = ExecuteTriggerBox::new(trigger_id); + let call_trigger = ExecuteTriggerExpr::new(trigger_id); let prev_value = get_asset_value(&mut test_client, asset_id.clone())?; let instructions = vec![ - MintBox::new(1_u32, asset_id.clone()).into(), + MintExpr::new(1_u32, asset_id.clone()).into(), call_trigger.clone().into(), ]; let register_trigger = build_register_trigger_isi(asset_id.clone(), instructions); @@ -113,8 +111,8 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> { // Registering trigger that should fail on execution let bad_trigger_id = TriggerId::from_str("bad_trigger")?; // Invalid instruction - let bad_trigger_instructions = vec![MintBox::new(1_u32, account_id.clone())]; - let register_bad_trigger = RegisterBox::new(Trigger::new( + let bad_trigger_instructions = vec![MintExpr::new(1_u32, account_id.clone())]; + let register_bad_trigger = RegisterExpr::new(Trigger::new( bad_trigger_id.clone(), Action::new( bad_trigger_instructions, @@ -130,8 +128,8 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> { // Registering normal trigger let trigger_id = TriggerId::from_str(TRIGGER_NAME)?; - let trigger_instructions = vec![MintBox::new(1_u32, asset_id.clone())]; - let register_trigger = RegisterBox::new(Trigger::new( + let trigger_instructions = vec![MintExpr::new(1_u32, asset_id.clone())]; + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id, Action::new( trigger_instructions, @@ -147,7 +145,7 @@ fn trigger_failure_should_not_cancel_other_triggers_execution() -> Result<()> { let prev_asset_value = get_asset_value(&mut test_client, 
asset_id.clone())?; // Executing bad trigger - test_client.submit_blocking(ExecuteTriggerBox::new(bad_trigger_id))?; + test_client.submit_blocking(ExecuteTriggerExpr::new(bad_trigger_id))?; // Checking results let new_asset_value = get_asset_value(&mut test_client, asset_id)?; @@ -165,8 +163,8 @@ fn trigger_should_not_be_executed_with_zero_repeats_count() -> Result<()> { let asset_id = AssetId::new(asset_definition_id, account_id.clone()); let trigger_id = TriggerId::from_str("self_modifying_trigger")?; - let trigger_instructions = vec![MintBox::new(1_u32, asset_id.clone())]; - let register_trigger = RegisterBox::new(Trigger::new( + let trigger_instructions = vec![MintExpr::new(1_u32, asset_id.clone())]; + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id.clone(), Action::new( trigger_instructions, @@ -184,7 +182,7 @@ fn trigger_should_not_be_executed_with_zero_repeats_count() -> Result<()> { let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?; // Executing trigger first time - let execute_trigger = ExecuteTriggerBox::new(trigger_id.clone()); + let execute_trigger = ExecuteTriggerExpr::new(trigger_id.clone()); test_client.submit_blocking(execute_trigger.clone())?; // Executing trigger second time @@ -224,10 +222,10 @@ fn trigger_should_be_able_to_modify_its_own_repeats_count() -> Result<()> { let trigger_id = TriggerId::from_str("self_modifying_trigger")?; let trigger_instructions = vec![ - MintBox::new(1_u32, trigger_id.clone()), - MintBox::new(1_u32, asset_id.clone()), + MintExpr::new(1_u32, trigger_id.clone()), + MintExpr::new(1_u32, asset_id.clone()), ]; - let register_trigger = RegisterBox::new(Trigger::new( + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id.clone(), Action::new( trigger_instructions, @@ -245,7 +243,7 @@ fn trigger_should_be_able_to_modify_its_own_repeats_count() -> Result<()> { let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?; // Executing trigger first time - let execute_trigger = ExecuteTriggerBox::new(trigger_id); + let execute_trigger = ExecuteTriggerExpr::new(trigger_id); test_client.submit_blocking(execute_trigger.clone())?; // Executing trigger second time @@ -270,7 +268,7 @@ fn unregister_trigger() -> Result<()> { let trigger = Trigger::new( trigger_id.clone(), Action::new( - Vec::::new(), + Vec::::new(), Repeats::Indefinitely, account_id.clone(), TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( @@ -279,7 +277,7 @@ fn unregister_trigger() -> Result<()> { )), ), ); - let register_trigger = RegisterBox::new(trigger.clone()); + let register_trigger = RegisterExpr::new(trigger.clone()); test_client.submit_blocking(register_trigger)?; // Finding trigger @@ -288,7 +286,7 @@ fn unregister_trigger() -> Result<()> { }; let found_trigger = test_client.request(find_trigger.clone())?; let found_action = found_trigger.action; - let OptimizedExecutable::Instructions(found_instructions) = found_action.executable else { + let Executable::Instructions(found_instructions) = found_action.executable else { panic!("Expected instructions"); }; let found_trigger = Trigger::new( @@ -303,7 +301,7 @@ fn unregister_trigger() -> Result<()> { assert_eq!(found_trigger, trigger); // Unregistering trigger - let unregister_trigger = UnregisterBox::new(trigger_id); + let unregister_trigger = UnregisterExpr::new(trigger_id); test_client.submit_blocking(unregister_trigger)?; // Checking result @@ -331,13 +329,15 @@ fn trigger_in_genesis_using_base64() -> Result<()> { info!("Building trigger"); 
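As background for the hunk below: base64 0.21 deprecated the crate-level `encode`/`decode` helpers in favor of explicit engines, which is why this test now routes through `base64::engine::general_purpose::STANDARD`. A minimal sketch of the same call in the trait-import style (assuming `base64 = "0.21"`; this is an alternative to the fully qualified call used in the diff):

```rust
use base64::{engine::general_purpose::STANDARD, Engine as _};

fn wasm_to_base64(wasm: &[u8]) -> String {
    // `encode` is a method on the `Engine` trait, invoked here on the
    // standard-alphabet, padded engine.
    STANDARD.encode(wasm)
}
```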
let wasm = iroha_wasm_builder::Builder::new("tests/integration/smartcontracts/mint_rose_trigger") + .show_output() .build()? .optimize()? .into_bytes()?; info!("WASM size is {} bytes", wasm.len()); - let wasm_base64 = serde_json::json!(base64::encode(&wasm)).to_string(); + let engine = base64::engine::general_purpose::STANDARD; + let wasm_base64 = serde_json::json!(base64::engine::Engine::encode(&engine, wasm)).to_string(); let account_id = AccountId::from_str("alice@wonderland")?; let trigger_id = TriggerId::from_str("genesis_trigger")?; @@ -357,10 +357,10 @@ fn trigger_in_genesis_using_base64() -> Result<()> { // Registering trigger in genesis let mut genesis = GenesisNetwork::test(true).expect("Expected genesis"); - let VersionedSignedTransaction::V1(tx_ref) = &mut genesis.transactions[0].0; - match &mut tx_ref.payload.instructions { + let tx_ref = &mut genesis.transactions[0].0; + match &mut tx_ref.payload_mut().instructions { Executable::Instructions(instructions) => { - instructions.push(RegisterBox::new(trigger).into()); + instructions.push(RegisterExpr::new(trigger).into()); } Executable::Wasm(_) => panic!("Expected instructions"), } @@ -376,7 +376,7 @@ fn trigger_in_genesis_using_base64() -> Result<()> { let prev_value = get_asset_value(&mut test_client, asset_id.clone())?; // Executing trigger - let call_trigger = ExecuteTriggerBox::new(trigger_id); + let call_trigger = ExecuteTriggerExpr::new(trigger_id); test_client.submit_blocking(call_trigger)?; // Checking result @@ -397,10 +397,10 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { let trigger_id_unregister = TriggerId::from_str("unregister_other_trigger")?; let trigger_id_should_be_unregistered = TriggerId::from_str("should_be_unregistered_trigger")?; - let trigger_unregister_instructions = vec![UnregisterBox::new( + let trigger_unregister_instructions = vec![UnregisterExpr::new( trigger_id_should_be_unregistered.clone(), )]; - let register_trigger = RegisterBox::new(Trigger::new( + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id_unregister.clone(), Action::new( trigger_unregister_instructions, @@ -414,8 +414,8 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { )); test_client.submit_blocking(register_trigger)?; - let trigger_should_be_unregistered_instructions = vec![MintBox::new(1_u32, asset_id.clone())]; - let register_trigger = RegisterBox::new(Trigger::new( + let trigger_should_be_unregistered_instructions = vec![MintExpr::new(1_u32, asset_id.clone())]; + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id_should_be_unregistered.clone(), Action::new( trigger_should_be_unregistered_instructions, @@ -433,9 +433,9 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { let prev_asset_value = get_asset_value(&mut test_client, asset_id.clone())?; // Executing triggers - let execute_trigger_unregister = ExecuteTriggerBox::new(trigger_id_unregister); + let execute_trigger_unregister = ExecuteTriggerExpr::new(trigger_id_unregister); let execute_trigger_should_be_unregistered = - ExecuteTriggerBox::new(trigger_id_should_be_unregistered); + ExecuteTriggerExpr::new(trigger_id_should_be_unregistered); test_client.submit_all_blocking([ execute_trigger_unregister, execute_trigger_should_be_unregistered, @@ -449,6 +449,42 @@ fn trigger_should_be_able_to_modify_other_trigger() -> Result<()> { Ok(()) } +#[test] +fn trigger_burn_repetitions() -> Result<()> { + let (_rt, _peer, test_client) = ::new().with_port(11_070).start_with_runtime(); + 
wait_for_genesis_committed(&vec![test_client.clone()], 0); + + let asset_definition_id = "rose#wonderland".parse()?; + let account_id = AccountId::from_str("alice@wonderland")?; + let asset_id = AssetId::new(asset_definition_id, account_id.clone()); + let trigger_id = TriggerId::from_str("trigger")?; + + let trigger_instructions = vec![MintExpr::new(1_u32, asset_id)]; + let register_trigger = RegisterExpr::new(Trigger::new( + trigger_id.clone(), + Action::new( + trigger_instructions, + Repeats::from(1_u32), + account_id.clone(), + TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( + trigger_id.clone(), + account_id, + )), + ), + )); + test_client.submit_blocking(register_trigger)?; + + test_client.submit_blocking(BurnExpr::new(1_u32, trigger_id.clone()))?; + + // Executing trigger + let execute_trigger = ExecuteTriggerExpr::new(trigger_id); + let _err = test_client + .submit_blocking(execute_trigger) + .expect_err("Should fail without repetitions"); + + Ok(()) +} + fn get_asset_value(client: &mut Client, asset_id: AssetId) -> Result { let asset = client.request(client::asset::by_id(asset_id))?; Ok(*TryAsRef::::try_as_ref(asset.value())?) @@ -456,11 +492,11 @@ fn get_asset_value(client: &mut Client, asset_id: AssetId) -> Result { fn build_register_trigger_isi( asset_id: AssetId, - trigger_instructions: Vec, -) -> RegisterBox { + trigger_instructions: Vec, +) -> RegisterExpr { let trigger_id: TriggerId = TRIGGER_NAME.parse().expect("Valid"); - RegisterBox::new(Trigger::new( + RegisterExpr::new(Trigger::new( trigger_id.clone(), Action::new( trigger_instructions, diff --git a/client/tests/integration/triggers/data_trigger.rs b/client/tests/integration/triggers/data_trigger.rs index 64975b25faf..a95c9ff4466 100644 --- a/client/tests/integration/triggers/data_trigger.rs +++ b/client/tests/integration/triggers/data_trigger.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use eyre::Result; use iroha_client::client; use iroha_data_model::prelude::*; @@ -7,17 +5,17 @@ use test_network::*; #[test] fn must_execute_both_triggers() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_650).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_650).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); let account_id: AccountId = "alice@wonderland".parse()?; let asset_definition_id = "rose#wonderland".parse()?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let prev_value = get_asset_value(&mut test_client, asset_id.clone())?; + let prev_value = get_asset_value(&test_client, asset_id.clone())?; - let instruction = MintBox::new(1_u32, asset_id.clone()); - let register_trigger = RegisterBox::new(Trigger::new( + let instruction = MintExpr::new(1_u32, asset_id.clone()); + let register_trigger = RegisterExpr::new(Trigger::new( "mint_rose_1".parse()?, Action::new( [instruction.clone()], @@ -30,7 +28,7 @@ fn must_execute_both_triggers() -> Result<()> { )); test_client.submit_blocking(register_trigger)?; - let register_trigger = RegisterBox::new(Trigger::new( + let register_trigger = RegisterExpr::new(Trigger::new( "mint_rose_2".parse()?, Action::new( [instruction], @@ -43,13 +41,13 @@ fn must_execute_both_triggers() -> Result<()> { )); test_client.submit_blocking(register_trigger)?; - test_client.submit_blocking(RegisterBox::new(Account::new( + test_client.submit_blocking(RegisterExpr::new(Account::new( "bunny@wonderland".parse()?, [], )))?; - 
test_client.submit_blocking(RegisterBox::new(Domain::new("neverland".parse()?)))?; + test_client.submit_blocking(RegisterExpr::new(Domain::new("neverland".parse()?)))?; - let new_value = get_asset_value(&mut test_client, asset_id)?; + let new_value = get_asset_value(&test_client, asset_id)?; assert_eq!(new_value, prev_value + 2); Ok(()) @@ -57,21 +55,21 @@ fn must_execute_both_triggers() -> Result<()> { #[test] fn domain_scoped_trigger_must_be_executed_only_on_events_in_its_domain() -> Result<()> { - let (_rt, _peer, mut test_client) = ::new().with_port(10_655).start_with_runtime(); + let (_rt, _peer, test_client) = ::new().with_port(10_655).start_with_runtime(); wait_for_genesis_committed(&[test_client.clone()], 0); - let create_neverland_domain = RegisterBox::new(Domain::new("neverland".parse()?)); + let create_neverland_domain = RegisterExpr::new(Domain::new("neverland".parse()?)); let account_id: AccountId = "sapporo@neverland".parse()?; - let create_sapporo_account = RegisterBox::new(Account::new(account_id.clone(), [])); + let create_sapporo_account = RegisterExpr::new(Account::new(account_id.clone(), [])); let asset_definition_id: AssetDefinitionId = "sakura#neverland".parse()?; let create_sakura_asset_definition = - RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); let asset_id = AssetId::new(asset_definition_id, account_id.clone()); let create_sakura_asset = - RegisterBox::new(Asset::new(asset_id.clone(), AssetValue::Quantity(0))); + RegisterExpr::new(Asset::new(asset_id.clone(), AssetValue::Quantity(0))); test_client.submit_all_blocking([ create_neverland_domain, @@ -80,12 +78,12 @@ fn domain_scoped_trigger_must_be_executed_only_on_events_in_its_domain() -> Resu create_sakura_asset, ])?; - let prev_value = get_asset_value(&mut test_client, asset_id.clone())?; + let prev_value = get_asset_value(&test_client, asset_id.clone())?; - let register_trigger = RegisterBox::new(Trigger::new( + let register_trigger = RegisterExpr::new(Trigger::new( "mint_sakura$neverland".parse()?, Action::new( - [MintBox::new(1_u32, asset_id.clone())], + [MintExpr::new(1_u32, asset_id.clone())], Repeats::Indefinitely, account_id, TriggeringFilterBox::Data(BySome(DataEntityFilter::ByAccount(BySome( @@ -95,23 +93,23 @@ fn domain_scoped_trigger_must_be_executed_only_on_events_in_its_domain() -> Resu )); test_client.submit_blocking(register_trigger)?; - test_client.submit_blocking(RegisterBox::new(Account::new( + test_client.submit_blocking(RegisterExpr::new(Account::new( "asahi@wonderland".parse()?, [], )))?; - test_client.submit_blocking(RegisterBox::new(Account::new( + test_client.submit_blocking(RegisterExpr::new(Account::new( "asahi@neverland".parse()?, [], )))?; - let new_value = get_asset_value(&mut test_client, asset_id)?; + let new_value = get_asset_value(&test_client, asset_id)?; assert_eq!(new_value, prev_value + 1); Ok(()) } -fn get_asset_value(client: &mut client::Client, asset_id: AssetId) -> Result { +fn get_asset_value(client: &client::Client, asset_id: AssetId) -> Result { let asset = client.request(client::asset::by_id(asset_id))?; Ok(*TryAsRef::::try_as_ref(asset.value())?) 
} diff --git a/client/tests/integration/triggers/event_trigger.rs b/client/tests/integration/triggers/event_trigger.rs index cdb3f1492b6..53a8aa55cd4 100644 --- a/client/tests/integration/triggers/event_trigger.rs +++ b/client/tests/integration/triggers/event_trigger.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::str::FromStr; use eyre::Result; @@ -13,12 +11,12 @@ fn test_mint_asset_when_new_asset_definition_created() -> Result<()> { wait_for_genesis_committed(&vec![test_client.clone()], 0); let asset_definition_id = "rose#wonderland".parse()?; - let account_id = ::Id::from_str("alice@wonderland")?; + let account_id = AccountId::from_str("alice@wonderland")?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); let prev_value = get_asset_value(&mut test_client, asset_id.clone())?; - let instruction = MintBox::new(1_u32, asset_id.clone()); - let register_trigger = RegisterBox::new(Trigger::new( + let instruction = MintExpr::new(1_u32, asset_id.clone()); + let register_trigger = RegisterExpr::new(Trigger::new( "mint_rose".parse()?, Action::new( vec![instruction], @@ -35,7 +33,7 @@ fn test_mint_asset_when_new_asset_definition_created() -> Result<()> { test_client.submit(register_trigger)?; let tea_definition_id = "tea#wonderland".parse()?; - let register_tea_definition = RegisterBox::new(AssetDefinition::quantity(tea_definition_id)); + let register_tea_definition = RegisterExpr::new(AssetDefinition::quantity(tea_definition_id)); test_client.submit_blocking(register_tea_definition)?; let new_value = get_asset_value(&mut test_client, asset_id)?; diff --git a/client/tests/integration/triggers/time_trigger.rs b/client/tests/integration/triggers/time_trigger.rs index c1dcfb2129c..959256b7f47 100644 --- a/client/tests/integration/triggers/time_trigger.rs +++ b/client/tests/integration/triggers/time_trigger.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{str::FromStr as _, time::Duration}; use eyre::Result; @@ -44,8 +42,8 @@ fn time_trigger_execution_count_error_should_be_less_than_15_percent() -> Result let schedule = TimeSchedule::starting_at(start_time).with_period(Duration::from_millis(PERIOD_MS)); - let instruction = MintBox::new(1_u32, asset_id.clone()); - let register_trigger = RegisterBox::new(Trigger::new( + let instruction = MintExpr::new(1_u32, asset_id.clone()); + let register_trigger = RegisterExpr::new(Trigger::new( "mint_rose".parse()?, Action::new( vec![instruction], @@ -92,15 +90,14 @@ fn change_asset_metadata_after_1_sec() -> Result<()> { // Start listening BEFORE submitting any transaction not to miss any block committed event let event_listener = get_block_committed_event_listener(&test_client)?; - let asset_definition_id = - ::Id::from_str("rose#wonderland").expect("Valid"); - let account_id = ::Id::from_str("alice@wonderland").expect("Valid"); + let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland").expect("Valid"); + let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); let key = Name::from_str("petal")?; let schedule = TimeSchedule::starting_at(start_time + Duration::from_millis(PERIOD_MS)); let instruction = - SetKeyValueBox::new(asset_definition_id.clone(), key.clone(), 3_u32.to_value()); - let register_trigger = RegisterBox::new(Trigger::new( + SetKeyValueExpr::new(asset_definition_id.clone(), key.clone(), 3_u32.to_value()); + let register_trigger = RegisterExpr::new(Trigger::new( "change_rose_metadata".parse().expect("Valid"), Action::new( vec![instruction], @@ -145,8 +142,8 @@ fn 
pre_commit_trigger_should_be_executed() -> Result<()> { // Start listening BEFORE submitting any transaction not to miss any block committed event let event_listener = get_block_committed_event_listener(&test_client)?; - let instruction = MintBox::new(1_u32, asset_id.clone()); - let register_trigger = RegisterBox::new(Trigger::new( + let instruction = MintExpr::new(1_u32, asset_id.clone()); + let register_trigger = RegisterExpr::new(Trigger::new( "mint_rose".parse()?, Action::new( vec![instruction], @@ -163,7 +160,7 @@ fn pre_commit_trigger_should_be_executed() -> Result<()> { prev_value = new_value; // ISI just to create a new block - let sample_isi = SetKeyValueBox::new( + let sample_isi = SetKeyValueExpr::new( account_id.clone(), "key".parse::()?, String::from("value"), @@ -182,9 +179,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { let (_rt, _peer, mut test_client) = ::new().with_port(10_780).start_with_runtime(); wait_for_genesis_committed(&vec![test_client.clone()], 0); - let alice_id = "alice@wonderland" - .parse::<::Id>() - .expect("Valid"); + let alice_id = "alice@wonderland".parse::().expect("Valid"); let accounts: Vec = vec![ alice_id.clone(), @@ -199,7 +194,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { .iter() .skip(1) // Alice has already been registered in genesis .cloned() - .map(|account_id| RegisterBox::new(Account::new(account_id, []))) + .map(|account_id| RegisterExpr::new(Account::new(account_id, []))) .collect::>(); test_client.submit_all_blocking(register_accounts)?; @@ -209,6 +204,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { let wasm = iroha_wasm_builder::Builder::new( "tests/integration/smartcontracts/create_nft_for_every_user_trigger", ) + .show_output() .build()? .optimize()? .into_bytes()?; @@ -222,7 +218,7 @@ fn mint_nft_for_every_user_every_1_sec() -> Result<()> { let start_time = current_time(); let schedule = TimeSchedule::starting_at(start_time).with_period(Duration::from_millis(TRIGGER_PERIOD_MS)); - let register_trigger = RegisterBox::new(Trigger::new( + let register_trigger = RegisterExpr::new(Trigger::new( "mint_nft_for_all".parse()?, Action::new( WasmSmartContract::from_compiled(wasm), @@ -297,7 +293,7 @@ fn submit_sample_isi_on_every_block_commit( for _ in block_committed_event_listener.take(times) { std::thread::sleep(timeout); // ISI just to create a new block - let sample_isi = SetKeyValueBox::new( + let sample_isi = SetKeyValueExpr::new( account_id.clone(), "key".parse::()?, String::from("value"), diff --git a/client/tests/integration/tx_history.rs b/client/tests/integration/tx_history.rs index af47084ca8f..3d85aa77740 100644 --- a/client/tests/integration/tx_history.rs +++ b/client/tests/integration/tx_history.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{ num::{NonZeroU32, NonZeroU64}, str::FromStr as _, @@ -24,14 +22,14 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> // Given let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); client.submit_blocking(create_asset)?; //When let quantity: u32 = 200; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - let mint_existed_asset = MintBox::new(quantity.to_value(), IdBox::AssetId(asset_id)); - let mint_not_existed_asset = 
MintBox::new( + let mint_existed_asset = MintExpr::new(quantity.to_value(), IdBox::AssetId(asset_id)); + let mint_not_existed_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( AssetDefinitionId::from_str("foo#wonderland")?, @@ -47,20 +45,19 @@ fn client_has_rejected_and_acepted_txs_should_return_tx_history() -> Result<()> } else { &mint_not_existed_asset }; - let instructions: Vec = vec![mint_asset.clone().into()]; + let instructions: Vec = vec![mint_asset.clone().into()]; let transaction = client.build_transaction(instructions, UnlimitedMetadata::new())?; client.submit_transaction(&transaction)?; } thread::sleep(pipeline_time * 5); let transactions = client - .request_with_pagination( - transaction::by_account_id(account_id.clone()), - Pagination { - limit: NonZeroU32::new(50), - start: NonZeroU64::new(1), - }, - )? + .build_query(transaction::by_account_id(account_id.clone())) + .with_pagination(Pagination { + limit: NonZeroU32::new(50), + start: NonZeroU64::new(1), + }) + .execute()? .collect::>>()?; assert_eq!(transactions.len(), 50); diff --git a/client/tests/integration/tx_rollback.rs b/client/tests/integration/tx_rollback.rs index e65037e9d7d..b7c03e2e20b 100644 --- a/client/tests/integration/tx_rollback.rs +++ b/client/tests/integration/tx_rollback.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::str::FromStr as _; use eyre::Result; @@ -16,16 +14,16 @@ fn client_sends_transaction_with_invalid_instruction_should_not_see_any_changes( let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland")?; let wrong_asset_definition_id = AssetDefinitionId::from_str("ksor#wonderland")?; - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id)); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); let quantity: u32 = 200; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( wrong_asset_definition_id.clone(), account_id.clone(), )), ); - let instructions: [InstructionBox; 2] = [create_asset.into(), mint_asset.into()]; + let instructions: [InstructionExpr; 2] = [create_asset.into(), mint_asset.into()]; let _ = client.submit_all_blocking(instructions); //Then diff --git a/client/tests/integration/unregister_peer.rs b/client/tests/integration/unregister_peer.rs index 817e90ded4e..70d91345ea9 100644 --- a/client/tests/integration/unregister_peer.rs +++ b/client/tests/integration/unregister_peer.rs @@ -1,4 +1,3 @@ -#![allow(clippy::restriction)] use std::thread; use eyre::Result; @@ -17,43 +16,42 @@ use super::Configuration; #[test] fn unstable_network_stable_after_add_and_after_remove_peer() -> Result<()> { // Given a network - let (rt, network, mut genesis_client, pipeline_time, account_id, asset_definition_id) = init()?; + let (rt, network, genesis_client, pipeline_time, account_id, asset_definition_id) = init()?; wait_for_genesis_committed(&network.clients(), 0); // When assets are minted mint( &asset_definition_id, &account_id, - &mut genesis_client, + &genesis_client, pipeline_time, 100, )?; // and a new peer is registered - let (peer, mut peer_client) = rt.block_on(network.add_peer()); + let (peer, peer_client) = rt.block_on(network.add_peer()); // Then the new peer should already have the mint result. 
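The body of `check_assets` sits outside the hunks shown here; in spirit it is a polling assertion that re-queries the peer until the expected total appears. A rough sketch of such a helper, built on the query-builder API introduced elsewhere in this diff (the helper name and retry budget are illustrative, not the actual implementation):

```rust
// Poll a peer until `account_id` holds `expected` units of the given
// asset definition, or give up after a fixed number of attempts.
fn wait_for_asset_quantity(
    client: &client::Client,
    account_id: &AccountId,
    definition_id: &AssetDefinitionId,
    expected: u32,
) {
    let target_id = AssetId::new(definition_id.clone(), account_id.clone());
    for _ in 0..30 {
        let assets = client
            .build_query(client::asset::by_account_id(account_id.clone()))
            .execute()
            .expect("Valid")
            .collect::<QueryResult<Vec<Asset>>>()
            .expect("Valid");
        if assets.iter().any(|asset| {
            asset.id() == &target_id
                && matches!(asset.value(), AssetValue::Quantity(q) if *q == expected)
        }) {
            return;
        }
        std::thread::sleep(std::time::Duration::from_millis(500));
    }
    panic!("asset quantity never reached {expected}");
}
```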
- check_assets(&mut peer_client, &account_id, &asset_definition_id, 100); + check_assets(&peer_client, &account_id, &asset_definition_id, 100); // Also, when a peer is unregistered - let remove_peer = UnregisterBox::new(IdBox::PeerId(peer.id.clone())); + let remove_peer = UnregisterExpr::new(IdBox::PeerId(peer.id.clone())); genesis_client.submit(remove_peer)?; thread::sleep(pipeline_time * 2); // We can mint without error. mint( &asset_definition_id, &account_id, - &mut genesis_client, + &genesis_client, pipeline_time, 200, )?; // Assets are increased on the main network. - check_assets(&mut genesis_client, &account_id, &asset_definition_id, 300); + check_assets(&genesis_client, &account_id, &asset_definition_id, 300); // But not on the unregistered peer's network. - check_assets(&mut peer_client, &account_id, &asset_definition_id, 100); + check_assets(&peer_client, &account_id, &asset_definition_id, 100); Ok(()) } -#[allow(clippy::expect_used)] fn check_assets( - iroha_client: &mut client::Client, + iroha_client: &client::Client, account_id: &AccountId, asset_definition_id: &AssetDefinitionId, quantity: u32, @@ -78,11 +76,11 @@ fn check_assets( fn mint( asset_definition_id: &AssetDefinitionId, account_id: &AccountId, - client: &mut client::Client, + client: &client::Client, pipeline_time: std::time::Duration, quantity: u32, ) -> Result { - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), @@ -95,7 +93,6 @@ fn mint( Ok(quantity) } -#[allow(clippy::expect_used)] fn init() -> Result<( tokio::runtime::Runtime, test_network::Network, @@ -110,13 +107,13 @@ fn init() -> Result<( let parameters = ParametersBuilder::new() .add_parameter(MAX_TRANSACTIONS_IN_BLOCK, 1u32)? 
.into_set_parameters(); - let create_domain = RegisterBox::new(Domain::new("domain".parse()?)); + let create_domain = RegisterExpr::new(Domain::new("domain".parse()?)); let account_id: AccountId = "account@domain".parse()?; let (public_key, _) = KeyPair::generate()?.into(); - let create_account = RegisterBox::new(Account::new(account_id.clone(), [public_key])); + let create_account = RegisterExpr::new(Account::new(account_id.clone(), [public_key])); let asset_definition_id: AssetDefinitionId = "xor#domain".parse()?; - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); - let instructions: [InstructionBox; 4] = [ + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); + let instructions: [InstructionExpr; 4] = [ parameters.into(), create_domain.into(), create_account.into(), diff --git a/client/tests/integration/unstable_network.rs b/client/tests/integration/unstable_network.rs index 8b2f911938c..f7f566b8aed 100644 --- a/client/tests/integration/unstable_network.rs +++ b/client/tests/integration/unstable_network.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use core::sync::atomic::Ordering; use std::thread; @@ -52,7 +50,7 @@ fn unstable_network( } let rt = Runtime::test(); // Given - let (network, mut iroha_client) = rt.block_on(async { + let (network, iroha_client) = rt.block_on(async { let mut configuration = Configuration::test(); configuration.sumeragi.max_transactions_in_block = MAX_TRANSACTIONS_IN_BLOCK; configuration.logger.max_log_level = Level::INFO.into(); @@ -68,10 +66,7 @@ fn unstable_network( ) .await .expect("Failed to init peers"); - let client = Client::test( - &network.genesis.api_address, - &network.genesis.telemetry_address, - ); + let client = Client::test(&network.genesis.api_address); (network, client) }); wait_for_genesis_committed(&network.clients(), n_offline_peers); @@ -80,7 +75,7 @@ fn unstable_network( let account_id: AccountId = "alice@wonderland".parse().expect("Valid"); let asset_definition_id: AssetDefinitionId = "camomile#wonderland".parse().expect("Valid"); - let register_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + let register_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); iroha_client .submit_blocking(register_asset) .expect("Failed to register asset"); @@ -102,7 +97,7 @@ fn unstable_network( } let quantity = 1; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new( asset_definition_id.clone(), diff --git a/client/tests/integration/upgrade.rs b/client/tests/integration/upgrade.rs index 368b34bcfcb..63490b138e9 100644 --- a/client/tests/integration/upgrade.rs +++ b/client/tests/integration/upgrade.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{path::Path, str::FromStr as _}; use eyre::Result; @@ -11,25 +9,25 @@ use serde_json::json; use test_network::*; #[test] -fn validator_upgrade_should_work() -> Result<()> { +fn executor_upgrade_should_work() -> Result<()> { let (_rt, _peer, client) = ::new().with_port(10_795).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); // Register `admin` domain and account let admin_domain = Domain::new("admin".parse()?); - let register_admin_domain = RegisterBox::new(admin_domain); + let register_admin_domain = RegisterExpr::new(admin_domain); client.submit_blocking(register_admin_domain)?; - let admin_id: ::Id = "admin@admin".parse()?; + let admin_id: AccountId = 
"admin@admin".parse()?; let admin_keypair = KeyPair::generate()?; let admin_account = Account::new(admin_id.clone(), [admin_keypair.public_key().clone()]); - let register_admin_account = RegisterBox::new(admin_account); + let register_admin_account = RegisterExpr::new(admin_account); client.submit_blocking(register_admin_account)?; // Check that admin isn't allowed to transfer alice's rose by default - let alice_rose: ::Id = "rose##alice@wonderland".parse()?; - let admin_rose: ::Id = "admin@admin".parse()?; - let transfer_alice_rose = TransferBox::new(alice_rose, NumericValue::U32(1), admin_rose); + let alice_rose: AssetId = "rose##alice@wonderland".parse()?; + let admin_rose: AccountId = "admin@admin".parse()?; + let transfer_alice_rose = TransferExpr::new(alice_rose, NumericValue::U32(1), admin_rose); let transfer_rose_tx = TransactionBuilder::new(admin_id.clone()) .with_instructions([transfer_alice_rose.clone()]) .sign(admin_keypair.clone())?; @@ -37,9 +35,9 @@ fn validator_upgrade_should_work() -> Result<()> { .submit_transaction_blocking(&transfer_rose_tx) .expect_err("Should fail"); - upgrade_validator( + upgrade_executor( &client, - "tests/integration/smartcontracts/validator_with_admin", + "tests/integration/smartcontracts/executor_with_admin", )?; // Check that admin can transfer alice's rose now @@ -55,7 +53,7 @@ fn validator_upgrade_should_work() -> Result<()> { } #[test] -fn validator_upgrade_should_run_migration() -> Result<()> { +fn executor_upgrade_should_run_migration() -> Result<()> { let (_rt, _peer, client) = ::new().with_port(10_990).start_with_runtime(); wait_for_genesis_committed(&vec![client.clone()], 0); @@ -79,9 +77,9 @@ fn validator_upgrade_should_run_migration() -> Result<()> { &json!({ "domain_id": DomainId::from_str("wonderland").unwrap() }), ))); - upgrade_validator( + upgrade_executor( &client, - "tests/integration/smartcontracts/validator_with_custom_token", + "tests/integration/smartcontracts/executor_with_custom_token", )?; // Check that `CanUnregisterDomain` doesn't exist @@ -128,31 +126,32 @@ fn migration_fail_should_not_cause_any_effects() { "failed_migration_test_domain".parse().expect("Valid"); assert_domain_does_not_exist(&client, &domain_registered_in_migration); - let _err = upgrade_validator( + let _err = upgrade_executor( &client, - "tests/integration/smartcontracts/validator_with_migration_fail", + "tests/integration/smartcontracts/executor_with_migration_fail", ) .expect_err("Upgrade should fail due to migration failure"); // Checking that things registered in migration does not exist after failed migration assert_domain_does_not_exist(&client, &domain_registered_in_migration); - // The fact that query in previous assertion does not fail means that validator haven't - // been changed, because `validator_with_migration_fail` does not allow any queries + // The fact that query in previous assertion does not fail means that executor haven't + // been changed, because `executor_with_migration_fail` does not allow any queries } -fn upgrade_validator(client: &Client, validator: impl AsRef) -> Result<()> { - info!("Building validator"); +fn upgrade_executor(client: &Client, executor: impl AsRef) -> Result<()> { + info!("Building executor"); - let wasm = iroha_wasm_builder::Builder::new(validator.as_ref()) + let wasm = iroha_wasm_builder::Builder::new(executor.as_ref()) + .show_output() .build()? .optimize()? 
.into_bytes()?; info!("WASM size is {} bytes", wasm.len()); - let upgrade_validator = UpgradeBox::new(Validator::new(WasmSmartContract::from_compiled(wasm))); - client.submit_blocking(upgrade_validator)?; + let upgrade_executor = UpgradeExpr::new(Executor::new(WasmSmartContract::from_compiled(wasm))); + client.submit_blocking(upgrade_executor)?; Ok(()) } diff --git a/client/tests/wasm/utils.rs b/client/tests/wasm/utils.rs index 5aefbd7dd06..53f3516ea68 100644 --- a/client/tests/wasm/utils.rs +++ b/client/tests/wasm/utils.rs @@ -1,9 +1,3 @@ -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] - /// Return string containing exported memory, dummy allocator, and /// host function imports which you can embed into your wasm module. /// @@ -41,8 +35,8 @@ pub fn wasm_template(hex_val: &str) -> String { nop) "#, memory_name = "memory", - alloc_fn_name = "_iroha_wasm_alloc", - dealloc_fn_name = "_iroha_wasm_dealloc", + alloc_fn_name = "_iroha_smart_contract_alloc", + dealloc_fn_name = "_iroha_smart_contract_dealloc", execute_instruction = "execute_instruction", execute_query = "execute_query", hex_val = escape_hex(hex_val), diff --git a/client_cli/Cargo.toml b/client_cli/Cargo.toml index b75ecf0bc3c..29ee5b8827b 100644 --- a/client_cli/Cargo.toml +++ b/client_cli/Cargo.toml @@ -14,6 +14,9 @@ license.workspace = true keywords.workspace = true categories = ["cryptography::cryptocurrencies", "command-line-utilities"] +[lints] +workspace = true + [badges] is-it-maintained-issue-resolution = { repository = "https://github.com/hyperledger/iroha" } is-it-maintained-open-issues = { repository = "https://github.com/hyperledger/iroha" } @@ -28,13 +31,13 @@ iroha_config = { workspace = true } color-eyre = { workspace = true } # TODO: migrate to clap v4 (and use the workspace dependency) -clap = { version = "3.2.23", features = ["derive"] } -dialoguer = { version = "0.10.2", default-features = false } +clap = { version = "3.2.25", features = ["derive"] } +dialoguer = { version = "0.11.0", default-features = false } json5 = { workspace = true } once_cell = { workspace = true } serde_json = { workspace = true } -erased-serde = "0.3.24" +erased-serde = "0.3.31" [build-dependencies] -vergen = { version = "8.1.1", default-features = false } +vergen = { version = "8.2.5", default-features = false } color-eyre = "0.6.2" diff --git a/client_cli/README.md b/client_cli/README.md index 52e8a7e37d2..a2242468444 100644 --- a/client_cli/README.md +++ b/client_cli/README.md @@ -141,7 +141,7 @@ Generally it looks like this: ./iroha_client_cli ENTITY list filter PREDICATE ``` -Where ENTITY is asset, account or domain and PREDICATE is condition used for filtering serialized using JSON (check `ValuePredicate` and `GenericPredicateBox` in [schema](https://github.com/hyperledger/iroha/blob/iroha2-dev/docs/source/references/schema.json) for reference). +Where ENTITY is asset, account or domain and PREDICATE is condition used for filtering serialized using JSON5 (check `iroha_data_model::predicate::value::ValuePredicate` type). Examples: diff --git a/client_cli/pytests/README.md b/client_cli/pytests/README.md index 7500ba95e7d..fb740fc04b8 100644 --- a/client_cli/pytests/README.md +++ b/client_cli/pytests/README.md @@ -39,27 +39,29 @@ This test framework uses [Poetry](https://python-poetry.org/) for dependency man 1. Install Poetry by following the [official installation guide](https://python-poetry.org/docs/#installation). -2. 
Navigate to the `client_cli/pytest` directory in your terminal. +2. Navigate to the `client_cli/pytests` directory in your terminal. 3. Install the dependencies and set up a virtual environment using Poetry: - - ```bash - poetry install - -4. Activate the virtual environment: - ```bash - poetry shell -Now, you should be in the virtual environment with all the required dependencies installed. All the subsequent commands (e.g., pytest, allure) should be executed within this virtual environment. - -5. When you're done working in the virtual environment, deactivate it by running: ```bash - exit + poetry install + ``` +4. Activate the virtual environment: + ```bash + poetry shell + ``` + Now, you should be in the virtual environment with all the required dependencies installed. All the subsequent commands (e.g., pytest, allure) should be executed within this virtual environment. + 5. When you're done working in the virtual environment, deactivate it by running: + ```bash + exit + ``` ## Run tests To run tests and generate a report in the allure-results folder, execute the following command: -```pytest -k "not xfail" --alluredir allure-results``` +```bash +pytest -k "not xfail" --alluredir allure-results +``` The `--alluredir` option specifies the directory where the report should be stored. @@ -67,7 +69,9 @@ The `--alluredir` option specifies the directory where the report should be stor To launch a web server that serves the Allure report generated, run: -```allure serve allure-results``` +```bash +allure serve allure-results +``` The `allure-results` argument specifies the directory where the report is stored. After running this command, you should be able to view the report in your web browser by navigating to `http://localhost:port`, where port is the port number displayed in the console output. diff --git a/client_cli/pytests/src/client_cli/client_cli.py b/client_cli/pytests/src/client_cli/client_cli.py index 9e46b2b70a8..f715f31b246 100644 --- a/client_cli/pytests/src/client_cli/client_cli.py +++ b/client_cli/pytests/src/client_cli/client_cli.py @@ -246,18 +246,23 @@ def should(self, _expected): """ return self - def execute(self): + def execute(self, command=None): """ Executes the command and captures stdout and stderr. :return: The current ClientCli object. :rtype: ClientCli """ - command = '\n'.join(self.command) - with allure.step(f'{command} on the {str(self.config.torii_api_port)} peer'): + if command is None: + command = self.command + else: + command = [self.BASE_PATH] + self.BASE_FLAGS + command.split() + allure_command = ' '.join(map(str, command[3:])) + print(allure_command) + with allure.step(f'{allure_command} on the {str(self.config.torii_api_port)} peer'): try: with subprocess.Popen( - self.command, + command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True diff --git a/client_cli/pytests/src/client_cli/have.py b/client_cli/pytests/src/client_cli/have.py index e9946303056..3bb51878891 100644 --- a/client_cli/pytests/src/client_cli/have.py +++ b/client_cli/pytests/src/client_cli/have.py @@ -21,17 +21,26 @@ def expected_in_actual(expected, actual) -> bool: return expected in actual -def domain(expected): +def domain(expected, owned_by=None): """ Check if the expected domain is present in the list of domains. + Optionally checks if the domain is owned by a specific owner. :param expected: The expected domain object. - :return: True if the domain is present, False otherwise. + :param owned_by: The owner of the domain, default is None. 
+ :return: True if the domain is present (and owned by the specified owner if provided), False otherwise. """ def domain_in_domains() -> bool: domains = iroha.list_filter(f'{{"Identifiable": {{"Is": "{expected}"}}}}').domains() - return expected_in_actual(expected, domains) + if not expected_in_actual(expected, domains): + return False + if owned_by: + domain_info = domains.get(expected) + if not domain_info or domain_info.get('owned_by') != str(owned_by): + return False + + return True return client_cli.wait_for(domain_in_domains) diff --git a/client_cli/pytests/src/client_cli/iroha.py b/client_cli/pytests/src/client_cli/iroha.py index 60ccc19ce3a..38174fefeac 100644 --- a/client_cli/pytests/src/client_cli/iroha.py +++ b/client_cli/pytests/src/client_cli/iroha.py @@ -43,7 +43,7 @@ def should(self, _expected): """ return self - def domains(self) -> List[str]: + def domains(self) -> Dict[str, Dict]: """ Retrieve domains from the Iroha network and return then as list of ids. @@ -52,8 +52,8 @@ def domains(self) -> List[str]: """ self._execute_command('domain') domains = json.loads(self.stdout) - domains = [domain["id"] for domain in domains] - return domains + domains_dict = { domain["id"]: domain for domain in domains } + return domains_dict def accounts(self) -> List[str]: """ diff --git a/client_cli/pytests/test/domains/conftest.py b/client_cli/pytests/test/domains/conftest.py index 38016b6a130..470411fe5a5 100644 --- a/client_cli/pytests/test/domains/conftest.py +++ b/client_cli/pytests/test/domains/conftest.py @@ -10,6 +10,9 @@ GIVEN_string_with_reserved_character, GIVEN_string_with_whitespaces, GIVEN_existing_domain_with_uppercase_letter, + GIVEN_currently_authorized_account, + GIVEN_new_one_existing_account, + GIVEN_public_key, before_each) @pytest.fixture(scope="function", autouse=True) diff --git a/client_cli/pytests/test/domains/test_register_domains.py b/client_cli/pytests/test/domains/test_register_domains.py index adce5246453..df921966500 100644 --- a/client_cli/pytests/test/domains/test_register_domains.py +++ b/client_cli/pytests/test/domains/test_register_domains.py @@ -15,7 +15,7 @@ def test_register_domain( GIVEN_fake_name): with allure.step( f'WHEN client_cli registers the domain name "{GIVEN_fake_name}"'): - client_cli.register().domain(GIVEN_fake_name) + client_cli.execute(f'domain register --id={GIVEN_fake_name}') with allure.step( f'THEN Iroha should have the domain name "{GIVEN_fake_name}"'): iroha.should(have.domain(GIVEN_fake_name)) diff --git a/client_cli/pytests/test/domains/test_transfer_domains.py b/client_cli/pytests/test/domains/test_transfer_domains.py new file mode 100644 index 00000000000..ac8cb4d0f3d --- /dev/null +++ b/client_cli/pytests/test/domains/test_transfer_domains.py @@ -0,0 +1,26 @@ +import allure +import pytest + +from src.client_cli import client_cli, iroha, have + +@pytest.fixture(scope="function", autouse=True) +def story_account_transfers_domain(): + allure.dynamic.story('Account transfers a domain') + allure.dynamic.label('permission', 'no_permission_required') + +@allure.label('sdk_test_id', 'transfer_domain_successfully') +def test_transfer_domain( + GIVEN_currently_authorized_account, + GIVEN_new_one_existing_account, + GIVEN_new_one_existing_domain, +): + with allure.step( + f'WHEN {GIVEN_currently_authorized_account} transfers domains ' + f'to {GIVEN_new_one_existing_account}'): + client_cli.execute(f'domain transfer ' + f'--from={GIVEN_currently_authorized_account} ' + f'--to={GIVEN_new_one_existing_account} ' + 
f'--id={GIVEN_new_one_existing_domain.name}') + with allure.step( + f'THEN {GIVEN_new_one_existing_account} should own {GIVEN_new_one_existing_domain}'): + iroha.should(have.domain(GIVEN_new_one_existing_domain.name, owned_by=GIVEN_new_one_existing_account)) diff --git a/client_cli/src/main.rs b/client_cli/src/main.rs index b1ec7516d13..f2f98f9e05b 100644 --- a/client_cli/src/main.rs +++ b/client_cli/src/main.rs @@ -1,16 +1,4 @@ //! iroha client command line -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] -#![allow( - missing_docs, - clippy::print_stdout, - clippy::use_debug, - clippy::print_stderr -)] - use std::{ fmt, fs::{self, read as read_file}, @@ -58,7 +46,7 @@ impl FromStr for Metadata { /// Client configuration wrapper. Allows getting itself from arguments from cli (from user supplied file). #[derive(Debug, Clone)] -pub struct Configuration(pub ClientConfiguration); +struct Configuration(pub ClientConfiguration); impl FromStr for Configuration { type Err = Error; @@ -73,8 +61,8 @@ impl FromStr for Configuration { /// Iroha CLI Client provides an ability to interact with Iroha Peers Web API without direct network usage. #[derive(StructOpt, Debug)] -#[structopt(name = "iroha_client_cli", version = concat!(env!("CARGO_PKG_VERSION")), author)] -pub struct Args { +#[structopt(name = "iroha_client_cli", version = concat!("version=", env!("CARGO_PKG_VERSION"), " git_commit_sha=", env!("VERGEN_GIT_SHA")), author)] +struct Args { /// Sets a config file path #[structopt(short, long)] config: Option, @@ -92,7 +80,7 @@ pub struct Args { } #[derive(StructOpt, Debug)] -pub enum Subcommand { +enum Subcommand { /// The subcommand related to domains #[clap(subcommand)] Domain(domain::Args), @@ -117,7 +105,7 @@ pub enum Subcommand { } /// Context inside which command is executed -pub trait RunContext { +trait RunContext { /// Get access to configuration fn configuration(&self) -> &ClientConfiguration; @@ -154,7 +142,7 @@ impl RunContext for PrintJsonContext { } /// Runs subcommand -pub trait RunArgs { +trait RunArgs { /// Runs command /// /// # Errors @@ -196,7 +184,6 @@ fn main() -> Result<()> { config } else { let config_path = ConfigPath::default(&DEFAULT_CONFIG_PATH); - #[allow(clippy::expect_used)] Configuration::from_str( config_path .first_existing_path() @@ -231,7 +218,7 @@ fn main() -> Result<()> { /// # Errors /// Fails if submitting over network fails #[allow(clippy::shadow_unrelated)] -pub fn submit( +fn submit( instructions: impl Into, metadata: UnlimitedMetadata, context: &mut dyn RunContext, @@ -283,14 +270,13 @@ mod filter { /// Filter for queries #[derive(Clone, Debug, clap::Parser)] pub struct Filter { - /// Predicate for filtering given as JSON string + /// Predicate for filtering given as JSON5 string #[clap(value_parser = parse_filter)] pub predicate: PredicateBox, } fn parse_filter(s: &str) -> Result { - serde_json::from_str(s) - .map_err(|err| format!("Failed to deserialize filter from JSON: {err}")) + json5::from_str(s).map_err(|err| format!("Failed to deserialize filter from JSON5: {err}")) } } @@ -321,7 +307,7 @@ mod events { } } - pub fn listen(filter: FilterBox, context: &mut dyn RunContext) -> Result<()> { + fn listen(filter: FilterBox, context: &mut dyn RunContext) -> Result<()> { let iroha_client = Client::new(context.configuration())?; eprintln!("Listening to events with filter: {filter:?}"); iroha_client @@ -353,7 +339,7 @@ mod blocks { } } - pub fn listen(height: NonZeroU64, context: &mut dyn 
RunContext) -> Result<()> { + fn listen(height: NonZeroU64, context: &mut dyn RunContext) -> Result<()> { let iroha_client = Client::new(context.configuration())?; eprintln!("Listening to blocks from height: {height}"); iroha_client @@ -377,11 +363,13 @@ mod domain { /// List domains #[clap(subcommand)] List(List), + /// Transfer domain + Transfer(Transfer), } impl RunArgs for Args { fn run(self, context: &mut dyn RunContext) -> Result<()> { - match_all!((self, context), { Args::Register, Args::List }) + match_all!((self, context), { Args::Register, Args::List, Args::Transfer }) } } @@ -402,7 +390,7 @@ mod domain { id, metadata: Metadata(metadata), } = self; - let create_domain = RegisterBox::new(Domain::new(id)); + let create_domain = RegisterExpr::new(Domain::new(id)); submit([create_domain], metadata, context).wrap_err("Failed to create domain") } } @@ -425,13 +413,45 @@ mod domain { .request(client::domain::all()) .wrap_err("Failed to get all domains"), Self::Filter(filter) => client - .request_with_filter(client::domain::all(), filter.predicate) + .build_query(client::domain::all()) + .with_filter(filter.predicate) + .execute() .wrap_err("Failed to get filtered domains"), }?; context.print_data(&vec.collect::>>()?)?; Ok(()) } } + + /// Transfer a domain between accounts + #[derive(Debug, StructOpt)] + pub struct Transfer { + /// Domain name as double-quoted string + #[structopt(short, long)] + pub id: DomainId, + /// Account from which to transfer (in form `name@domain_name') + #[structopt(short, long)] + pub from: AccountId, + /// Account to which to transfer (in form `name@domain_name') + #[structopt(short, long)] + pub to: AccountId, + /// The JSON/JSON5 file with key-value metadata pairs + #[structopt(short, long, default_value = "")] + pub metadata: super::Metadata, + } + + impl RunArgs for Transfer { + fn run(self, context: &mut dyn RunContext) -> Result<()> { + let Self { + id, + from, + to, + metadata: Metadata(metadata), + } = self; + let transfer_domain = TransferExpr::new(from, id, to); + submit([transfer_domain], metadata, context).wrap_err("Failed to transfer domain") + } + } } mod account { @@ -491,7 +511,7 @@ mod account { key, metadata: Metadata(metadata), } = self; - let create_account = RegisterBox::new(Account::new(id, [key])); + let create_account = RegisterExpr::new(Account::new(id, [key])); submit([create_account], metadata, context).wrap_err("Failed to register account") } } @@ -519,8 +539,9 @@ mod account { let deser_err_msg = format!("Failed to deserialize signature condition from file {}", &s); let content = fs::read_to_string(s).wrap_err(err_msg)?; - let condition: EvaluatesTo = json5::from_str(&content).wrap_err(deser_err_msg)?; - Ok(Self(SignatureCheckCondition::new(condition))) + let condition: SignatureCheckCondition = + json5::from_str(&content).wrap_err(deser_err_msg)?; + Ok(Self(condition)) } } @@ -541,7 +562,7 @@ mod account { condition: Signature(condition), metadata: Metadata(metadata), } = self; - let mint_box = MintBox::new(account, EvaluatesTo::new_unchecked(condition)); + let mint_box = MintExpr::new(account, EvaluatesTo::new_unchecked(condition)); submit([mint_box], metadata, context).wrap_err("Failed to set signature condition") } } @@ -564,7 +585,9 @@ mod account { .request(client::account::all()) .wrap_err("Failed to get all accounts"), Self::Filter(filter) => client - .request_with_filter(client::account::all(), filter.predicate) + .build_query(client::account::all()) + .with_filter(filter.predicate) + .execute() .wrap_err("Failed to get 
filtered accounts"), }?; context.print_data(&vec.collect::>>()?)?; @@ -576,7 +599,7 @@ mod account { pub struct Grant { /// Account id #[structopt(short, long)] - pub id: ::Id, + pub id: AccountId, /// The JSON/JSON5 file with a permission token #[structopt(short, long)] pub permission: Permission, @@ -610,7 +633,7 @@ mod account { permission, metadata: Metadata(metadata), } = self; - let grant = GrantBox::new(permission.0, id); + let grant = GrantExpr::new(permission.0, id); submit([grant], metadata, context) .wrap_err("Failed to grant the permission to the account") } @@ -621,7 +644,7 @@ mod account { pub struct ListPermissions { /// Account id #[structopt(short, long)] - id: ::Id, + id: AccountId, } impl RunArgs for ListPermissions { @@ -703,7 +726,7 @@ mod asset { if unmintable { asset_definition = asset_definition.mintable_once(); } - let create_asset_definition = RegisterBox::new(asset_definition); + let create_asset_definition = RegisterExpr::new(asset_definition); submit([create_asset_definition], metadata, context) .wrap_err("Failed to register asset") } @@ -734,7 +757,7 @@ mod asset { quantity, metadata: Metadata(metadata), } = self; - let mint_asset = MintBox::new( + let mint_asset = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset, account)), ); @@ -768,7 +791,7 @@ mod asset { quantity, metadata: Metadata(metadata), } = self; - let burn_asset = BurnBox::new( + let burn_asset = BurnExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset, account)), ); @@ -783,7 +806,7 @@ mod asset { /// Account from which to transfer (in form `name@domain_name') #[structopt(short, long)] pub from: AccountId, - /// Account from which to transfer (in form `name@domain_name') + /// Account to which to transfer (in form `name@domain_name') #[structopt(short, long)] pub to: AccountId, /// Asset id to transfer (in form like `name#domain_name') @@ -806,7 +829,7 @@ mod asset { quantity, metadata: Metadata(metadata), } = self; - let transfer_asset = TransferBox::new( + let transfer_asset = TransferExpr::new( IdBox::AssetId(AssetId::new(asset_id, from)), quantity.to_value(), IdBox::AccountId(to), @@ -857,7 +880,9 @@ mod asset { .request(client::asset::all()) .wrap_err("Failed to get all assets"), Self::Filter(filter) => client - .request_with_filter(client::asset::all(), filter.predicate) + .build_query(client::asset::all()) + .with_filter(filter.predicate) + .execute() .wrap_err("Failed to get filtered assets"), }?; context.print_data(&vec.collect::>>()?)?; @@ -908,7 +933,7 @@ mod peer { key, metadata: Metadata(metadata), } = self; - let register_peer = RegisterBox::new(Peer::new(PeerId::new(&address, &key))); + let register_peer = RegisterExpr::new(Peer::new(PeerId::new(&address, &key))); submit([register_peer], metadata, context).wrap_err("Failed to register peer") } } @@ -934,7 +959,7 @@ mod peer { key, metadata: Metadata(metadata), } = self; - let unregister_peer = UnregisterBox::new(IdBox::PeerId(PeerId::new(&address, &key))); + let unregister_peer = UnregisterExpr::new(IdBox::PeerId(PeerId::new(&address, &key))); submit([unregister_peer], metadata, context).wrap_err("Failed to unregister peer") } } @@ -991,7 +1016,7 @@ mod json { reader.read_to_end(&mut raw_content)?; let string_content = String::from_utf8(raw_content)?; - let instructions: Vec = json5::from_str(&string_content)?; + let instructions: Vec = json5::from_str(&string_content)?; submit(instructions, UnlimitedMetadata::new(), context) .wrap_err("Failed to submit parsed instructions") } diff --git 
a/config/Cargo.toml b/config/Cargo.toml index 89a01d1bc68..e4caf3da84d 100644 --- a/config/Cargo.toml +++ b/config/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [dependencies] iroha_config_base = { workspace = true } iroha_data_model = { workspace = true } @@ -30,10 +33,8 @@ path-absolutize = { workspace = true } once_cell = { workspace = true } [dev-dependencies] -proptest = { workspace = true } +proptest = "1.3.1" +stacker = "0.1.15" [features] -default = [] tokio-console = [] -# Workaround to avoid activating `tokio-console` with `--all-features` flag, because `tokio-console` require `tokio_unstable` rustc flag -no-tokio-console = [] diff --git a/config/base/Cargo.toml b/config/base/Cargo.toml index 67734ac2ca7..b11b28ea577 100644 --- a/config/base/Cargo.toml +++ b/config/base/Cargo.toml @@ -7,12 +7,16 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [dependencies] iroha_config_derive = { workspace = true } iroha_crypto = { workspace = true, features = ["std"] } serde = { workspace = true, default-features = false, features = ["derive"] } serde_json = { workspace = true, features = ["alloc"] } +parking_lot = { workspace = true } json5 = { workspace = true } thiserror = { workspace = true } displaydoc = { workspace = true } diff --git a/config/base/derive/Cargo.toml b/config/base/derive/Cargo.toml index 7b446dbe35e..8aa95845755 100644 --- a/config/base/derive/Cargo.toml +++ b/config/base/derive/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true diff --git a/config/base/derive/src/lib.rs b/config/base/derive/src/lib.rs index 9ed2b75e900..f86d6af896b 100644 --- a/config/base/derive/src/lib.rs +++ b/config/base/derive/src/lib.rs @@ -1,7 +1,5 @@ //! Contains various configuration related macro definitions. -#![allow(clippy::arithmetic_side_effects, clippy::std_instead_of_core)] - use proc_macro::TokenStream; pub(crate) mod documented; diff --git a/config/base/derive/src/proxy.rs b/config/base/derive/src/proxy.rs index 8eb42326f1a..7a1e170f2e5 100644 --- a/config/base/derive/src/proxy.rs +++ b/config/base/derive/src/proxy.rs @@ -68,7 +68,6 @@ pub fn impl_override(ast: &StructWithFields) -> TokenStream { .into() } -#[allow(clippy::str_to_string)] pub fn impl_load_from_env(ast: &StructWithFields) -> TokenStream { let env_fetcher_ident = quote! { env_fetcher }; let fetch_env_trait = quote! { ::iroha_config_base::proxy::FetchEnv }; @@ -122,8 +121,11 @@ pub fn impl_load_from_env(ast: &StructWithFields) -> TokenStream { .transpose()?; }; if field.has_inner { + let maybe_map_box = gen_maybe_map_box(inner_ty); set_field.extend(quote! 
{ - let inner_proxy = <#inner_ty as #env_trait>::from_env(#env_fetcher_ident)?; + let inner_proxy = <#inner_ty as #env_trait>::from_env(#env_fetcher_ident) + #maybe_map_box + ?; let #ident = if let Some(old_inner) = #ident { Some(<#inner_ty as ::iroha_config_base::proxy::Override>::override_with(old_inner, inner_proxy)) } else { @@ -195,12 +197,7 @@ fn gen_proxy_struct(mut ast: StructWithFields) -> StructWithFields { // For fields of `Configuration` that have an inner config, the corresponding // proxy field should have a `..Proxy` type there as well if field.has_inner { - #[allow(clippy::expect_used)] - if let Type::Path(path) = &mut field.ty { - let old_ident = &path.path.segments.last().expect("Can't be empty").ident; - let new_ident = format_ident!("{}Proxy", old_ident); - path.path.segments.last_mut().expect("Can't be empty").ident = new_ident; - } + proxify_field_type(&mut field.ty); } let ty = &field.ty; field.ty = parse_quote! { @@ -229,6 +226,22 @@ fn gen_proxy_struct(mut ast: StructWithFields) -> StructWithFields { ast } +#[allow(clippy::expect_used)] +pub fn proxify_field_type(field_ty: &mut syn::Type) { + if let Type::Path(path) = field_ty { + let last_segment = path.path.segments.last_mut().expect("Can't be empty"); + if last_segment.ident == "Box" { + let box_generic = utils::extract_box_generic(last_segment); + // Recursion + proxify_field_type(box_generic) + } else { + // TODO: Wouldn't it be better to get it as an associated type? + let new_ident = format_ident!("{}Proxy", last_segment.ident); + last_segment.ident = new_ident; + } + } +} + pub fn impl_build(ast: &StructWithFields) -> TokenStream { let checked_fields = gen_none_fields_check(ast); let proxy_name = &ast.ident; @@ -258,12 +271,17 @@ fn gen_none_fields_check(ast: &StructWithFields) -> proc_macro2::TokenStream { if field.has_inner { let inner_ty = get_inner_type("Option", &field.ty); let builder_trait = quote! { ::iroha_config_base::proxy::Builder }; + + let maybe_map_box = gen_maybe_map_box(inner_ty); + quote! { #ident: <#inner_ty as #builder_trait>::build( self.#ident.ok_or( #missing_field{field: stringify!(#ident), message: ""} )? - )? + ) + #maybe_map_box + ? } } else { quote! { @@ -278,6 +296,18 @@ fn gen_none_fields_check(ast: &StructWithFields) -> proc_macro2::TokenStream { } } +fn gen_maybe_map_box(inner_ty: &syn::Type) -> proc_macro2::TokenStream { + if let Type::Path(path) = &inner_ty { + let last_segment = path.path.segments.last().expect("Can't be empty"); + if last_segment.ident == "Box" { + return quote! { + .map(Box::new) + }; + } + } + quote! {} +} + /// Helper function to be used as an empty fallback for [`impl LoadFromEnv`] or [`impl LoadFromDisk`]. /// Only meant for proxy types usage. fn gen_none_fields_proxy(ast: &StructWithFields) -> proc_macro2::TokenStream { diff --git a/config/base/derive/src/utils.rs b/config/base/derive/src/utils.rs index 239fd0c7c5c..3e8fa7e1f1c 100644 --- a/config/base/derive/src/utils.rs +++ b/config/base/derive/src/utils.rs @@ -107,7 +107,6 @@ pub struct StructField { impl StructField { fn from_ast(field: syn::Field, env_prefix: &str) -> Self { - #[allow(clippy::expect_used)] let field_ident = field .ident .expect("Already checked for named fields at parsing"); @@ -270,14 +269,13 @@ pub fn is_option_type(ty: &Type) -> bool { pub fn remove_attr_from_struct(ast: &mut StructWithFields, attr_ident: &str) { let StructWithFields { attrs, fields, .. 
} = ast; for field in fields { - remove_attr(&mut field.attrs, attr_ident) + remove_attr(&mut field.attrs, attr_ident); } remove_attr(attrs, attr_ident); } /// Keep only derive attributes passed as a second argument in struct attributes and field attributes pub fn keep_derive_attr(ast: &mut StructWithFields, kept_attrs: &[&str]) { - #[allow(clippy::expect_used)] ast.attrs .iter_mut() .filter(|attr| attr.path.is_ident("derive")) @@ -299,7 +297,7 @@ pub fn keep_derive_attr(ast: &mut StructWithFields, kept_attrs: &[&str]) { .collect(); *attr = syn::parse_quote!( #[derive(#(#items),*)] - ) + ); } }); } @@ -343,10 +341,25 @@ pub fn gen_lvalue(field_ty: &Type, field_ident: &Ident) -> (TokenStream, TokenSt /// Check if [`StructWithFields`] has `#[builder(parent = ..)]` pub fn get_parent_ty(ast: &StructWithFields) -> Type { - #[allow(clippy::expect_used)] ast.attrs .iter() .find_map(|attr| Builder::::parse(attr).ok()) .map(|builder| builder.parent) .expect("Should not be called on structs with no `#[builder(..)]` attribute") } + +pub fn extract_box_generic(box_seg: &mut syn::PathSegment) -> &mut syn::Type { + let syn::PathArguments::AngleBracketed(generics) = &mut box_seg.arguments else { + panic!("`Box` should have explicit generic"); + }; + + assert!( + generics.args.len() == 1, + "`Box` should have exactly one generic argument" + ); + let syn::GenericArgument::Type(generic_type) = generics.args.first_mut().expect("Can't be empty") else { + panic!("`Box` should have type as a generic argument") + }; + + generic_type +} diff --git a/config/base/derive/src/view.rs b/config/base/derive/src/view.rs index dd80f80b2f9..a020c7edc13 100644 --- a/config/base/derive/src/view.rs +++ b/config/base/derive/src/view.rs @@ -32,7 +32,6 @@ mod gen { ast } - #[allow(clippy::str_to_string, clippy::expect_used)] pub fn view_struct(mut ast: StructWithFields) -> StructWithFields { // Remove fields with #[view(ignore)] ast.fields.retain(is_view_field_ignored); @@ -87,6 +86,24 @@ mod gen { }) .collect::>(); + let field_froms: Vec<_> = fields + .iter() + .map(|field| { + let field_ident = &field.ident; + if let syn::Type::Path(syn::TypePath { path, .. }) = &field.ty { + let last_segment = path.segments.last().expect("Not empty"); + if last_segment.ident == "Box" { + return quote! { + #field_ident: Box::new(core::convert::From::<_>::from(*#field_ident)), + }; + } + } + quote! { + #field_ident: core::convert::From::<_>::from(#field_ident), + } + }) + .collect(); + quote! 
{ impl #impl_generics core::convert::From<#original_ident> for #view_ident #ty_generics #where_clause { fn from(config: #original_ident) -> Self { @@ -100,7 +117,7 @@ mod gen { Self { #( #(#field_cfg_attrs)* - #field_idents: core::convert::From::<_>::from(#field_idents), + #field_froms )* } } diff --git a/config/base/src/lib.rs b/config/base/src/lib.rs index 317e2ad7f11..bee2b692efc 100644 --- a/config/base/src/lib.rs +++ b/config/base/src/lib.rs @@ -501,6 +501,32 @@ pub mod proxy { ) -> Result, Self::Error>; } + impl Documented for Box { + type Error = T::Error; + + fn get_docs() -> Value { + T::get_docs() + } + + fn get_inner_docs() -> String { + T::get_inner_docs() + } + + fn get_recursive<'tl, U>(&self, inner_field: U) -> Result + where + U: AsRef<[&'tl str]> + Send + 'tl, + { + T::get_recursive(self, inner_field) + } + + #[allow(single_use_lifetimes)] // False-positive + fn get_doc_recursive<'tl>( + field: impl AsRef<[&'tl str]>, + ) -> Result, Self::Error> { + T::get_doc_recursive(field) + } + } + /// Trait for combining two configuration instances pub trait Override: Serialize + DeserializeOwned + Sized { /// If any of the fields in `other` are filled, they @@ -509,6 +535,12 @@ pub mod proxy { fn override_with(self, other: Self) -> Self; } + impl Override for Box { + fn override_with(self, other: Self) -> Self { + Box::new(T::override_with(*self, *other)) + } + } + /// Trait for configuration loading and deserialization from /// the environment pub trait LoadFromEnv: Sized { @@ -540,6 +572,14 @@ pub mod proxy { } } + impl LoadFromEnv for Box { + type ReturnValue = T::ReturnValue; + + fn from_env(fetcher: &F) -> Self::ReturnValue { + T::from_env(fetcher) + } + } + /// Abstraction over the actual implementation of how env variables are gotten /// from the environment. Necessary for mocking in tests. pub trait FetchEnv { @@ -576,6 +616,14 @@ pub mod proxy { fn build(self) -> Self::ReturnValue; } + impl Builder for Box { + type ReturnValue = T::ReturnValue; + + fn build(self) -> Self::ReturnValue { + T::build(*self) + } + } + /// Deserialization helper for proxy fields that wrap an `Option` /// /// # Errors diff --git a/config/base/src/runtime_upgrades.rs b/config/base/src/runtime_upgrades.rs index ac539aad6ab..95b69e0e13d 100644 --- a/config/base/src/runtime_upgrades.rs +++ b/config/base/src/runtime_upgrades.rs @@ -1,6 +1,4 @@ //! Module handling runtime upgrade logic. -#![allow(clippy::std_instead_of_core, clippy::std_instead_of_alloc)] - pub use serde::{Deserialize, Serialize}; use thiserror::*; @@ -144,10 +142,11 @@ pub trait Reload { pub mod handle { use std::{ fmt::{Debug, Formatter}, - sync::{Arc, Mutex}, + sync::Arc, }; use crossbeam::atomic::AtomicCell; + use parking_lot::Mutex; use serde::{Deserialize, Serialize}; use super::{Reload, ReloadError, ReloadMut, Result}; @@ -179,10 +178,7 @@ pub mod handle { /// # Errors /// [`ReloadError::Poisoned`] When the [`Mutex`] storing the reload handle is poisoned. pub fn set(&self, handle: impl ReloadMut + Send + Sync + 'static) { - *self - .inner - .lock() - .expect("Mutex in `Singleton::set` got poisoned") = Some(Box::new(handle)); + *self.inner.lock() = Some(Box::new(handle)); } } @@ -194,7 +190,7 @@ pub mod handle { impl Reload for Singleton { fn reload(&self, item: T) -> Result<()> { - match &mut *self.inner.lock().expect("Valid") { + match &mut *self.inner.lock() { Some(handle) => { handle.reload(item)?; Ok(()) @@ -302,7 +298,7 @@ pub mod handle { /// # Errors /// If [`Singleton::set`] fails. 
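All four `Box<T>` blanket impls added above follow one shape: unbox and forward to `T`, so a sub-config keeps working with the proxy machinery after `iroha.rs` below wraps it in a `Box`. A minimal, self-contained sketch of that pattern, with a simplified `Builder` trait standing in for the real `iroha_config_base::proxy::Builder`:

```rust
// Simplified stand-ins; the real trait also threads error types through.
trait Builder {
    type ReturnValue;
    fn build(self) -> Self::ReturnValue;
}

// Hypothetical proxy for one config section.
struct SectionProxy {
    init_mode: Option<String>,
}

impl Builder for SectionProxy {
    type ReturnValue = String;
    fn build(self) -> String {
        self.init_mode.unwrap_or_else(|| "strict".to_owned())
    }
}

// The blanket impl: `Box<T>` unboxes and delegates, so boxed and
// unboxed sections build identically.
impl<T: Builder> Builder for Box<T> {
    type ReturnValue = T::ReturnValue;
    fn build(self) -> Self::ReturnValue {
        T::build(*self)
    }
}

fn main() {
    let boxed: Box<SectionProxy> = Box::new(SectionProxy { init_mode: None });
    assert_eq!(boxed.build(), "strict");
}
```

Taking `self` by value and writing `T::build(*self)` avoids any `Clone` bound and makes the delegation cost nothing beyond the unbox.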
pub fn set_handle(&self, other: impl ReloadMut + Send + Sync + 'static) { - self.1.set(other) + self.1.set(other); } } diff --git a/config/base/tests/simple.rs b/config/base/tests/simple.rs index 751add85639..9084f582e30 100644 --- a/config/base/tests/simple.rs +++ b/config/base/tests/simple.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{collections::HashMap, env::VarError, ffi::OsStr}; use iroha_config_base::{ @@ -174,17 +172,14 @@ fn test_can_load_inner_without_the_wrapping_config() { env.remove_var("CONF_OPTIONAL_INNER"); let config = ConfigurationProxy::new_with_placeholders(); let env_config = ConfigurationProxy::from_env(&env).expect("valid env"); - assert_eq!(&env_config.optional_inner, &config.optional_inner) + assert_eq!(&env_config.optional_inner, &config.optional_inner); } #[test] fn test_proxy_combine_does_not_overload_with_none() { let config = ConfigurationProxy::new_with_none(); - dbg!(&config); let env_config = ConfigurationProxy::from_env(&test_env_factory()).expect("valid env"); - dbg!(&env_config); let combine_config = env_config.clone().override_with(config); - dbg!(&combine_config); assert_eq!(&env_config.optional_data, &combine_config.optional_data); } @@ -210,5 +205,5 @@ fn configuration_proxy_from_env_returns_err_on_parsing_error() { let err = Target::from_env(&Env).expect_err("Must not be parsed"); let err = eyre::Report::new(err); - assert_eq!(format!("{err:?}"), "Failed to deserialize the field `FOO`\n\nCaused by:\n JSON5: --> 1:1\n |\n 1 | not u64 for sure\n | ^---\n |\n = expected array, boolean, null, number, object, or string\n\nLocation:\n config/base/tests/simple.rs:212:15"); + assert_eq!(format!("{err:?}"), "Failed to deserialize the field `FOO`\n\nCaused by:\n JSON5: --> 1:1\n |\n 1 | not u64 for sure\n | ^---\n |\n = expected array, boolean, null, number, object, or string\n\nLocation:\n config/base/tests/simple.rs:207:15"); } diff --git a/config/iroha_test_config.json b/config/iroha_test_config.json index d31823402e1..8fd52bc793b 100644 --- a/config/iroha_test_config.json +++ b/config/iroha_test_config.json @@ -5,6 +5,7 @@ "payload": "282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B" }, "DISABLE_PANIC_TERMINAL_COLORS": false, + "EXIT_AFTER_INIT": false, "KURA": { "INIT_MODE": "strict", "BLOCK_STORE_PATH": "./storage", @@ -42,11 +43,8 @@ "TORII": { "P2P_ADDR": "127.0.0.1:1337", "API_URL": "127.0.0.1:8080", - "TELEMETRY_URL": "127.0.0.1:8180", "MAX_TRANSACTION_SIZE": 32768, - "MAX_CONTENT_LEN": 16384000, - "FETCH_SIZE": 10, - "QUERY_IDLE_TIME_MS": 30000 + "MAX_CONTENT_LEN": 16384000 }, "BLOCK_SYNC": { "GOSSIP_PERIOD_MS": 10000, @@ -121,5 +119,8 @@ "CREATE_EVERY_MS": 60000, "DIR_PATH": "./storage", "CREATION_ENABLED": true + }, + "LIVE_QUERY_STORE": { + "QUERY_IDLE_TIME_MS": 30000 } } diff --git a/config/src/block_sync.rs b/config/src/block_sync.rs index 757ecb7948a..6802fcce9c9 100644 --- a/config/src/block_sync.rs +++ b/config/src/block_sync.rs @@ -1,5 +1,4 @@ //! Module for `BlockSynchronizer`-related configuration and structs. -#![allow(clippy::std_instead_of_core)] use iroha_config_base::derive::{Documented, Proxy}; use serde::{Deserialize, Serialize}; diff --git a/config/src/client.rs b/config/src/client.rs index cf740a974de..312bb2a1737 100644 --- a/config/src/client.rs +++ b/config/src/client.rs @@ -1,5 +1,4 @@ //! 
Module for client-related configuration and structs -#![allow(clippy::std_instead_of_core, clippy::std_instead_of_alloc)] use core::str::FromStr; use std::num::NonZeroU64; @@ -81,8 +80,6 @@ pub struct Configuration { pub basic_auth: Option, /// Torii URL. pub torii_api_url: Url, - /// Status URL. - pub torii_telemetry_url: Url, /// Proposed transaction TTL in milliseconds. pub transaction_time_to_live_ms: Option, /// Transaction status wait timeout in milliseconds. @@ -106,7 +103,6 @@ impl Default for ConfigurationProxy { account_id: None, basic_auth: Some(None), torii_api_url: None, - torii_telemetry_url: None, transaction_time_to_live_ms: Some(Some(DEFAULT_TRANSACTION_TIME_TO_LIVE_MS)), transaction_status_timeout_ms: Some(DEFAULT_TRANSACTION_STATUS_TIMEOUT_MS), transaction_limits: Some(DEFAULT_TRANSACTION_LIMITS), @@ -124,8 +120,8 @@ impl ConfigurationProxy { /// /// # Errors /// - If the [`self.transaction_time_to_live_ms`] field is too small - /// - If the [`self.transaction_status_timeout_ms`] field was smaller than [`self.transaction_time_to_live_ms`] - /// - If the [`self.torii_api_url`] or [`self.torii_telemetry_url`] were malformed or had the wrong protocol + /// - If the [`self.transaction_status_timeout_ms`] field is smaller than [`self.transaction_time_to_live_ms`] + /// - If the [`self.torii_api_url`] is malformed or has the wrong protocol pub fn finish(&mut self) -> Result<()> { if let Some(Some(tx_ttl)) = self.transaction_time_to_live_ms { // Really small TTL would be detrimental to performance @@ -156,22 +152,6 @@ impl ConfigurationProxy { }); } } - if let Some(telemetry_url) = &self.torii_telemetry_url { - if telemetry_url.scheme() != "http" { - eyre::bail!(ConfigError::InsaneValue { - value: telemetry_url.to_string(), - field: "TORII_TELEMETRY_URL", - message: ", because we only support HTTP".to_owned(), - }); - } - if telemetry_url.port().is_none() { - eyre::bail!(ConfigError::InsaneValue{ - value: telemetry_url.to_string(), - field: "TORII_TELEMETRY_URL", - message: ". You haven't provided a connection port, e.g. `8180` in `http://127.0.0.1:8180`".to_owned(), - }); - } - } Ok(()) } @@ -199,13 +179,12 @@ mod tests { use proptest::prelude::*; use super::*; - use crate::torii::{uri::DEFAULT_API_ADDR, DEFAULT_TORII_TELEMETRY_ADDR}; + use crate::torii::uri::DEFAULT_API_ADDR; const CONFIGURATION_PATH: &str = "../configs/client/config.json"; prop_compose! { // TODO: make tests to check generated key validity - #[allow(clippy::expect_used)] fn arb_keys_from_seed() (seed in prop::collection::vec(any::(), 33..64)) -> (PublicKey, PrivateKey) { let (public_key, private_key) = KeyPair::generate_with_configuration(KeyGenConfiguration::default().use_seed(seed)).expect("Seed was invalid").into(); @@ -214,7 +193,6 @@ mod tests { } prop_compose! 
{ - #[allow(clippy::expect_used)] fn arb_keys_with_option() (keys in arb_keys_from_seed()) ((a, b) in (prop::option::of(Just(keys.0)), prop::option::of(Just(keys.1)))) @@ -223,8 +201,7 @@ mod tests { } } - #[allow(clippy::expect_used)] - fn placeholder_account() -> ::Id { + fn placeholder_account() -> AccountId { AccountId::from_str("alice@wonderland").expect("Invalid account Id ") } @@ -235,20 +212,18 @@ mod tests { account_id in prop::option::of(Just(placeholder_account())), basic_auth in prop::option::of(Just(None)), torii_api_url in prop::option::of(Just(format!("http://{DEFAULT_API_ADDR}").parse().unwrap())), - torii_telemetry_url in prop::option::of(Just(format!("http://{DEFAULT_TORII_TELEMETRY_ADDR}").parse().unwrap())), transaction_time_to_live_ms in prop::option::of(Just(Some(DEFAULT_TRANSACTION_TIME_TO_LIVE_MS))), transaction_status_timeout_ms in prop::option::of(Just(DEFAULT_TRANSACTION_STATUS_TIMEOUT_MS)), transaction_limits in prop::option::of(Just(DEFAULT_TRANSACTION_LIMITS)), add_transaction_nonce in prop::option::of(Just(DEFAULT_ADD_TRANSACTION_NONCE)), ) -> ConfigurationProxy { - ConfigurationProxy { public_key, private_key, account_id, basic_auth, torii_api_url, torii_telemetry_url, transaction_time_to_live_ms, transaction_status_timeout_ms, transaction_limits, add_transaction_nonce } + ConfigurationProxy { public_key, private_key, account_id, basic_auth, torii_api_url, transaction_time_to_live_ms, transaction_status_timeout_ms, transaction_limits, add_transaction_nonce } } } proptest! { #[test] - #[allow(clippy::expect_used)] fn client_proxy_build_fails_on_none(proxy in arb_proxy()) { let cfg = proxy.build(); if cfg.is_ok() { @@ -258,7 +233,6 @@ mod tests { let arb_cfg = cfg.expect("Config generated by proptest was checked to be ok by the surrounding if clause"); // Skipping keys and `basic_auth` check as they're different from the file assert_eq!(arb_cfg.torii_api_url, example_cfg.torii_api_url); - assert_eq!(arb_cfg.torii_telemetry_url, example_cfg.torii_telemetry_url); assert_eq!(arb_cfg.account_id, example_cfg.account_id); assert_eq!(arb_cfg.transaction_time_to_live_ms, example_cfg.transaction_time_to_live_ms); assert_eq!(arb_cfg.transaction_status_timeout_ms, example_cfg.transaction_status_timeout_ms); diff --git a/config/src/genesis.rs b/config/src/genesis.rs index 087eabc7114..fe51c5e33a3 100644 --- a/config/src/genesis.rs +++ b/config/src/genesis.rs @@ -1,6 +1,4 @@ //! Module with genesis configuration logic. 
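The proptest modules above (and the ones that follow for each config section) share one idiom worth spelling out: every proxy field is an `Option`, `prop::option::of(Just(default))` generates each field as independently present or absent, and `build()` must fail exactly when a required field came out `None`. A reduced, self-contained sketch of that idiom; the single `api_url` field is illustrative, not the real proxy:

```rust
use proptest::prelude::*;

prop_compose! {
    // Mirrors the config proxies: the field is independently present or
    // absent, standing in for e.g. `torii_api_url` on a ConfigurationProxy.
    fn arb_proxy()(api_url in prop::option::of(Just("127.0.0.1:8080")))
        -> Option<&'static str>
    {
        api_url
    }
}

proptest! {
    #[test]
    fn proxy_build_fails_on_none(proxy in arb_proxy()) {
        // "Building" must succeed exactly when every required field is present.
        let built = proxy.ok_or("missing TORII_API_URL");
        prop_assert_eq!(built.is_ok(), proxy.is_some());
    }
}
```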
-#![allow(clippy::std_instead_of_core)] - use iroha_config_base::derive::{view, Documented, Proxy}; use iroha_crypto::{PrivateKey, PublicKey}; use serde::{Deserialize, Serialize}; @@ -38,14 +36,13 @@ pub mod tests { use super::*; /// Key-pair used by default for test purposes - #[allow(clippy::expect_used)] fn placeholder_keypair() -> KeyPair { let public_key = "ed01204CFFD0EE429B1BDD36B3910EC570852B8BB63F18750341772FB46BC856C5CAAF" .parse() .expect("Public key not in multihash format"); let private_key = PrivateKey::from_hex( iroha_crypto::Algorithm::Ed25519, - "D748E18CE60CB30DEA3E73C9019B7AF45A8D465E3D71BCC9A5EF99A008205E534CFFD0EE429B1BDD36B3910EC570852B8BB63F18750341772FB46BC856C5CAAF".as_ref() + "D748E18CE60CB30DEA3E73C9019B7AF45A8D465E3D71BCC9A5EF99A008205E534CFFD0EE429B1BDD36B3910EC570852B8BB63F18750341772FB46BC856C5CAAF" ).expect("Private key not hex encoded"); KeyPair::new(public_key, private_key).expect("Key pair mismatch") diff --git a/config/src/iroha.rs b/config/src/iroha.rs index 779295f6b4a..c5c93be32e1 100644 --- a/config/src/iroha.rs +++ b/config/src/iroha.rs @@ -1,6 +1,4 @@ //! This module contains [`struct@Configuration`] structure and related implementation. -#![allow(clippy::std_instead_of_core)] - use std::fmt::Debug; use iroha_config_base::derive::{view, Documented, Error as ConfigError, Proxy}; @@ -24,16 +22,18 @@ view! { pub private_key: PrivateKey, /// Disable coloring of the backtrace and error report on panic pub disable_panic_terminal_colors: bool, + /// Exit after initialization for startup time testing + pub exit_after_init: bool, /// `Kura` configuration #[config(inner)] - pub kura: kura::Configuration, + pub kura: Box, /// `Sumeragi` configuration #[config(inner)] - #[view(into = sumeragi::ConfigurationView)] - pub sumeragi: sumeragi::Configuration, + #[view(into = Box)] + pub sumeragi: Box, /// `Torii` configuration #[config(inner)] - pub torii: torii::Configuration, + pub torii: Box, /// `BlockSynchronizer` configuration #[config(inner)] pub block_sync: block_sync::Configuration, @@ -42,23 +42,26 @@ view! 
{ pub queue: queue::Configuration, /// `Logger` configuration #[config(inner)] - pub logger: logger::Configuration, + pub logger: Box, /// `GenesisBlock` configuration #[config(inner)] - #[view(into = genesis::ConfigurationView)] - pub genesis: genesis::Configuration, + #[view(into = Box)] + pub genesis: Box, /// `WorldStateView` configuration #[config(inner)] - pub wsv: wsv::Configuration, + pub wsv: Box, /// Network configuration #[config(inner)] pub network: network::Configuration, /// Telemetry configuration #[config(inner)] - pub telemetry: telemetry::Configuration, + pub telemetry: Box, /// SnapshotMaker configuration #[config(inner)] - pub snapshot: snapshot::Configuration, + pub snapshot: Box, + /// LiveQueryStore configuration + #[config(inner)] + pub live_query_store: live_query_store::Configuration, } } @@ -68,17 +71,19 @@ impl Default for ConfigurationProxy { public_key: None, private_key: None, disable_panic_terminal_colors: Some(bool::default()), - kura: Some(kura::ConfigurationProxy::default()), - sumeragi: Some(sumeragi::ConfigurationProxy::default()), - torii: Some(torii::ConfigurationProxy::default()), + exit_after_init: Some(false), + kura: Some(Box::default()), + sumeragi: Some(Box::default()), + torii: Some(Box::default()), block_sync: Some(block_sync::ConfigurationProxy::default()), queue: Some(queue::ConfigurationProxy::default()), - logger: Some(logger::ConfigurationProxy::default()), - genesis: Some(genesis::ConfigurationProxy::default()), - wsv: Some(wsv::ConfigurationProxy::default()), + logger: Some(Box::default()), + genesis: Some(Box::default()), + wsv: Some(Box::default()), network: Some(network::ConfigurationProxy::default()), - telemetry: Some(telemetry::ConfigurationProxy::default()), - snapshot: Some(snapshot::ConfigurationProxy::default()), + telemetry: Some(Box::default()), + snapshot: Some(Box::default()), + live_query_store: Some(live_query_store::ConfigurationProxy::default()), } } } @@ -91,7 +96,6 @@ impl ConfigurationProxy { /// /// # Errors /// - If the relevant uppermost Iroha config fields were not provided. 
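Boxing the heavyweight sections above is what keeps the top-level `Configuration` and its proxy small: a boxed field contributes one pointer to the struct no matter how large the section grows, which is also why the `stacker`-based test workaround further down only needs a few megabytes. A self-contained sketch of the effect, with a hypothetical `BigSection` standing in for something like `sumeragi::Configuration`:

```rust
use std::mem::size_of;

// Hypothetical stand-in for a large sub-configuration.
struct BigSection {
    buf: [u8; 4096],
}

// Every section stored inline: the parent grows with each field.
struct Inline {
    a: BigSection,
    b: BigSection,
}

// Boxed sections: the parent is two pointers, the data lives on the heap.
struct Boxed {
    a: Box<BigSection>,
    b: Box<BigSection>,
}

fn main() {
    assert_eq!(size_of::<Inline>(), 2 * size_of::<BigSection>());
    assert_eq!(size_of::<Boxed>(), 2 * size_of::<usize>());
    println!(
        "inline: {} bytes, boxed: {} bytes",
        size_of::<Inline>(),
        size_of::<Boxed>()
    );
}
```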
- #[allow(clippy::expect_used, clippy::unwrap_in_result)] pub fn finish(&mut self) -> Result<(), ConfigError> { if let Some(sumeragi_proxy) = &mut self.sumeragi { // First, iroha public/private key and sumeragi keypair are interchangeable, but @@ -171,8 +175,6 @@ impl ConfigurationProxy { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use proptest::prelude::*; use super::*; @@ -181,11 +183,10 @@ mod tests { const CONFIGURATION_PATH: &str = "./iroha_test_config.json"; /// Key-pair used for proptests generation - #[allow(clippy::expect_used)] pub fn placeholder_keypair() -> KeyPair { let private_key = PrivateKey::from_hex( Algorithm::Ed25519, - "282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B".as_ref() + "282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B" ).expect("Private key not hex encoded"); KeyPair::new( @@ -210,26 +211,27 @@ mod tests { fn arb_proxy()( (public_key, private_key) in arb_keys(), disable_panic_terminal_colors in prop::option::of(Just(true)), - kura in prop::option::of(kura::tests::arb_proxy()), - sumeragi in prop::option::of(sumeragi::tests::arb_proxy()), - torii in prop::option::of(torii::tests::arb_proxy()), + exit_after_init in prop::option::of(Just(true)), + kura in prop::option::of(kura::tests::arb_proxy().prop_map(Box::new)), + sumeragi in (prop::option::of(sumeragi::tests::arb_proxy().prop_map(Box::new))), + torii in (prop::option::of(torii::tests::arb_proxy().prop_map(Box::new))), block_sync in prop::option::of(block_sync::tests::arb_proxy()), queue in prop::option::of(queue::tests::arb_proxy()), - logger in prop::option::of(logger::tests::arb_proxy()), - genesis in prop::option::of(genesis::tests::arb_proxy()), - wsv in prop::option::of(wsv::tests::arb_proxy()), + logger in prop::option::of(logger::tests::arb_proxy().prop_map(Box::new)), + genesis in prop::option::of(genesis::tests::arb_proxy().prop_map(Box::new)), + wsv in prop::option::of(wsv::tests::arb_proxy().prop_map(Box::new)), network in prop::option::of(network::tests::arb_proxy()), - telemetry in prop::option::of(telemetry::tests::arb_proxy()), - snapshot in prop::option::of(snapshot::tests::arb_proxy()), + telemetry in prop::option::of(telemetry::tests::arb_proxy().prop_map(Box::new)), + snapshot in prop::option::of(snapshot::tests::arb_proxy().prop_map(Box::new)), + live_query_store in prop::option::of(live_query_store::tests::arb_proxy()), ) -> ConfigurationProxy { - ConfigurationProxy { public_key, private_key, disable_panic_terminal_colors, kura, sumeragi, torii, block_sync, queue, - logger, genesis, wsv, network, telemetry, snapshot } + ConfigurationProxy { public_key, private_key, disable_panic_terminal_colors, exit_after_init, kura, sumeragi, torii, block_sync, + queue, logger, genesis, wsv, network, telemetry, snapshot, live_query_store } } } proptest! { - #[test] - fn iroha_proxy_build_fails_on_none(proxy in arb_proxy()) { + fn __iroha_proxy_build_fails_on_none(proxy in arb_proxy()) { let cfg = proxy.build(); let example_cfg = ConfigurationProxy::from_path(CONFIGURATION_PATH).build().expect("Failed to build example Iroha config"); if cfg.is_ok() { @@ -238,6 +240,13 @@ mod tests { } } + #[test] + fn iroha_proxy_build_fails_on_none() { + // Using `stacker` because test generated by `proptest!` takes too much stack space. + // Allocating 3MB. 
+ stacker::grow(3 * 1024 * 1024, __iroha_proxy_build_fails_on_none) + } + #[test] fn parse_example_json() { let cfg_proxy = ConfigurationProxy::from_path(CONFIGURATION_PATH); diff --git a/config/src/kura.rs b/config/src/kura.rs index 1ce797e8863..9eaed6f19d3 100644 --- a/config/src/kura.rs +++ b/config/src/kura.rs @@ -1,5 +1,4 @@ //! Module for kura-related configuration and structs -#![allow(clippy::std_instead_of_core)] use std::{num::NonZeroU64, path::Path}; use eyre::{eyre, Result}; @@ -28,7 +27,6 @@ pub struct Configuration { } impl Default for ConfigurationProxy { - #[allow(clippy::expect_used)] fn default() -> Self { Self { init_mode: Some(Mode::default()), @@ -75,7 +73,6 @@ pub mod tests { use super::*; prop_compose! { - #[allow(clippy::expect_used)] pub fn arb_proxy() ( init_mode in prop::option::of(Just(Mode::default())), diff --git a/config/src/lib.rs b/config/src/lib.rs index 628fcff9271..6e80c5e1c88 100644 --- a/config/src/lib.rs +++ b/config/src/lib.rs @@ -7,6 +7,7 @@ pub mod client; pub mod genesis; pub mod iroha; pub mod kura; +pub mod live_query_store; pub mod logger; pub mod network; pub mod path; diff --git a/config/src/live_query_store.rs b/config/src/live_query_store.rs new file mode 100644 index 00000000000..79382fee2ca --- /dev/null +++ b/config/src/live_query_store.rs @@ -0,0 +1,44 @@ +//! Module for `LiveQueryStore`-related configuration and structs. + +use std::num::NonZeroU64; + +use iroha_config_base::derive::{Documented, Proxy}; +use serde::{Deserialize, Serialize}; + +/// Default max time a query can remain in the store unaccessed +pub static DEFAULT_QUERY_IDLE_TIME_MS: once_cell::sync::Lazy = + once_cell::sync::Lazy::new(|| NonZeroU64::new(30_000).unwrap()); + +/// Configuration for `QueryService`. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize, Documented, Proxy)] +#[serde(rename_all = "UPPERCASE")] +#[config(env_prefix = "LIVE_QUERY_STORE_")] +pub struct Configuration { + /// Time query can remain in the store if unaccessed + pub query_idle_time_ms: NonZeroU64, +} + +impl Default for ConfigurationProxy { + fn default() -> Self { + Self { + query_idle_time_ms: Some(*DEFAULT_QUERY_IDLE_TIME_MS), + } + } +} + +#[cfg(test)] +pub mod tests { + use proptest::prelude::*; + + use super::*; + + prop_compose! { + pub fn arb_proxy() + ( + query_idle_time_ms in prop::option::of(Just(*DEFAULT_QUERY_IDLE_TIME_MS)), + ) + -> ConfigurationProxy { + ConfigurationProxy { query_idle_time_ms } + } + } +} diff --git a/config/src/logger.rs b/config/src/logger.rs index d603259468b..bee27fda3df 100644 --- a/config/src/logger.rs +++ b/config/src/logger.rs @@ -1,6 +1,5 @@ //! Module containing logic related to spawning a logger from the //! configuration, as well as run-time reloading of the log-level. -#![allow(clippy::std_instead_of_core)] use core::fmt::Debug; use derive_more::{Deref, DerefMut, From}; @@ -16,7 +15,7 @@ use tracing_subscriber::{filter::LevelFilter, reload::Handle}; const TELEMETRY_CAPACITY: u32 = 1000; const DEFAULT_COMPACT_MODE: bool = false; const DEFAULT_TERMINAL_COLORS: bool = true; -#[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] +#[cfg(feature = "tokio-console")] const DEFAULT_TOKIO_CONSOLE_ADDR: &str = "127.0.0.1:5555"; /// Convert [`Level`] into [`tracing::Level`] @@ -86,7 +85,7 @@ pub struct Configuration { pub log_file_path: Option, /// Enable ANSI terminal colors for formatted output. 
pub terminal_colors: bool, - #[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] + #[cfg(feature = "tokio-console")] /// Address of tokio console (only available under "tokio-console" feature) pub tokio_console_addr: String, } @@ -99,7 +98,7 @@ impl Default for ConfigurationProxy { compact_mode: Some(DEFAULT_COMPACT_MODE), log_file_path: Some(None), terminal_colors: Some(DEFAULT_TERMINAL_COLORS), - #[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] + #[cfg(feature = "tokio-console")] tokio_console_addr: Some(DEFAULT_TOKIO_CONSOLE_ADDR.into()), } } @@ -119,7 +118,7 @@ pub mod tests { (prop::option::of(Just(DEFAULT_COMPACT_MODE))), (prop::option::of(Just(None))), (prop::option::of(Just(DEFAULT_TERMINAL_COLORS))), - #[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] + #[cfg(feature = "tokio-console")] (prop::option::of(Just(DEFAULT_TOKIO_CONSOLE_ADDR.to_string()))), ); proptest::strategy::Strategy::prop_map(strat, move |strat| ConfigurationProxy { @@ -128,7 +127,7 @@ pub mod tests { compact_mode: strat.2, log_file_path: strat.3, terminal_colors: strat.4, - #[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] + #[cfg(feature = "tokio-console")] tokio_console_addr: strat.5, }) } diff --git a/config/src/network.rs b/config/src/network.rs index 9fe93337294..e5c5ec48e41 100644 --- a/config/src/network.rs +++ b/config/src/network.rs @@ -1,5 +1,4 @@ //! Module for network-related configuration and structs -#![allow(clippy::std_instead_of_core)] use iroha_config_base::derive::{Documented, Proxy}; use serde::{Deserialize, Serialize}; diff --git a/config/src/path.rs b/config/src/path.rs index 8128cd806d2..f6f14887c4e 100644 --- a/config/src/path.rs +++ b/config/src/path.rs @@ -66,7 +66,6 @@ impl Path { /// # Panics /// /// Panics if `path` contains an extension. - #[allow(clippy::panic)] pub fn default(path: &'static std::path::Path) -> Self { assert!( path.extension().is_none(), diff --git a/config/src/queue.rs b/config/src/queue.rs index 24cf5152631..3dde85d60d1 100644 --- a/config/src/queue.rs +++ b/config/src/queue.rs @@ -1,5 +1,4 @@ //! Module for `Queue`-related configuration and structs. -#![allow(clippy::std_instead_of_core, clippy::arithmetic_side_effects)] use iroha_config_base::derive::{Documented, Proxy}; use serde::{Deserialize, Serialize}; diff --git a/config/src/sumeragi.rs b/config/src/sumeragi.rs index dc8099a876e..c6929d441d6 100644 --- a/config/src/sumeragi.rs +++ b/config/src/sumeragi.rs @@ -1,11 +1,11 @@ //! `Sumeragi` configuration. Contains both block commit and Gossip-related configuration. -#![allow(clippy::std_instead_of_core, clippy::arithmetic_side_effects)] -use std::{collections::HashSet, fmt::Debug, fs::File, io::BufReader, path::Path}; +use std::{fmt::Debug, fs::File, io::BufReader, path::Path}; use eyre::{Result, WrapErr}; use iroha_config_base::derive::{view, Documented, Proxy}; use iroha_crypto::prelude::*; use iroha_data_model::prelude::*; +use iroha_primitives::{unique_vec, unique_vec::UniqueVec}; use serde::{Deserialize, Serialize}; use self::default::*; @@ -92,14 +92,13 @@ impl ConfigurationProxy { /// The [`peer_id`] field of [`Self`] /// has not been initialized prior to calling this method. 
pub fn insert_self_as_trusted_peers(&mut self) { - let mut peers = HashSet::new(); - #[allow(clippy::expect_used)] let peer_id = self .peer_id .clone() .expect("Insertion of `self` as `trusted_peers` implies that `peer_id` field should be initialized"); - peers.insert(peer_id); - self.trusted_peers = Some(TrustedPeers { peers }); + self.trusted_peers = Some(TrustedPeers { + peers: unique_vec![peer_id], + }); } } @@ -122,52 +121,8 @@ impl Configuration { pub struct TrustedPeers { /// Optional list of predefined trusted peers. Must contain unique /// entries. Custom deserializer raises error if duplicates found. - #[serde(deserialize_with = "deserialize_unique_trusted_peers")] - pub peers: HashSet, -} - -/// Custom deserializer that ensures that `trusted_peers` only -/// contains unique `PeerId`'s. -/// -/// # Errors -/// - Peer Ids not unique, -/// - Not a sequence (array) -fn deserialize_unique_trusted_peers<'de, D>(deserializer: D) -> Result, D::Error> -where - D: serde::Deserializer<'de>, -{ - /// Helper, for constructing a unique visitor that errors whenever - /// a duplicate entry is found. - struct UniqueVisitor(core::marker::PhantomData HashSet>); - - impl<'de> serde::de::Visitor<'de> for UniqueVisitor { - type Value = HashSet; - - fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result { - formatter.write_str("a set of unique `Peer::Id`s.") - } - - fn visit_seq(self, mut seq: S) -> Result, S::Error> - where - S: serde::de::SeqAccess<'de>, - { - let mut result = HashSet::new(); - while let Some(value) = seq.next_element()? { - if result.contains(&value) { - return Err(serde::de::Error::custom(format!( - "The peer id: {}'s public key appears twice.", - &value - ))); - } - result.insert(value); - } - - Ok(result) - } - } - - let visitor = UniqueVisitor(core::marker::PhantomData); - deserializer.deserialize_seq(visitor) + #[serde(deserialize_with = "UniqueVec::display_deserialize_failing_on_duplicates")] + pub peers: UniqueVec, } impl TrustedPeers { @@ -181,11 +136,9 @@ impl TrustedPeers { let file = File::open(&path) .wrap_err_with(|| format!("Failed to open trusted peers file {:?}", &path))?; let reader = BufReader::new(file); - let trusted_peers: HashSet = - serde_json::from_reader(reader).wrap_err("Failed to deserialize json from reader")?; - Ok(TrustedPeers { - peers: trusted_peers, - }) + serde_json::from_reader(reader) + .wrap_err("Failed to deserialize json from reader") + .map_err(Into::into) } } @@ -216,10 +169,10 @@ pub mod tests { block_time_ms, trusted_peers, commit_time_limit_ms, + max_transactions_in_block, actor_channel_capacity, gossip_batch_size, gossip_period_ms, - max_transactions_in_block, #[cfg(debug_assertions)] debug_force_soft_fork } diff --git a/config/src/telemetry.rs b/config/src/telemetry.rs index 1d50a96c1b9..d347df8b050 100644 --- a/config/src/telemetry.rs +++ b/config/src/telemetry.rs @@ -1,5 +1,4 @@ //! Module for telemetry-related configuration and structs. -#![allow(clippy::std_instead_of_core)] use iroha_config_base::derive::{Documented, Proxy}; use serde::{Deserialize, Serialize}; use url::Url; diff --git a/config/src/torii.rs b/config/src/torii.rs index 1797d8e7e07..1c2b801e981 100644 --- a/config/src/torii.rs +++ b/config/src/torii.rs @@ -1,6 +1,4 @@ //! `Torii` configuration as well as the default values for the URLs used for the main endpoints: `p2p`, `telemetry`, but not `api`. 
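The `TrustedPeers` change above trades a long hand-rolled `serde` visitor for a single `deserialize_with` attribute on `UniqueVec`, with the same observable behaviour: deserialization fails on the first duplicate peer instead of silently collapsing it. A sketch of what such a duplicate-rejecting deserializer amounts to (the free function and the `String` element type are illustrative; the real logic lives in `iroha_primitives::unique_vec`):

```rust
use serde::de::{Deserializer, Error as DeError};
use serde::Deserialize;

// Illustrative free-function version of a duplicate-rejecting deserializer.
fn deserialize_unique<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de> + PartialEq + std::fmt::Display,
{
    let items = Vec::<T>::deserialize(deserializer)?;
    for (i, item) in items.iter().enumerate() {
        // Reject the whole document on the first repeated entry.
        if items[..i].contains(item) {
            return Err(DeError::custom(format!("duplicate entry: {item}")));
        }
    }
    Ok(items)
}

#[derive(Deserialize)]
struct TrustedPeers {
    #[serde(deserialize_with = "deserialize_unique")]
    peers: Vec<String>,
}

fn main() {
    let ok: TrustedPeers = serde_json::from_str(r#"{"peers": ["a", "b"]}"#).unwrap();
    assert_eq!(ok.peers.len(), 2);

    let dup = serde_json::from_str::<TrustedPeers>(r#"{"peers": ["a", "a"]}"#);
    assert!(dup.is_err());
}
```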
-#![allow(clippy::std_instead_of_core, clippy::arithmetic_side_effects)] -use std::num::NonZeroU64; use iroha_config_base::derive::{Documented, Proxy}; use iroha_primitives::addr::{socket_addr, SocketAddr}; @@ -8,18 +6,10 @@ use serde::{Deserialize, Serialize}; /// Default socket for p2p communication pub const DEFAULT_TORII_P2P_ADDR: SocketAddr = socket_addr!(127.0.0.1:1337); -/// Default socket for reporting internal status and metrics -pub const DEFAULT_TORII_TELEMETRY_ADDR: SocketAddr = socket_addr!(127.0.0.1:8180); /// Default maximum size of single transaction pub const DEFAULT_TORII_MAX_TRANSACTION_SIZE: u32 = 2_u32.pow(15); /// Default upper bound on `content-length` specified in the HTTP request header pub const DEFAULT_TORII_MAX_CONTENT_LENGTH: u32 = 2_u32.pow(12) * 4000; -/// Default max size of a single batch of results from a query -pub static DEFAULT_TORII_FETCH_SIZE: once_cell::sync::Lazy = - once_cell::sync::Lazy::new(|| NonZeroU64::new(10).unwrap()); -/// Default max time a query can remain in the store unaccessed -pub static DEFAULT_TORII_QUERY_IDLE_TIME_MS: once_cell::sync::Lazy = - once_cell::sync::Lazy::new(|| NonZeroU64::new(30_000).unwrap()); /// Structure that defines the configuration parameters of `Torii` which is the routing module. /// For example the `p2p_addr`, which is used for consensus and block-synchronisation purposes, @@ -34,17 +24,10 @@ pub struct Configuration { /// Torii address for client API. #[config(serde_as_str)] pub api_url: SocketAddr, - /// Torii address for reporting internal status and metrics for administration. - #[config(serde_as_str)] - pub telemetry_url: SocketAddr, /// Maximum number of bytes in raw transaction. Used to prevent from DOS attacks. pub max_transaction_size: u32, /// Maximum number of bytes in raw message. Used to prevent from DOS attacks. pub max_content_len: u32, - /// How many query results are returned in one batch - pub fetch_size: NonZeroU64, - /// Time query can remain in the store if unaccessed - pub query_idle_time_ms: NonZeroU64, } impl Default for ConfigurationProxy { @@ -52,11 +35,8 @@ impl Default for ConfigurationProxy { Self { p2p_addr: None, api_url: None, - telemetry_url: None, max_transaction_size: Some(DEFAULT_TORII_MAX_TRANSACTION_SIZE), max_content_len: Some(DEFAULT_TORII_MAX_CONTENT_LENGTH), - fetch_size: Some(*DEFAULT_TORII_FETCH_SIZE), - query_idle_time_ms: Some(*DEFAULT_TORII_QUERY_IDLE_TIME_MS), } } } @@ -107,14 +87,11 @@ pub mod tests { ( p2p_addr in prop::option::of(Just(DEFAULT_TORII_P2P_ADDR)), api_url in prop::option::of(Just(uri::DEFAULT_API_ADDR)), - telemetry_url in prop::option::of(Just(DEFAULT_TORII_TELEMETRY_ADDR)), max_transaction_size in prop::option::of(Just(DEFAULT_TORII_MAX_TRANSACTION_SIZE)), max_content_len in prop::option::of(Just(DEFAULT_TORII_MAX_CONTENT_LENGTH)), - fetch_size in prop::option::of(Just(*DEFAULT_TORII_FETCH_SIZE)), - query_idle_time_ms in prop::option::of(Just(*DEFAULT_TORII_QUERY_IDLE_TIME_MS)), ) -> ConfigurationProxy { - ConfigurationProxy { p2p_addr, api_url, telemetry_url, max_transaction_size, max_content_len, fetch_size, query_idle_time_ms } + ConfigurationProxy { p2p_addr, api_url, max_transaction_size, max_content_len } } } } diff --git a/config/src/wasm.rs b/config/src/wasm.rs index 570f97efebf..0528da996ed 100644 --- a/config/src/wasm.rs +++ b/config/src/wasm.rs @@ -1,5 +1,4 @@ //! Module for wasm-related configuration and structs. 
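`QUERY_IDLE_TIME_MS` leaving `torii.rs` above and resurfacing in the new `live_query_store` module earlier in this diff relies in both places on the same `once_cell::sync::Lazy` pattern for its default: as I read it, `NonZeroU64::new(..).unwrap()` cannot be evaluated in a `const`/`static` initializer on the pinned toolchain, so the default is computed on first access instead. A minimal sketch:

```rust
use std::num::NonZeroU64;

use once_cell::sync::Lazy;

// Computed on first access: building the `NonZeroU64` involves an
// `Option::unwrap`, which (an assumption about the pinned toolchain)
// is not usable in a `const` initializer, hence the `Lazy` indirection.
static DEFAULT_QUERY_IDLE_TIME_MS: Lazy<NonZeroU64> =
    Lazy::new(|| NonZeroU64::new(30_000).unwrap());

fn main() {
    // Dereferencing copies the value out, as the proxy `Default` impl
    // does with `*DEFAULT_QUERY_IDLE_TIME_MS`.
    let idle: NonZeroU64 = *DEFAULT_QUERY_IDLE_TIME_MS;
    assert_eq!(idle.get(), 30_000);
}
```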
-#![allow(clippy::std_instead_of_core, clippy::arithmetic_side_effects)] use iroha_config_base::derive::{Documented, Proxy}; use serde::{Deserialize, Serialize}; diff --git a/config/src/wsv.rs b/config/src/wsv.rs index b3b6fad84c0..aacc58734be 100644 --- a/config/src/wsv.rs +++ b/config/src/wsv.rs @@ -1,6 +1,4 @@ //! Module for `WorldStateView`-related configuration and structs. -#![allow(clippy::std_instead_of_core)] - use default::*; use iroha_config_base::derive::{Documented, Proxy}; use iroha_data_model::{prelude::*, transaction::TransactionLimits}; diff --git a/configs/client/config.json b/configs/client/config.json index 88dfd179898..5ed2399d626 100644 --- a/configs/client/config.json +++ b/configs/client/config.json @@ -10,7 +10,6 @@ "password": "ilovetea" }, "TORII_API_URL": "http://127.0.0.1:8080/", - "TORII_TELEMETRY_URL": "http://127.0.0.1:8180/", "TRANSACTION_TIME_TO_LIVE_MS": 100000, "TRANSACTION_STATUS_TIMEOUT_MS": 15000, "TRANSACTION_LIMITS": { diff --git a/configs/client/lts/config.json b/configs/client/lts/config.json index 88dfd179898..e1763c4d801 100644 --- a/configs/client/lts/config.json +++ b/configs/client/lts/config.json @@ -1,21 +1,95 @@ { - "PUBLIC_KEY": "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0", - "PRIVATE_KEY": { - "digest_function": "ed25519", - "payload": "9ac47abf59b356e0bd7dcbbbb4dec080e302156a48ca907e47cb6aea1d32719e7233bfc89dcbd68c19fde6ce6158225298ec1131b6a130d1aeb454c1ab5183c0" - }, - "ACCOUNT_ID": "alice@wonderland", - "BASIC_AUTH": { - "web_login": "mad_hatter", - "password": "ilovetea" - }, - "TORII_API_URL": "http://127.0.0.1:8080/", - "TORII_TELEMETRY_URL": "http://127.0.0.1:8180/", - "TRANSACTION_TIME_TO_LIVE_MS": 100000, - "TRANSACTION_STATUS_TIMEOUT_MS": 15000, - "TRANSACTION_LIMITS": { - "max_instruction_number": 4096, - "max_wasm_size_bytes": 4194304 - }, - "ADD_TRANSACTION_NONCE": false + "PUBLIC_KEY": null, + "PRIVATE_KEY": null, + "DISABLE_PANIC_TERMINAL_COLORS": false, + "KURA": { + "INIT_MODE": "strict", + "BLOCK_STORE_PATH": "./storage", + "BLOCKS_PER_STORAGE_FILE": 1000, + "ACTOR_CHANNEL_CAPACITY": 100, + "DEBUG_OUTPUT_NEW_BLOCKS": false + }, + "SUMERAGI": { + "KEY_PAIR": null, + "PEER_ID": null, + "BLOCK_TIME_MS": 1000, + "TRUSTED_PEERS": null, + "COMMIT_TIME_LIMIT_MS": 2000, + "TX_RECEIPT_TIME_LIMIT_MS": 500, + "TRANSACTION_LIMITS": { + "max_instruction_number": 4096, + "max_wasm_size_bytes": 4194304 + }, + "ACTOR_CHANNEL_CAPACITY": 100, + "GOSSIP_BATCH_SIZE": 500, + "GOSSIP_PERIOD_MS": 1000 + }, + "TORII": { + "P2P_ADDR": null, + "API_URL": null, + "TELEMETRY_URL": null, + "MAX_TRANSACTION_SIZE": 32768, + "MAX_CONTENT_LEN": 16384000 + }, + "BLOCK_SYNC": { + "GOSSIP_PERIOD_MS": 10000, + "BLOCK_BATCH_SIZE": 4, + "ACTOR_CHANNEL_CAPACITY": 100 + }, + "QUEUE": { + "MAXIMUM_TRANSACTIONS_IN_BLOCK": 8192, + "MAXIMUM_TRANSACTIONS_IN_QUEUE": 65536, + "TRANSACTION_TIME_TO_LIVE_MS": 86400000, + "FUTURE_THRESHOLD_MS": 1000 + }, + "LOGGER": { + "MAX_LOG_LEVEL": "INFO", + "TELEMETRY_CAPACITY": 1000, + "COMPACT_MODE": false, + "LOG_FILE_PATH": null, + "TERMINAL_COLORS": true + }, + "GENESIS": { + "ACCOUNT_PUBLIC_KEY": null, + "ACCOUNT_PRIVATE_KEY": null, + "WAIT_FOR_PEERS_RETRY_COUNT_LIMIT": 100, + "WAIT_FOR_PEERS_RETRY_PERIOD_MS": 500, + "GENESIS_SUBMISSION_DELAY_MS": 1000 + }, + "WSV": { + "ASSET_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "ASSET_DEFINITION_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "ACCOUNT_METADATA_LIMITS": { + "max_len": 
1048576, + "max_entry_byte_size": 4096 + }, + "DOMAIN_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "IDENT_LENGTH_LIMITS": { + "min": 1, + "max": 128 + }, + "WASM_RUNTIME_CONFIG": { + "FUEL_LIMIT": 1000000, + "MAX_MEMORY": 524288000 + } + }, + "NETWORK": { + "ACTOR_CHANNEL_CAPACITY": 100 + }, + "TELEMETRY": { + "NAME": null, + "URL": null, + "MIN_RETRY_PERIOD": 1, + "MAX_RETRY_DELAY_EXPONENT": 4, + "FILE": null + } } diff --git a/configs/client/stable/config.json b/configs/client/stable/config.json index 88dfd179898..e1763c4d801 100644 --- a/configs/client/stable/config.json +++ b/configs/client/stable/config.json @@ -1,21 +1,95 @@ { - "PUBLIC_KEY": "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0", - "PRIVATE_KEY": { - "digest_function": "ed25519", - "payload": "9ac47abf59b356e0bd7dcbbbb4dec080e302156a48ca907e47cb6aea1d32719e7233bfc89dcbd68c19fde6ce6158225298ec1131b6a130d1aeb454c1ab5183c0" - }, - "ACCOUNT_ID": "alice@wonderland", - "BASIC_AUTH": { - "web_login": "mad_hatter", - "password": "ilovetea" - }, - "TORII_API_URL": "http://127.0.0.1:8080/", - "TORII_TELEMETRY_URL": "http://127.0.0.1:8180/", - "TRANSACTION_TIME_TO_LIVE_MS": 100000, - "TRANSACTION_STATUS_TIMEOUT_MS": 15000, - "TRANSACTION_LIMITS": { - "max_instruction_number": 4096, - "max_wasm_size_bytes": 4194304 - }, - "ADD_TRANSACTION_NONCE": false + "PUBLIC_KEY": null, + "PRIVATE_KEY": null, + "DISABLE_PANIC_TERMINAL_COLORS": false, + "KURA": { + "INIT_MODE": "strict", + "BLOCK_STORE_PATH": "./storage", + "BLOCKS_PER_STORAGE_FILE": 1000, + "ACTOR_CHANNEL_CAPACITY": 100, + "DEBUG_OUTPUT_NEW_BLOCKS": false + }, + "SUMERAGI": { + "KEY_PAIR": null, + "PEER_ID": null, + "BLOCK_TIME_MS": 1000, + "TRUSTED_PEERS": null, + "COMMIT_TIME_LIMIT_MS": 2000, + "TX_RECEIPT_TIME_LIMIT_MS": 500, + "TRANSACTION_LIMITS": { + "max_instruction_number": 4096, + "max_wasm_size_bytes": 4194304 + }, + "ACTOR_CHANNEL_CAPACITY": 100, + "GOSSIP_BATCH_SIZE": 500, + "GOSSIP_PERIOD_MS": 1000 + }, + "TORII": { + "P2P_ADDR": null, + "API_URL": null, + "TELEMETRY_URL": null, + "MAX_TRANSACTION_SIZE": 32768, + "MAX_CONTENT_LEN": 16384000 + }, + "BLOCK_SYNC": { + "GOSSIP_PERIOD_MS": 10000, + "BLOCK_BATCH_SIZE": 4, + "ACTOR_CHANNEL_CAPACITY": 100 + }, + "QUEUE": { + "MAXIMUM_TRANSACTIONS_IN_BLOCK": 8192, + "MAXIMUM_TRANSACTIONS_IN_QUEUE": 65536, + "TRANSACTION_TIME_TO_LIVE_MS": 86400000, + "FUTURE_THRESHOLD_MS": 1000 + }, + "LOGGER": { + "MAX_LOG_LEVEL": "INFO", + "TELEMETRY_CAPACITY": 1000, + "COMPACT_MODE": false, + "LOG_FILE_PATH": null, + "TERMINAL_COLORS": true + }, + "GENESIS": { + "ACCOUNT_PUBLIC_KEY": null, + "ACCOUNT_PRIVATE_KEY": null, + "WAIT_FOR_PEERS_RETRY_COUNT_LIMIT": 100, + "WAIT_FOR_PEERS_RETRY_PERIOD_MS": 500, + "GENESIS_SUBMISSION_DELAY_MS": 1000 + }, + "WSV": { + "ASSET_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "ASSET_DEFINITION_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "ACCOUNT_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "DOMAIN_METADATA_LIMITS": { + "max_len": 1048576, + "max_entry_byte_size": 4096 + }, + "IDENT_LENGTH_LIMITS": { + "min": 1, + "max": 128 + }, + "WASM_RUNTIME_CONFIG": { + "FUEL_LIMIT": 1000000, + "MAX_MEMORY": 524288000 + } + }, + "NETWORK": { + "ACTOR_CHANNEL_CAPACITY": 100 + }, + "TELEMETRY": { + "NAME": null, + "URL": null, + "MIN_RETRY_PERIOD": 1, + "MAX_RETRY_DELAY_EXPONENT": 4, + "FILE": null + } } diff --git a/configs/peer/config.json 
b/configs/peer/config.json index e28996bdf9b..b25acc16f1b 100644 --- a/configs/peer/config.json +++ b/configs/peer/config.json @@ -2,6 +2,7 @@ "PUBLIC_KEY": null, "PRIVATE_KEY": null, "DISABLE_PANIC_TERMINAL_COLORS": false, + "EXIT_AFTER_INIT": false, "KURA": { "INIT_MODE": "strict", "BLOCK_STORE_PATH": "./storage", @@ -23,11 +24,8 @@ "TORII": { "P2P_ADDR": null, "API_URL": null, - "TELEMETRY_URL": null, "MAX_TRANSACTION_SIZE": 32768, - "MAX_CONTENT_LEN": 16384000, - "FETCH_SIZE": 10, - "QUERY_IDLE_TIME_MS": 30000 + "MAX_CONTENT_LEN": 16384000 }, "BLOCK_SYNC": { "GOSSIP_PERIOD_MS": 10000, @@ -95,5 +93,8 @@ "CREATE_EVERY_MS": 60000, "DIR_PATH": "./storage", "CREATION_ENABLED": true + }, + "LIVE_QUERY_STORE": { + "QUERY_IDLE_TIME_MS": 30000 } } diff --git a/configs/peer/executor.wasm b/configs/peer/executor.wasm new file mode 100644 index 00000000000..bc3c581289e Binary files /dev/null and b/configs/peer/executor.wasm differ diff --git a/configs/peer/genesis.json b/configs/peer/genesis.json index fa69236b6d5..2ca5d0365ed 100644 --- a/configs/peer/genesis.json +++ b/configs/peer/genesis.json @@ -197,5 +197,5 @@ } ] ], - "validator": "./validator.wasm" + "executor": "./executor.wasm" } diff --git a/configs/peer/lts/config.json b/configs/peer/lts/config.json index e1763c4d801..ef36a9f525c 100644 --- a/configs/peer/lts/config.json +++ b/configs/peer/lts/config.json @@ -12,14 +12,10 @@ "SUMERAGI": { "KEY_PAIR": null, "PEER_ID": null, - "BLOCK_TIME_MS": 1000, + "BLOCK_TIME_MS": 2000, "TRUSTED_PEERS": null, - "COMMIT_TIME_LIMIT_MS": 2000, - "TX_RECEIPT_TIME_LIMIT_MS": 500, - "TRANSACTION_LIMITS": { - "max_instruction_number": 4096, - "max_wasm_size_bytes": 4194304 - }, + "COMMIT_TIME_LIMIT_MS": 4000, + "MAX_TRANSACTIONS_IN_BLOCK": 512, "ACTOR_CHANNEL_CAPACITY": 100, "GOSSIP_BATCH_SIZE": 500, "GOSSIP_PERIOD_MS": 1000 @@ -27,9 +23,10 @@ "TORII": { "P2P_ADDR": null, "API_URL": null, - "TELEMETRY_URL": null, "MAX_TRANSACTION_SIZE": 32768, - "MAX_CONTENT_LEN": 16384000 + "MAX_CONTENT_LEN": 16384000, + "FETCH_SIZE": 10, + "QUERY_IDLE_TIME_MS": 30000 }, "BLOCK_SYNC": { "GOSSIP_PERIOD_MS": 10000, @@ -37,8 +34,8 @@ "ACTOR_CHANNEL_CAPACITY": 100 }, "QUEUE": { - "MAXIMUM_TRANSACTIONS_IN_BLOCK": 8192, - "MAXIMUM_TRANSACTIONS_IN_QUEUE": 65536, + "MAX_TRANSACTIONS_IN_QUEUE": 65536, + "MAX_TRANSACTIONS_IN_QUEUE_PER_USER": 65536, "TRANSACTION_TIME_TO_LIVE_MS": 86400000, "FUTURE_THRESHOLD_MS": 1000 }, @@ -51,10 +48,7 @@ }, "GENESIS": { "ACCOUNT_PUBLIC_KEY": null, - "ACCOUNT_PRIVATE_KEY": null, - "WAIT_FOR_PEERS_RETRY_COUNT_LIMIT": 100, - "WAIT_FOR_PEERS_RETRY_PERIOD_MS": 500, - "GENESIS_SUBMISSION_DELAY_MS": 1000 + "ACCOUNT_PRIVATE_KEY": null }, "WSV": { "ASSET_METADATA_LIMITS": { @@ -77,8 +71,12 @@ "min": 1, "max": 128 }, + "TRANSACTION_LIMITS": { + "max_instruction_number": 4096, + "max_wasm_size_bytes": 4194304 + }, "WASM_RUNTIME_CONFIG": { - "FUEL_LIMIT": 1000000, + "FUEL_LIMIT": 23000000, "MAX_MEMORY": 524288000 } }, @@ -91,5 +89,10 @@ "MIN_RETRY_PERIOD": 1, "MAX_RETRY_DELAY_EXPONENT": 4, "FILE": null + }, + "SNAPSHOT": { + "CREATE_EVERY_MS": 60000, + "DIR_PATH": "./storage", + "CREATION_ENABLED": true } } diff --git a/configs/peer/lts/executor.wasm b/configs/peer/lts/executor.wasm new file mode 100644 index 00000000000..b74e020ea15 Binary files /dev/null and b/configs/peer/lts/executor.wasm differ diff --git a/configs/peer/lts/genesis.json b/configs/peer/lts/genesis.json index 94e4254be41..2ca5d0365ed 100644 --- a/configs/peer/lts/genesis.json +++ b/configs/peer/lts/genesis.json @@ -1,349 +1,201 @@ { 
"transactions": [ - { - "isi": [ - { - "Register": { - "Identifiable": { - "NewDomain": { - "id": "wonderland", - "logo": null, - "metadata": { - "key": { - "String": "value" - } - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "NewAccount": { - "id": "alice@wonderland", - "signatories": [ - "ed01207233bfc89dcbd68c19fde6ce6158225298ec1131b6a130d1aeb454c1ab5183c0" - ], - "metadata": { - "key": { - "String": "value" - } - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "NewAccount": { - "id": "bob@wonderland", - "signatories": [ - "ed01207233bfc89dcbd68c19fde6ce6158225298ec1131b6a130d1aeb454c1ab5183c0" - ], - "metadata": { - "key": { - "String": "value" - } - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "NewAssetDefinition": { - "id": "rose#wonderland", - "value_type": "Quantity", - "mintable": "Infinitely", - "metadata": {} - } - } - } - }, - { - "Register": { - "Identifiable": { - "NewDomain": { - "id": "garden_of_live_flowers", - "logo": null, - "metadata": {} - } - } - } - }, - { - "Register": { - "Identifiable": { - "NewAccount": { - "id": "carpenter@garden_of_live_flowers", - "signatories": [ - "ed01207233bfc89dcbd68c19fde6ce6158225298ec1131b6a130d1aeb454c1ab5183c0" - ], - "metadata": {} - } - } - } - }, - { - "Register": { - "Identifiable": { - "NewAssetDefinition": { - "id": "cabbage#garden_of_live_flowers", - "value_type": "Quantity", - "mintable": "Infinitely", - "metadata": {} - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_unregister_asset_with_definition", - "params": { - "asset_definition_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_burn_asset_with_definition", - "params": { - "asset_definition_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_burn_user_assets", - "params": { - "asset_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_set_key_value_in_user_assets", - "params": { - "asset_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_remove_key_value_in_user_assets", - "params": { - "asset_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_set_key_value_in_user_metadata", - "params": { - "account_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_remove_key_value_in_user_metadata", - "params": { - "account_id": "Id" - } + [ + { + "Register": { + "NewDomain": { + "id": "wonderland", + "logo": null, + "metadata": { + "key": { + "String": "value" } } } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_set_key_value_in_asset_definition", - "params": { - "asset_definition_id": "Id" - } - } - } - } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_remove_key_value_in_asset_definition", - "params": { - "asset_definition_id": "Id" - } + } + }, + { + "Register": { + "NewAccount": { + "id": "alice@wonderland", + "signatories": [ + "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" + ], + "metadata": { + "key": { + "String": "value" } } } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_mint_user_asset_definitions", - "params": { - 
"asset_definition_id": "Id" - } + } + }, + { + "Register": { + "NewAccount": { + "id": "bob@wonderland", + "signatories": [ + "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" + ], + "metadata": { + "key": { + "String": "value" } } } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_transfer_user_assets", - "params": { - "asset_id": "Id" - } - } - } + } + }, + { + "Register": { + "NewAssetDefinition": { + "id": "rose#wonderland", + "value_type": "Quantity", + "mintable": "Infinitely", + "logo": null, + "metadata": {} } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "can_transfer_only_fixed_number_of_times_per_period", - "params": { - "count": "U32", - "period": "U128" - } - } - } + } + }, + { + "Register": { + "NewDomain": { + "id": "garden_of_live_flowers", + "logo": null, + "metadata": {} } - }, - { - "Mint": { - "object": { - "U32": 13 - }, - "destination_id": { - "Id": { - "AssetId": "rose##alice@wonderland" - } - } + } + }, + { + "Register": { + "NewAccount": { + "id": "carpenter@garden_of_live_flowers", + "signatories": [ + "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" + ], + "metadata": {} } - }, - { - "Mint": { - "object": { - "U32": 44 - }, - "destination_id": { - "Id": { - "AssetId": "cabbage#garden_of_live_flowers#alice@wonderland" - } - } + } + }, + { + "Register": { + "NewAssetDefinition": { + "id": "cabbage#garden_of_live_flowers", + "value_type": "Quantity", + "mintable": "Infinitely", + "logo": null, + "metadata": {} } - }, - { - "Register": { - "Identifiable": { - "PermissionTokenDefinition": { - "id": "allowed_to_do_stuff", - "params": {} - } - } + } + }, + { + "Mint": { + "object": "13_u32", + "destination_id": { + "AssetId": "rose##alice@wonderland" } - }, - { - "Register": { - "Identifiable": { - "NewRole": { - "inner": { - "id": "USER_METADATA_ACCESS", - "permissions": [ - { - "definition_id": "can_remove_key_value_in_user_metadata", - "params": { - "account_id": { - "Id": { - "AccountId": "alice@wonderland" - } - } - } - }, - { - "definition_id": "can_set_key_value_in_user_metadata", - "params": { - "account_id": { - "Id": { - "AccountId": "alice@wonderland" - } - } - } - } - ] - } - } - } + } + }, + { + "Mint": { + "object": "44_u32", + "destination_id": { + "AssetId": "cabbage#garden_of_live_flowers#alice@wonderland" } - }, - { - "Grant": { - "object": { - "PermissionToken": { - "definition_id": "allowed_to_do_stuff", - "params": {} - } - }, - "destination_id": { - "Id": { - "AccountId": "alice@wonderland" - } - } + } + }, + { + "Grant": { + "object": { + "PermissionToken": { + "definition_id": "CanSetParameters", + "payload": null + } + }, + "destination_id": { + "AccountId": "alice@wonderland" } - }, - { - "Register": { - "Identifiable": { - "NewRole": { - "inner": { - "id": "staff_that_does_stuff_in_genesis", - "permissions": [ - { - "definition_id": "allowed_to_do_stuff", - "params": {} - } - ] - } - } - } + } + }, + { + "Sequence": [ + { + "NewParameter": { + "Parameter": "?MaxTransactionsInBlock=512" + } + }, + { + "NewParameter": { + "Parameter": "?BlockTime=2000" + } + }, + { + "NewParameter": { + "Parameter": "?CommitTimeLimit=4000" + } + }, + { + "NewParameter": { + "Parameter": "?TransactionLimits=4096,4194304_TL" + } + }, + { + "NewParameter": { + "Parameter": "?WSVAssetMetadataLimits=1048576,4096_ML" + } + }, + { + "NewParameter": { + "Parameter": "?WSVAssetDefinitionMetadataLimits=1048576,4096_ML" + } + }, + { + 
"NewParameter": { + "Parameter": "?WSVAccountMetadataLimits=1048576,4096_ML" + } + }, + { + "NewParameter": { + "Parameter": "?WSVDomainMetadataLimits=1048576,4096_ML" + } + }, + { + "NewParameter": { + "Parameter": "?WSVIdentLengthLimits=1,128_LL" + } + }, + { + "NewParameter": { + "Parameter": "?WASMFuelLimit=23000000" + } + }, + { + "NewParameter": { + "Parameter": "?WASMMaxMemory=524288000" + } + } + ] + }, + { + "Register": { + "NewRole": { + "id": "ALICE_METADATA_ACCESS", + "permissions": [ + { + "definition_id": "CanRemoveKeyValueInUserAccount", + "payload": { + "account_id": "alice@wonderland" + } + }, + { + "definition_id": "CanSetKeyValueInUserAccount", + "payload": { + "account_id": "alice@wonderland" + } + } + ] } } - ] - } - ] + } + ] + ], + "executor": "./executor.wasm" } diff --git a/configs/peer/stable/config.json b/configs/peer/stable/config.json index 56c74756874..ef36a9f525c 100644 --- a/configs/peer/stable/config.json +++ b/configs/peer/stable/config.json @@ -23,7 +23,6 @@ "TORII": { "P2P_ADDR": null, "API_URL": null, - "TELEMETRY_URL": null, "MAX_TRANSACTION_SIZE": 32768, "MAX_CONTENT_LEN": 16384000, "FETCH_SIZE": 10, @@ -90,5 +89,10 @@ "MIN_RETRY_PERIOD": 1, "MAX_RETRY_DELAY_EXPONENT": 4, "FILE": null + }, + "SNAPSHOT": { + "CREATE_EVERY_MS": 60000, + "DIR_PATH": "./storage", + "CREATION_ENABLED": true } } diff --git a/configs/peer/stable/executor.wasm b/configs/peer/stable/executor.wasm new file mode 100644 index 00000000000..b74e020ea15 Binary files /dev/null and b/configs/peer/stable/executor.wasm differ diff --git a/configs/peer/stable/genesis.json b/configs/peer/stable/genesis.json index cfc3b0f05b5..2ca5d0365ed 100644 --- a/configs/peer/stable/genesis.json +++ b/configs/peer/stable/genesis.json @@ -88,7 +88,7 @@ }, { "Mint": { - "U32": 13, + "object": "13_u32", "destination_id": { "AssetId": "rose##alice@wonderland" } @@ -96,7 +96,7 @@ }, { "Mint": { - "U32": 44, + "object": "44_u32", "destination_id": { "AssetId": "cabbage#garden_of_live_flowers#alice@wonderland" } @@ -104,9 +104,11 @@ }, { "Grant": { - "PermissionToken": { - "definition_id": "CanSetParameters", - "payload": null + "object": { + "PermissionToken": { + "definition_id": "CanSetParameters", + "payload": null + } }, "destination_id": { "AccountId": "alice@wonderland" @@ -195,5 +197,5 @@ } ] ], - "validator": "./validator.wasm" + "executor": "./executor.wasm" } diff --git a/configs/peer/validator.wasm b/configs/peer/validator.wasm deleted file mode 100644 index 78b3a638072..00000000000 Binary files a/configs/peer/validator.wasm and /dev/null differ diff --git a/core/Cargo.toml b/core/Cargo.toml index 94b21f93d77..73a9f5c63b8 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -14,6 +14,9 @@ license.workspace = true keywords.workspace = true categories.workspace = true +[lints] +workspace = true + [features] default = ["bridge", "cli", "telemetry"] @@ -65,9 +68,7 @@ displaydoc = { workspace = true } wasmtime = { workspace = true } parking_lot = { workspace = true, features = ["deadlock_detection"] } derive_more = { workspace = true } -itertools = { workspace = true } - -sealed = "0.5.0" +uuid = { version = "1.4.1", features = ["v4"] } [dev-dependencies] criterion = { workspace = true } @@ -75,7 +76,7 @@ hex = { workspace = true } once_cell = { workspace = true } tempfile = { workspace = true } -byte-unit = "4.0.18" +byte-unit = "4.0.19" [[bench]] name = "validation" @@ -88,22 +89,22 @@ harness = false [[bench]] name = "apply_blocks" harness = false -path = "benches/apply_blocks/benchmark.rs" +path 
= "benches/blocks/apply_blocks_benchmark.rs" [[bench]] name = "validate_blocks" harness = false -path = "benches/validate_blocks/benchmark.rs" +path = "benches/blocks/validate_blocks_benchmark.rs" [[example]] name = "apply_blocks" harness = false -path = "benches/apply_blocks/oneshot.rs" +path = "benches/blocks/apply_blocks_oneshot.rs" [[example]] name = "validate_blocks" harness = false -path = "benches/validate_blocks/oneshot.rs" +path = "benches/blocks/validate_blocks_oneshot.rs" [package.metadata.cargo-all-features] denylist = [ diff --git a/core/benches/apply_blocks/apply_blocks.rs b/core/benches/apply_blocks/apply_blocks.rs deleted file mode 100644 index 917d29dee22..00000000000 --- a/core/benches/apply_blocks/apply_blocks.rs +++ /dev/null @@ -1,239 +0,0 @@ -#![allow(missing_docs, clippy::restriction)] - -use std::{collections::BTreeSet, str::FromStr as _}; - -use eyre::Result; -use iroha_config::sumeragi::default::DEFAULT_CONSENSUS_ESTIMATION_MS; -use iroha_core::{block::PendingBlock, prelude::*, wsv::World}; -use iroha_crypto::{HashOf, MerkleTree, SignatureOf, SignaturesOf}; -use iroha_data_model::{ - asset::{AssetDefinition, AssetDefinitionId}, - block::{BlockHeader, VersionedCommittedBlock}, - isi::InstructionBox, - prelude::*, -}; - -/// Create block, bypassing validation -fn create_block( - height: u64, - previous_block_hash: Option>, - instructions: Vec, - account_id: AccountId, - key_pair: KeyPair, -) -> Result { - let transaction = TransactionBuilder::new(account_id) - .with_instructions(instructions) - .sign(key_pair.clone())?; - - let transactions_hash = [&transaction] - .iter() - .map(|tx| tx.hash()) - .collect::>() - .hash(); - let timestamp = current_time().as_millis(); - let header = BlockHeader { - timestamp, - consensus_estimation: DEFAULT_CONSENSUS_ESTIMATION_MS, - height, - view_change_index: 1, - previous_block_hash, - transactions_hash, // Single transaction is merkle root hash - rejected_transactions_hash: None, - committed_with_topology: Vec::new(), - }; - - let signature = SignatureOf::from_hash( - key_pair, - HashOf::from_untyped_unchecked(Hash::new(header.payload())), - )?; - let signatures = SignaturesOf::from(signature); - - let pending_block = PendingBlock { - header, - transactions: vec![TransactionValue { - value: transaction, - error: None, - }], - event_recommendations: Vec::new(), - signatures, - }; - - Ok(pending_block.commit_unchecked().into()) -} - -fn delete_every_nth( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, - nth: usize, -) -> Result> { - let mut instructions: Vec = Vec::new(); - for i in 0..domains { - let domain_id = DomainId::from_str(&i.to_string())?; - if i % nth == 0 { - instructions.push(UnregisterBox::new(domain_id.clone()).into()); - } else { - for j in 0..accounts_per_domain { - if j % nth == 0 { - let account_id = - AccountId::new(Name::from_str(&j.to_string())?, domain_id.clone()); - instructions.push(UnregisterBox::new(account_id.clone()).into()); - } - } - for k in 0..assets_per_domain { - if k % nth == 0 { - let asset_definition_id = - AssetDefinitionId::new(Name::from_str(&k.to_string())?, domain_id.clone()); - instructions.push(UnregisterBox::new(asset_definition_id).into()); - } - } - } - } - Ok(instructions) -} - -fn restore_every_nth( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, - nth: usize, -) -> Result> { - let mut instructions: Vec = Vec::new(); - for i in 0..domains { - let domain_id = DomainId::from_str(&i.to_string())?; - if i % nth == 0 { - let domain 
= Domain::new(domain_id.clone()); - instructions.push(RegisterBox::new(domain).into()); - } - for j in 0..accounts_per_domain { - if j % nth == 0 || i % nth == 0 { - let account_id = AccountId::new(Name::from_str(&j.to_string())?, domain_id.clone()); - let account = Account::new(account_id.clone(), []); - instructions.push(RegisterBox::new(account).into()); - } - } - for k in 0..assets_per_domain { - if k % nth == 0 || i % nth == 0 { - let asset_definition_id = - AssetDefinitionId::new(Name::from_str(&k.to_string())?, domain_id.clone()); - let asset_definition = AssetDefinition::new( - asset_definition_id, - iroha_data_model::asset::AssetValueType::Quantity, - ); - instructions.push(RegisterBox::new(asset_definition).into()); - } - } - } - Ok(instructions) -} - -fn build_wsv( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, - account_id: AccountId, - key_pair: KeyPair, -) -> Result { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(World::with([], BTreeSet::new()), kura); - - let mut instructions: Vec = Vec::new(); - for i in 0..domains { - let domain_id = DomainId::from_str(&i.to_string())?; - let domain = Domain::new(domain_id.clone()); - instructions.push(RegisterBox::new(domain).into()); - for j in 0..accounts_per_domain { - let account_id = AccountId::new(Name::from_str(&j.to_string())?, domain_id.clone()); - let account = Account::new(account_id.clone(), []); - instructions.push(RegisterBox::new(account).into()); - } - for k in 0..assets_per_domain { - let asset_definition_id = - AssetDefinitionId::new(Name::from_str(&k.to_string())?, domain_id.clone()); - let asset_definition = AssetDefinition::new( - asset_definition_id, - iroha_data_model::asset::AssetValueType::Quantity, - ); - instructions.push(RegisterBox::new(asset_definition).into()); - } - } - - let block = create_block(1, None, instructions, account_id, key_pair)?; - - wsv.apply(&block)?; - - Ok(wsv) -} - -pub struct WsvApplyBlocks { - wsv: WorldStateView, - blocks: Vec, -} - -impl WsvApplyBlocks { - /// Create [`WorldStateView`] and blocks for benchmarking - /// - /// # Errors - /// - Failed to parse [`AccountId`] - /// - Failed to generate [`KeyPair`] - /// - Failed to create instructions for block - pub fn setup() -> Result { - let domains = 100; - let accounts_per_domain = 1000; - let assets_per_domain = 1000; - let genesis_id: AccountId = "genesis@genesis".parse()?; - let key_pair = KeyPair::generate()?; - let wsv = build_wsv( - domains, - accounts_per_domain, - assets_per_domain, - genesis_id.clone(), - key_pair.clone(), - )?; - - let nth = 100; - let instructions = [ - delete_every_nth(domains, accounts_per_domain, assets_per_domain, nth), - restore_every_nth(domains, accounts_per_domain, assets_per_domain, nth), - ] - .into_iter() - .collect::, _>>()?; - - let mut previous_block_hash = wsv.latest_block_hash(); - let mut blocks = Vec::new(); - for (instructions, height) in instructions.into_iter().zip(wsv.height() + 1..) { - let block = create_block( - height, - previous_block_hash, - instructions, - genesis_id.clone(), - key_pair.clone(), - )?; - previous_block_hash = Some(block.hash()); - blocks.push(block); - } - - Ok(Self { wsv, blocks }) - } - - /// Run benchmark body. - /// - /// # Errors - /// - Not enough blocks - /// - Failed to apply block - /// - /// # Panics - /// If wsv isn't one block ahead of finalized wsv. 
- pub fn measure(Self { wsv, blocks }: &Self) -> Result<()> { - let mut finalized_wsv = wsv.clone(); - let mut wsv = finalized_wsv.clone(); - - for block in blocks { - finalized_wsv = wsv.clone(); - wsv.apply(block)?; - assert_eq!(wsv.height(), finalized_wsv.height() + 1); - } - - Ok(()) - } -} diff --git a/core/benches/apply_blocks/benchmark.rs b/core/benches/apply_blocks/benchmark.rs deleted file mode 100644 index 20079ab1527..00000000000 --- a/core/benches/apply_blocks/benchmark.rs +++ /dev/null @@ -1,22 +0,0 @@ -#![allow(missing_docs, clippy::restriction)] - -mod apply_blocks; - -use apply_blocks::WsvApplyBlocks; -use criterion::{black_box, criterion_group, criterion_main, Criterion}; - -fn apply_blocks(c: &mut Criterion) { - let bench = WsvApplyBlocks::setup().expect("Failed to setup benchmark"); - - let mut group = c.benchmark_group("apply_blocks"); - group.significance_level(0.1).sample_size(10); - group.bench_function("apply_blocks", |b| { - b.iter(|| { - WsvApplyBlocks::measure(black_box(&bench)).expect("Failed to execute benchmark"); - }); - }); - group.finish(); -} - -criterion_group!(wsv, apply_blocks); -criterion_main!(wsv); diff --git a/core/benches/apply_blocks/oneshot.rs b/core/benches/apply_blocks/oneshot.rs deleted file mode 100644 index 2a2950d5371..00000000000 --- a/core/benches/apply_blocks/oneshot.rs +++ /dev/null @@ -1,15 +0,0 @@ -//! Oneshot execution of `apply_blocks` benchmark. -//! Can be useful to profile using flamegraph. -//! -//! ```bash -//! CARGO_PROFILE_RELEASE_DEBUG=true cargo flamegraph --root --release --example apply_blocks -//! ``` - -mod apply_blocks; - -use apply_blocks::WsvApplyBlocks; - -fn main() { - let bench = WsvApplyBlocks::setup().expect("Failed to setup benchmark"); - WsvApplyBlocks::measure(&bench).expect("Failed to execute bnechmark"); -} diff --git a/core/benches/blocks/apply_blocks.rs b/core/benches/blocks/apply_blocks.rs new file mode 100644 index 00000000000..6a996a4d9e1 --- /dev/null +++ b/core/benches/blocks/apply_blocks.rs @@ -0,0 +1,75 @@ +use eyre::Result; +use iroha_core::{block::CommittedBlock, prelude::*}; +use iroha_data_model::prelude::*; + +#[path = "./common.rs"] +mod common; + +use common::*; + +pub struct WsvApplyBlocks { + wsv: WorldStateView, + blocks: Vec<CommittedBlock>, +} + +impl WsvApplyBlocks { + /// Create [`WorldStateView`] and blocks for benchmarking + /// + /// # Errors + /// - Failed to parse [`AccountId`] + /// - Failed to generate [`KeyPair`] + /// - Failed to create instructions for block + pub fn setup() -> Result<Self> { + let domains = 100; + let accounts_per_domain = 1000; + let assets_per_domain = 1000; + let account_id: AccountId = "alice@wonderland".parse()?; + let key_pair = KeyPair::generate()?; + let wsv = build_wsv(&account_id, &key_pair); + + let nth = 100; + let instructions = [ + populate_wsv(domains, accounts_per_domain, assets_per_domain, &account_id), + delete_every_nth(domains, accounts_per_domain, assets_per_domain, nth), + restore_every_nth(domains, accounts_per_domain, assets_per_domain, nth), + ]; + + let blocks = { + // Clone wsv because it will be changed during creation of block + let mut wsv = wsv.clone(); + instructions + .into_iter() + .map(|instructions| { + let block = + create_block(&mut wsv, instructions, account_id.clone(), key_pair.clone()); + wsv.apply_without_execution(&block).map(|_| block) + }) + .collect::<Result<Vec<_>, _>>()? + }; + + Ok(Self { wsv, blocks }) + } + + /// Run benchmark body.
+ /// + /// # Errors + /// - Not enough blocks + /// - Failed to apply block + /// + /// # Panics + /// If wsv isn't one block ahead of finalized wsv. + pub fn measure(Self { wsv, blocks }: &Self) -> Result<()> { + let mut finalized_wsv = wsv.clone(); + let mut wsv = finalized_wsv.clone(); + + assert_eq!(wsv.height(), 0); + for (block, i) in blocks.iter().zip(1..) { + finalized_wsv = wsv.clone(); + wsv.apply(block)?; + assert_eq!(wsv.height(), i); + assert_eq!(wsv.height(), finalized_wsv.height() + 1); + } + + Ok(()) + } +} diff --git a/core/benches/blocks/apply_blocks_benchmark.rs b/core/benches/blocks/apply_blocks_benchmark.rs new file mode 100644 index 00000000000..730d6f13037 --- /dev/null +++ b/core/benches/blocks/apply_blocks_benchmark.rs @@ -0,0 +1,23 @@ +#![allow(missing_docs)] + +mod apply_blocks; + +use apply_blocks::WsvApplyBlocks; +use criterion::{black_box, criterion_group, criterion_main, Criterion}; + +fn apply_blocks(c: &mut Criterion) { + tokio::runtime::Runtime::new().unwrap().block_on(async { + let bench = WsvApplyBlocks::setup().expect("Failed to setup benchmark"); + let mut group = c.benchmark_group("apply_blocks"); + group.significance_level(0.1).sample_size(10); + group.bench_function("apply_blocks", |b| { + b.iter(|| { + WsvApplyBlocks::measure(black_box(&bench)).expect("Failed to execute benchmark"); + }); + }); + group.finish(); + }); +} + +criterion_group!(wsv, apply_blocks); +criterion_main!(wsv); diff --git a/core/benches/blocks/apply_blocks_oneshot.rs b/core/benches/blocks/apply_blocks_oneshot.rs new file mode 100644 index 00000000000..4c8bdd6e389 --- /dev/null +++ b/core/benches/blocks/apply_blocks_oneshot.rs @@ -0,0 +1,31 @@ +//! Oneshot execution of `apply_blocks` benchmark. +//! Can be useful to profile using flamegraph. +//! +//! ```bash +//! CARGO_PROFILE_RELEASE_DEBUG=true cargo flamegraph --root --release --example apply_blocks +//! ``` + +mod apply_blocks; + +use apply_blocks::WsvApplyBlocks; +use iroha_config::base::proxy::Builder; +use iroha_data_model::Level; +use iroha_logger::{Configuration, ConfigurationProxy}; + +#[tokio::main] +async fn main() { + let log_config = Configuration { + max_log_level: Level::INFO.into(), + compact_mode: false, + ..ConfigurationProxy::default() + .build() + .expect("Default logger config should always build") + }; + // Can't use the logger if it fails to initialize.
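+ // Note (assumption, added for clarity): init() is expected to fail mainly when a global logger is already registered, so the eprintln! below serves as a plain stderr fallback for that case.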
+ if let Err(err) = iroha_logger::init(&log_config) { + eprintln!("Failed to initialize logger: {err}"); + } + iroha_logger::info!("Starting..."); + let bench = WsvApplyBlocks::setup().expect("Failed to setup benchmark"); + WsvApplyBlocks::measure(&bench).expect("Failed to execute benchmark"); +} diff --git a/core/benches/blocks/common.rs b/core/benches/blocks/common.rs new file mode 100644 index 00000000000..f4f412eb633 --- /dev/null +++ b/core/benches/blocks/common.rs @@ -0,0 +1,213 @@ +use std::str::FromStr as _; + +use iroha_core::{ + block::{BlockBuilder, CommittedBlock}, + prelude::*, + query::store::LiveQueryStore, + smartcontracts::{Execute, Registrable as _}, + sumeragi::network_topology::Topology, + wsv::World, +}; +use iroha_data_model::{ + account::Account, + asset::{AssetDefinition, AssetDefinitionId}, + domain::Domain, + isi::InstructionExpr, + prelude::*, + transaction::TransactionLimits, +}; +use iroha_primitives::unique_vec::UniqueVec; +use serde_json::json; + +/// Create block +pub fn create_block( + wsv: &mut WorldStateView, + instructions: Vec<InstructionExpr>, + account_id: AccountId, + key_pair: KeyPair, +) -> CommittedBlock { + let transaction = TransactionBuilder::new(account_id) + .with_instructions(instructions) + .sign(key_pair.clone()) + .unwrap(); + let limits = wsv.transaction_executor().transaction_limits; + + let topology = Topology::new(UniqueVec::new()); + let block = BlockBuilder::new( + vec![AcceptedTransaction::accept(transaction, &limits).unwrap()], + topology.clone(), + Vec::new(), + ) + .chain(0, wsv) + .sign(key_pair) + .unwrap() + .commit(&topology) + .unwrap(); + + // Verify that transactions are valid + for tx in &block.payload().transactions { + assert_eq!(tx.error, None); + } + + block +} + +pub fn populate_wsv( + domains: usize, + accounts_per_domain: usize, + assets_per_domain: usize, + owner_id: &AccountId, +) -> Vec<InstructionExpr> { + let mut instructions: Vec<InstructionExpr> = Vec::new(); + for i in 0..domains { + let domain_id = construct_domain_id(i); + let domain = Domain::new(domain_id.clone()); + instructions.push(RegisterExpr::new(domain).into()); + let can_unregister_domain = GrantExpr::new( + PermissionToken::new( + "CanUnregisterDomain".parse().unwrap(), + &json!({ "domain_id": domain_id.clone() }), + ), + owner_id.clone(), + ); + instructions.push(can_unregister_domain.into()); + for j in 0..accounts_per_domain { + let account_id = construct_account_id(j, domain_id.clone()); + let account = Account::new(account_id.clone(), []); + instructions.push(RegisterExpr::new(account).into()); + let can_unregister_account = GrantExpr::new( + PermissionToken::new( + "CanUnregisterAccount".parse().unwrap(), + &json!({ "account_id": account_id.clone() }), + ), + owner_id.clone(), + ); + instructions.push(can_unregister_account.into()); + } + for k in 0..assets_per_domain { + let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); + let asset_definition = AssetDefinition::new( + asset_definition_id.clone(), + iroha_data_model::asset::AssetValueType::Quantity, + ); + instructions.push(RegisterExpr::new(asset_definition).into()); + let can_unregister_asset_definition = GrantExpr::new( + PermissionToken::new( + "CanUnregisterAssetDefinition".parse().unwrap(), + &json!({ "asset_definition_id": asset_definition_id }), + ), + owner_id.clone(), + ); + instructions.push(can_unregister_asset_definition.into()); + } + } + instructions +} + +pub fn delete_every_nth( + domains: usize, + accounts_per_domain: usize, + assets_per_domain: usize, + nth: usize, +) -> Vec<InstructionExpr> { + let mut
instructions: Vec<InstructionExpr> = Vec::new(); + for i in 0..domains { + let domain_id = construct_domain_id(i); + if i % nth == 0 { + instructions.push(UnregisterExpr::new(domain_id.clone()).into()); + } else { + for j in 0..accounts_per_domain { + if j % nth == 0 { + let account_id = construct_account_id(j, domain_id.clone()); + instructions.push(UnregisterExpr::new(account_id.clone()).into()); + } + } + for k in 0..assets_per_domain { + if k % nth == 0 { + let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); + instructions.push(UnregisterExpr::new(asset_definition_id).into()); + } + } + } + } + instructions +} + +pub fn restore_every_nth( + domains: usize, + accounts_per_domain: usize, + assets_per_domain: usize, + nth: usize, +) -> Vec<InstructionExpr> { + let mut instructions: Vec<InstructionExpr> = Vec::new(); + for i in 0..domains { + let domain_id = construct_domain_id(i); + if i % nth == 0 { + let domain = Domain::new(domain_id.clone()); + instructions.push(RegisterExpr::new(domain).into()); + } + for j in 0..accounts_per_domain { + if j % nth == 0 || i % nth == 0 { + let account_id = construct_account_id(j, domain_id.clone()); + let account = Account::new(account_id.clone(), []); + instructions.push(RegisterExpr::new(account).into()); + } + } + for k in 0..assets_per_domain { + if k % nth == 0 || i % nth == 0 { + let asset_definition_id = construct_asset_definition_id(k, domain_id.clone()); + let asset_definition = AssetDefinition::new( + asset_definition_id, + iroha_data_model::asset::AssetValueType::Quantity, + ); + instructions.push(RegisterExpr::new(asset_definition).into()); + } + } + } + instructions +} + +pub fn build_wsv(account_id: &AccountId, key_pair: &KeyPair) -> WorldStateView { + let kura = iroha_core::kura::Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); + let mut domain = Domain::new(account_id.domain_id.clone()).build(account_id); + domain.accounts.insert( + account_id.clone(), + Account::new(account_id.clone(), [key_pair.public_key().clone()]).build(account_id), + ); + let mut wsv = WorldStateView::new(World::with([domain], UniqueVec::new()), kura, query_handle); + wsv.config.transaction_limits = TransactionLimits::new(u64::MAX, u64::MAX); + wsv.config.wasm_runtime_config.fuel_limit = u64::MAX; + wsv.config.wasm_runtime_config.max_memory = u32::MAX; + + { + let path_to_executor = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("../configs/peer/executor.wasm"); + let wasm = std::fs::read(&path_to_executor) + .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_executor.display())); + let executor = Executor::new(WasmSmartContract::from_compiled(wasm)); + UpgradeExpr::new(executor) + .execute(account_id, &mut wsv) + .expect("Failed to load executor"); + } + + wsv +} + +fn construct_domain_id(i: usize) -> DomainId { + DomainId::from_str(&format!("non_inlinable_domain_name_{i}")).unwrap() +} + +fn construct_account_id(i: usize, domain_id: DomainId) -> AccountId { + AccountId::new( + Name::from_str(&format!("non_inlinable_account_name_{i}")).unwrap(), + domain_id, + ) +} + +fn construct_asset_definition_id(i: usize, domain_id: DomainId) -> AssetDefinitionId { + AssetDefinitionId::new( + Name::from_str(&format!("non_inlinable_asset_definition_name_{i}")).unwrap(), + domain_id, + ) +} diff --git a/core/benches/blocks/validate_blocks.rs b/core/benches/blocks/validate_blocks.rs new file mode 100644 index 00000000000..6a6d0bc585d --- /dev/null +++ b/core/benches/blocks/validate_blocks.rs @@ -0,0 +1,80 @@ +use eyre::Result; +use
iroha_core::prelude::*; +use iroha_data_model::{isi::InstructionExpr, prelude::*}; + +#[path = "./common.rs"] +mod common; + +use common::*; + +#[derive(Clone)] +pub struct WsvValidateBlocks { + wsv: WorldStateView, + instructions: Vec<Vec<InstructionExpr>>, + key_pair: KeyPair, + account_id: AccountId, +} + +impl WsvValidateBlocks { + /// Create [`WorldStateView`] and blocks for benchmarking + /// + /// # Errors + /// - Failed to parse [`AccountId`] + /// - Failed to generate [`KeyPair`] + /// - Failed to create instructions for block + pub fn setup() -> Result<Self> { + let domains = 100; + let accounts_per_domain = 1000; + let assets_per_domain = 1000; + let account_id: AccountId = "alice@wonderland".parse()?; + let key_pair = KeyPair::generate()?; + let wsv = build_wsv(&account_id, &key_pair); + + let nth = 100; + let instructions = [ + populate_wsv(domains, accounts_per_domain, assets_per_domain, &account_id), + delete_every_nth(domains, accounts_per_domain, assets_per_domain, nth), + restore_every_nth(domains, accounts_per_domain, assets_per_domain, nth), + ] + .into_iter() + .collect::<Vec<_>>(); + + Ok(Self { + wsv, + instructions, + key_pair, + account_id, + }) + } + + /// Run benchmark body. + /// + /// # Errors + /// - Not enough blocks + /// - Failed to apply block + /// + /// # Panics + /// If wsv isn't one block ahead of finalized wsv. + pub fn measure( + Self { + wsv, + instructions, + key_pair, + account_id, + }: Self, + ) -> Result<()> { + let mut finalized_wsv = wsv; + let mut wsv = finalized_wsv.clone(); + + assert_eq!(wsv.height(), 0); + for (instructions, i) in instructions.into_iter().zip(1..) { + finalized_wsv = wsv.clone(); + let block = create_block(&mut wsv, instructions, account_id.clone(), key_pair.clone()); + wsv.apply_without_execution(&block)?; + assert_eq!(wsv.height(), i); + assert_eq!(wsv.height(), finalized_wsv.height() + 1); + } + + Ok(()) + } +} diff --git a/core/benches/validate_blocks/benchmark.rs b/core/benches/blocks/validate_blocks_benchmark.rs similarity index 93% rename from core/benches/validate_blocks/benchmark.rs rename to core/benches/blocks/validate_blocks_benchmark.rs index 96382bccb11..1417a1a426f 100644 --- a/core/benches/validate_blocks/benchmark.rs +++ b/core/benches/blocks/validate_blocks_benchmark.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs, clippy::restriction)] +#![allow(missing_docs)] mod validate_blocks; diff --git a/core/benches/blocks/validate_blocks_oneshot.rs b/core/benches/blocks/validate_blocks_oneshot.rs new file mode 100644 index 00000000000..bcdeb20a519 --- /dev/null +++ b/core/benches/blocks/validate_blocks_oneshot.rs @@ -0,0 +1,30 @@ +//! Oneshot execution of `validate_blocks` benchmark. +//! Can be useful to profile using flamegraph. +//! +//! ```bash +//! CARGO_PROFILE_RELEASE_DEBUG=true cargo flamegraph --root --release --example validate_blocks +//! ``` + +mod validate_blocks; + +use iroha_config::base::proxy::Builder; +use iroha_data_model::Level; +use iroha_logger::{Configuration, ConfigurationProxy}; +use validate_blocks::WsvValidateBlocks; + +fn main() { + let log_config = Configuration { + max_log_level: Level::INFO.into(), + compact_mode: false, + ..ConfigurationProxy::default() + .build() + .expect("Default logger config should always build") + }; + // Can't use the logger if it fails to initialize.
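+ // Note (assumption, added for clarity): same stderr fallback as in apply_blocks_oneshot.rs; both oneshot examples are intended to run standalone under a profiler such as flamegraph.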
+ if let Err(err) = iroha_logger::init(&log_config) { + eprintln!("Failed to initialize logger: {err}"); + } + iroha_logger::info!("Starting..."); + let bench = WsvValidateBlocks::setup().expect("Failed to setup benchmark"); + WsvValidateBlocks::measure(bench).expect("Failed to execute benchmark"); +} diff --git a/core/benches/kura.rs b/core/benches/kura.rs index 3f5ecfbc0e4..c0371201191 100644 --- a/core/benches/kura.rs +++ b/core/benches/kura.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs, clippy::restriction)] +#![allow(missing_docs)] use std::str::FromStr as _; @@ -8,21 +8,21 @@ use iroha_core::{ block::*, kura::{BlockStore, LockStatus}, prelude::*, + query::store::LiveQueryStore, + sumeragi::network_topology::Topology, wsv::World, }; use iroha_crypto::KeyPair; -use iroha_data_model::{ - block::VersionedCommittedBlock, prelude::*, transaction::TransactionLimits, -}; -use iroha_version::scale::EncodeVersioned; +use iroha_data_model::{prelude::*, transaction::TransactionLimits}; +use iroha_primitives::unique_vec::UniqueVec; use tokio::{fs, runtime::Runtime}; -async fn measure_block_size_for_n_validators(n_validators: u32) { +async fn measure_block_size_for_n_executors(n_executors: u32) { let alice_id = AccountId::from_str("alice@test").expect("tested"); let bob_id = AccountId::from_str("bob@test").expect("tested"); let xor_id = AssetDefinitionId::from_str("xor#test").expect("tested"); - let alice_xor_id = <Asset as Identifiable>::Id::new(xor_id, alice_id); - let transfer = TransferBox::new( + let alice_xor_id = AssetId::new(xor_id, alice_id); + let transfer = TransferExpr::new( IdBox::AssetId(alice_xor_id), 10_u32.to_value(), IdBox::AccountId(bob_id), @@ -30,7 +30,7 @@ async fn measure_block_size_for_n_executors(n_executors: u32) { let keypair = KeyPair::generate().expect("Failed to generate KeyPair."); let tx = TransactionBuilder::new(AccountId::from_str("alice@wonderland").expect("checked")) .with_instructions([transfer]) - .sign(keypair) + .sign(keypair.clone()) .expect("Failed to sign."); let transaction_limits = TransactionLimits { max_instruction_number: 4096, @@ -43,40 +43,33 @@ iroha_core::kura::Kura::new(iroha_config::kura::Mode::Strict, dir.path(), false).unwrap(); let _thread_handle = iroha_core::kura::Kura::start(kura.clone()); - let mut block = BlockBuilder { - transactions: vec![tx], - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: iroha_core::sumeragi::network_topology::Topology::new(Vec::new()), - key_pair: KeyPair::generate().expect("Failed to generate KeyPair"), - wsv: &mut WorldStateView::new(World::new(), kura), - } - .build(); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(World::new(), kura, query_handle); + let topology = Topology::new(UniqueVec::new()); + let mut block = BlockBuilder::new(vec![tx], topology, Vec::new()) + .chain(0, &mut wsv) + .sign(KeyPair::generate().unwrap()) + .unwrap(); - for _ in 1..n_validators { + for _ in 1..n_executors { block = block .sign(KeyPair::generate().expect("Failed to generate KeyPair.")) .unwrap(); } - let block: VersionedCommittedBlock = block.commit_unchecked().into(); let mut block_store = BlockStore::new(dir.path(), LockStatus::Unlocked); block_store.create_files_if_they_do_not_exist().unwrap(); - - let serialized_block: Vec<u8> = block.encode_versioned(); - block_store - .append_block_to_chain(&serialized_block) - .unwrap(); + block_store.append_block_to_chain(&block.into()).unwrap(); let metadata =
fs::metadata(dir.path().join("blocks.data")).await.unwrap(); let file_size = Byte::from_bytes(u128::from(metadata.len())).get_appropriate_unit(false); - println!("For {n_validators} validators: {file_size}"); + println!("For {n_executors} executors: {file_size}"); } async fn measure_block_size_async() { println!("File size of a block with 1 transaction with 1 Transfer instruction is:",); for max_faults in 0_u32..5_u32 { - let n_validators = 3 * max_faults + 1; - measure_block_size_for_n_validators(n_validators).await; + let n_executors = 3 * max_faults + 1; + measure_block_size_for_n_executors(n_executors).await; } } diff --git a/core/benches/validate_blocks/oneshot.rs b/core/benches/validate_blocks/oneshot.rs deleted file mode 100644 index f381e38f649..00000000000 --- a/core/benches/validate_blocks/oneshot.rs +++ /dev/null @@ -1,15 +0,0 @@ -//! Oneshot execution of `validate_blocks` benchmark. -//! Can be useful to profile using flamegraph. -//! -//! ```bash -//! CARGO_PROFILE_RELEASE_DEBUG=true cargo flamegraph --root --release --example validate_blocks -//! ``` - -mod validate_blocks; - -use validate_blocks::WsvValidateBlocks; - -fn main() { - let bench = WsvValidateBlocks::setup().expect("Failed to setup benchmark"); - WsvValidateBlocks::measure(bench).expect("Failed to execute bnechmark"); -} diff --git a/core/benches/validate_blocks/validate_blocks.rs b/core/benches/validate_blocks/validate_blocks.rs deleted file mode 100644 index 50073869332..00000000000 --- a/core/benches/validate_blocks/validate_blocks.rs +++ /dev/null @@ -1,237 +0,0 @@ -#![allow(missing_docs, clippy::restriction)] - -use std::{collections::BTreeSet, str::FromStr as _}; - -use eyre::Result; - -use iroha_core::{ - block::BlockBuilder, prelude::*, smartcontracts::Execute, sumeragi::network_topology::Topology, - wsv::World, -}; -use iroha_data_model::{ - asset::{AssetDefinition, AssetDefinitionId}, - block::VersionedCommittedBlock, - isi::InstructionBox, - prelude::*, - transaction::TransactionLimits, -}; - -/// Create block and validate it -fn create_block( - instructions: Vec<InstructionBox>, - account_id: AccountId, - key_pair: KeyPair, - wsv: &mut WorldStateView, -) -> Result<VersionedCommittedBlock> { - let transaction = TransactionBuilder::new(account_id) - .with_instructions(instructions) - .sign(key_pair.clone())?; - - let transaction_limits = &wsv.transaction_validator().transaction_limits; - let transaction = AcceptedTransaction::accept(transaction, transaction_limits)?; - - let pending_block = BlockBuilder { - transactions: vec![transaction], - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair, - wsv, - } - .build(); - - Ok(pending_block.commit_unchecked().into()) -} - -fn populate_wsv( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, -) -> Result<Vec<InstructionBox>> { - let mut instructions: Vec<InstructionBox> = Vec::new(); - for i in 0..domains { - let domain_id = DomainId::from_str(&i.to_string())?; - let domain = Domain::new(domain_id.clone()); - instructions.push(RegisterBox::new(domain).into()); - for j in 0..accounts_per_domain { - let account_id = AccountId::new(Name::from_str(&j.to_string())?, domain_id.clone()); - let account = Account::new(account_id.clone(), []); - instructions.push(RegisterBox::new(account).into()); - } - for k in 0..assets_per_domain { - let asset_definition_id = - AssetDefinitionId::new(Name::from_str(&k.to_string())?, domain_id.clone()); - let asset_definition = AssetDefinition::new( - asset_definition_id, -
iroha_data_model::asset::AssetValueType::Quantity, - ); - instructions.push(RegisterBox::new(asset_definition).into()); - } - } - Ok(instructions) -} - -fn delete_every_nth( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, - nth: usize, -) -> Result<Vec<InstructionBox>> { - let mut instructions: Vec<InstructionBox> = Vec::new(); - for i in 0..domains { - let domain_id = DomainId::from_str(&i.to_string())?; - if i % nth == 0 { - instructions.push(UnregisterBox::new(domain_id.clone()).into()); - } else { - for j in 0..accounts_per_domain { - if j % nth == 0 { - let account_id = - AccountId::new(Name::from_str(&j.to_string())?, domain_id.clone()); - instructions.push(UnregisterBox::new(account_id.clone()).into()); - } - } - for k in 0..assets_per_domain { - if k % nth == 0 { - let asset_definition_id = - AssetDefinitionId::new(Name::from_str(&k.to_string())?, domain_id.clone()); - instructions.push(UnregisterBox::new(asset_definition_id).into()); - } - } - } - } - Ok(instructions) -} - -fn restore_every_nth( - domains: usize, - accounts_per_domain: usize, - assets_per_domain: usize, - nth: usize, -) -> Result<Vec<InstructionBox>> { - let mut instructions: Vec<InstructionBox> = Vec::new(); - for i in 0..domains { - let domain_id = DomainId::from_str(&i.to_string())?; - if i % nth == 0 { - let domain = Domain::new(domain_id.clone()); - instructions.push(RegisterBox::new(domain).into()); - } - for j in 0..accounts_per_domain { - if j % nth == 0 || i % nth == 0 { - let account_id = AccountId::new(Name::from_str(&j.to_string())?, domain_id.clone()); - let account = Account::new(account_id.clone(), []); - instructions.push(RegisterBox::new(account).into()); - } - } - for k in 0..assets_per_domain { - if k % nth == 0 || i % nth == 0 { - let asset_definition_id = - AssetDefinitionId::new(Name::from_str(&k.to_string())?, domain_id.clone()); - let asset_definition = AssetDefinition::new( - asset_definition_id, - iroha_data_model::asset::AssetValueType::Quantity, - ); - instructions.push(RegisterBox::new(asset_definition).into()); - } - } - } - Ok(instructions) -} - -fn build_wsv(account_id: &AccountId, key_pair: &KeyPair) -> WorldStateView { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(World::with([], BTreeSet::new()), kura); - wsv.config.transaction_limits = TransactionLimits::new(u64::MAX, u64::MAX); - - { - let domain = Domain::new(account_id.domain_id.clone()); - RegisterBox::new(domain) - .execute(account_id, &mut wsv) - .expect("Failed to register domain"); - let account = Account::new(account_id.clone(), [key_pair.public_key().clone()]); - RegisterBox::new(account) - .execute(account_id, &mut wsv) - .expect("Failed to register account"); - } - - { - let path_to_validator = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .join("../configs/peer/validator.wasm"); - let wasm = std::fs::read(&path_to_validator) - .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_validator.display())); - let validator = Validator::new(WasmSmartContract::from_compiled(wasm)); - UpgradeBox::new(validator) - .execute(account_id, &mut wsv) - .expect("Failed to load validator"); - } - - wsv -} - -#[derive(Clone)] -pub struct WsvValidateBlocks { - wsv: WorldStateView, - instructions: Vec<Vec<InstructionBox>>, - key_pair: KeyPair, - account_id: AccountId, -} - -impl WsvValidateBlocks { - /// Create [`WorldStateView`] and blocks for benchmarking - /// - /// # Errors - /// - Failed to parse [`AccountId`] - /// - Failed to generate [`KeyPair`] - /// - Failed to create instructions for block - pub fn setup() -> Result<Self> {
- let domains = 100; - let accounts_per_domain = 1000; - let assets_per_domain = 1000; - let genesis_id: AccountId = "genesis@genesis".parse()?; - let key_pair = KeyPair::generate()?; - let wsv = build_wsv(&genesis_id, &key_pair); - - let nth = 100; - let instructions = [ - populate_wsv(domains, accounts_per_domain, assets_per_domain), - delete_every_nth(domains, accounts_per_domain, assets_per_domain, nth), - restore_every_nth(domains, accounts_per_domain, assets_per_domain, nth), - ] - .into_iter() - .collect::<Result<Vec<_>, _>>()?; - - Ok(Self { - wsv, - instructions, - key_pair, - account_id: genesis_id, - }) - } - - /// Run benchmark body. - /// - /// # Errors - /// - Not enough blocks - /// - Failed to apply block - /// - /// # Panics - /// If wsv isn't one block ahead of finalized wsv. - pub fn measure( - Self { - wsv, - instructions, - key_pair, - account_id, - }: Self, - ) -> Result<()> { - let mut finalized_wsv = wsv; - let mut wsv = finalized_wsv.clone(); - - for instructions in instructions { - finalized_wsv = wsv.clone(); - let block = create_block(instructions, account_id.clone(), key_pair.clone(), &mut wsv)?; - wsv.apply_without_execution(&block)?; - assert_eq!(wsv.height(), finalized_wsv.height() + 1); - } - - Ok(()) - } -} diff --git a/core/benches/validation.rs b/core/benches/validation.rs index 3a1247cca62..0a474ab3ea0 100644 --- a/core/benches/validation.rs +++ b/core/benches/validation.rs @@ -1,17 +1,19 @@ -#![allow(missing_docs, clippy::restriction)] +#![allow(missing_docs)] -use std::{collections::BTreeSet, str::FromStr as _}; +use std::str::FromStr as _; use criterion::{criterion_group, criterion_main, Criterion}; use iroha_core::{ block::*, prelude::*, + query::store::LiveQueryStore, smartcontracts::{isi::Registrable as _, Execute}, sumeragi::network_topology::Topology, - tx::TransactionValidator, + tx::TransactionExecutor, wsv::World, }; use iroha_data_model::{prelude::*, transaction::TransactionLimits}; +use iroha_primitives::unique_vec::UniqueVec; const START_DOMAIN: &str = "start"; const START_ACCOUNT: &str = "starter"; @@ -21,15 +23,15 @@ const TRANSACTION_LIMITS: TransactionLimits = TransactionLimits { max_wasm_size_bytes: 0, }; -fn build_test_transaction(keys: KeyPair) -> VersionedSignedTransaction { +fn build_test_transaction(keys: KeyPair) -> SignedTransaction { let domain_name = "domain"; let domain_id = DomainId::from_str(domain_name).expect("does not panic"); - let create_domain = RegisterBox::new(Domain::new(domain_id)); + let create_domain = RegisterExpr::new(Domain::new(domain_id)); let account_name = "account"; let (public_key, _) = KeyPair::generate() .expect("Failed to generate KeyPair.") .into(); - let create_account = RegisterBox::new(Account::new( + let create_account = RegisterExpr::new(Account::new( AccountId::new( account_name.parse().expect("Valid"), domain_name.parse().expect("Valid"), @@ -40,7 +42,7 @@ fn build_test_transaction(keys: KeyPair) -> VersionedSignedTransaction { "xor".parse().expect("Valid"), domain_name.parse().expect("Valid"), ); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id)); + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); let instructions = [create_domain, create_account, create_asset]; TransactionBuilder::new(AccountId::new( @@ -54,6 +56,7 @@ fn build_test_transaction(keys: KeyPair) -> VersionedSignedTransaction { fn build_test_and_transient_wsv(keys: KeyPair) -> WorldStateView { let kura = iroha_core::kura::Kura::blank_kura_for_testing(); + let
query_handle = LiveQueryStore::test().start(); let (public_key, _) = keys.into(); let mut wsv = WorldStateView::new( @@ -66,21 +69,22 @@ fn build_test_and_transient_wsv(keys: KeyPair) -> WorldStateView { let mut domain = Domain::new(domain_id).build(&account_id); let account = Account::new(account_id.clone(), [public_key]).build(&account_id); assert!(domain.add_account(account).is_none()); - World::with([domain], BTreeSet::new()) + World::with([domain], UniqueVec::new()) }, kura, + query_handle, ); { - let path_to_validator = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) - .join("../configs/peer/validator.wasm"); - let wasm = std::fs::read(&path_to_validator) - .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_validator.display())); - let validator = Validator::new(WasmSmartContract::from_compiled(wasm)); + let path_to_executor = std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("../configs/peer/executor.wasm"); + let wasm = std::fs::read(&path_to_executor) + .unwrap_or_else(|_| panic!("Failed to read file: {}", path_to_executor.display())); + let executor = Executor::new(WasmSmartContract::from_compiled(wasm)); let authority = "genesis@genesis".parse().expect("Valid"); - UpgradeBox::new(validator) + UpgradeExpr::new(executor) .execute(&authority, &mut wsv) - .expect("Failed to load validator"); + .expect("Failed to load executor"); } wsv @@ -126,10 +130,10 @@ fn validate_transaction(criterion: &mut Criterion) { let mut failure_count = 0; let wsv = build_test_and_transient_wsv(keys); let _ = criterion.bench_function("validate", move |b| { - let transaction_validator = TransactionValidator::new(TRANSACTION_LIMITS); + let transaction_executor = TransactionExecutor::new(TRANSACTION_LIMITS); b.iter(|| { let mut wsv = wsv.clone(); - match transaction_validator.validate(transaction.clone(), &mut wsv) { + match transaction_executor.validate(transaction.clone(), &mut wsv) { Ok(_) => success_count += 1, Err(_) => failure_count += 1, } @@ -145,25 +149,19 @@ fn sign_blocks(criterion: &mut Criterion) { .expect("Failed to accept transaction."); let key_pair = KeyPair::generate().expect("Failed to generate KeyPair."); let kura = iroha_core::kura::Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(World::new(), kura, query_handle); + let topology = Topology::new(UniqueVec::new()); let mut success_count = 0; let mut failures_count = 0; - let _ = criterion.bench_function("sign_block", |b| { - b.iter(|| { - let block = BlockBuilder { - transactions: vec![transaction.clone()], - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair: key_pair.clone(), - wsv: &mut WorldStateView::new(World::new(), kura.clone()), - } - .build(); - match block.sign(key_pair.clone()) { - Ok(_) => success_count += 1, - Err(_) => failures_count += 1, - } + let block = BlockBuilder::new(vec![transaction], topology, Vec::new()).chain(0, &mut wsv); + + let _ = criterion.bench_function("sign_block", |b| { + b.iter(|| match block.clone().sign(key_pair.clone()) { + Ok(_) => success_count += 1, + Err(_) => failures_count += 1, }); }); println!("Success count: {success_count}, Failures count: {failures_count}"); diff --git a/core/src/block.rs b/core/src/block.rs index 14f68e405cf..9322d16400d 100644 --- a/core/src/block.rs +++ b/core/src/block.rs @@ -1,16 +1,10 @@ -//! This module contains [`Block`] structures for each state, it's -//! 
transitions, implementations and related traits -//! implementations. [`Block`]s are organised into a linear sequence -//! over time (also known as the block chain). A Block's life-cycle -//! starts from [`PendingBlock`]. -#![allow( - clippy::module_name_repetitions, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - -use std::error::Error; +//! This module contains [`Block`] structures for each state. Transitions are modeled as follows: +//! 1. If a new block is constructed by the node: +//! `BlockBuilder<Pending>` -> `BlockBuilder<Chained>` -> `ValidBlock` -> `CommittedBlock` +//! 2. If a block is received, i.e. deserialized: +//! `SignedBlock` -> `ValidBlock` -> `CommittedBlock` +//! [`Block`]s are organised into a linear sequence over time (also known as the block chain). +use std::error::Error as _; use iroha_config::sumeragi::default::DEFAULT_CONSENSUS_ESTIMATION_MS; use iroha_crypto::{HashOf, KeyPair, MerkleTree, SignatureOf, SignaturesOf}; @@ -21,45 +15,34 @@ use iroha_data_model::{ transaction::{error::TransactionRejectionReason, prelude::*}, }; use iroha_genesis::GenesisTransaction; -use parity_scale_codec::{Decode, Encode}; -use sealed::sealed; +use iroha_primitives::unique_vec::UniqueVec; use thiserror::Error; -use crate::{ - prelude::*, - sumeragi::network_topology::{SignatureVerificationError, Topology}, - tx::{AcceptTransactionFail, TransactionValidator}, -}; - -/// Errors occurred on block commit -#[derive(Debug, Error, displaydoc::Display, Clone, Copy)] -pub enum BlockCommitError { - /// Error during signature verification - SignatureVerificationError(#[from] SignatureVerificationError), -} +pub use self::{chained::Chained, commit::CommittedBlock, valid::ValidBlock}; +use crate::{prelude::*, sumeragi::network_topology::Topology, tx::AcceptTransactionFail}; -/// Errors occurred on signing block or adding additional signature -#[derive(Debug, Error, displaydoc::Display)] -pub enum BlockSignError { - /// Failed to create signature - Sign(#[source] iroha_crypto::error::Error), - /// Failed to add signature for block - AddSignature(#[source] iroha_crypto::error::Error), +/// Error during transaction validation +#[derive(Debug, displaydoc::Display, Error)] +pub enum TransactionValidationError { + /// Failed to accept transaction + Accept(#[from] AcceptTransactionFail), + /// A transaction is marked as accepted, but is actually invalid + NotValid(#[from] TransactionRejectionReason), + /// A transaction is marked as rejected, but is actually valid + RejectedIsValid, } -/// Errors occurred on block revalidation -#[derive(Debug, Error, displaydoc::Display)] -pub enum BlockRevalidationError { - /// Block is empty - Empty, +/// Errors that occur during block validation +#[derive(Debug, displaydoc::Display, Error)] +pub enum BlockValidationError { /// Block has committed transactions HasCommittedTransactions, /// Mismatch between the actual and expected hashes of the latest block. Expected: {expected:?}, actual: {actual:?} LatestBlockHashMismatch { /// Expected value - expected: Option<HashOf<VersionedCommittedBlock>>, + expected: Option<HashOf<SignedBlock>>, /// Actual value - actual: Option<HashOf<VersionedCommittedBlock>>, + actual: Option<HashOf<SignedBlock>>, }, /// Mismatch between the actual and expected height of the latest block. 
Expected: {expected}, actual: {actual} LatestBlockHeightMismatch { @@ -70,16 +53,14 @@ pub enum BlockRevalidationError { }, /// The transaction hash stored in the block header does not match the actual transaction hash TransactionHashMismatch, - /// The hash of a rejected transaction stored in the block header does not match the actual hash or this transaction - RejectedTransactionHashMismatch, - /// Error during transaction revalidation - TransactionRevalidation(#[from] TransactionRevalidationError), + /// Error during transaction validation + TransactionValidation(#[from] TransactionValidationError), /// Mismatch between the actual and expected topology. Expected: {expected:?}, actual: {actual:?} TopologyMismatch { /// Expected value - expected: Vec<PeerId>, + expected: UniqueVec<PeerId>, /// Actual value - actual: Vec<PeerId>, + actual: UniqueVec<PeerId>, }, /// Error during block signatures check SignatureVerification(#[from] SignatureVerificationError), @@ -87,532 +68,657 @@ pub enum BlockRevalidationError { ViewChangeIndexTooLarge, } -/// Error during transaction revalidation -#[derive(Debug, Error, displaydoc::Display)] -pub enum TransactionRevalidationError { - /// Failed to accept transaction - Accept(#[from] AcceptTransactionFail), - /// Transaction isn't valid but must be - NotValid(#[from] TransactionRejectionReason), - /// Rejected transaction in valid - RejectedIsValid, +/// Error during signature verification +#[derive(thiserror::Error, displaydoc::Display, Debug, Clone, Copy, PartialEq, Eq)] +pub enum SignatureVerificationError { + /// The block doesn't have enough valid signatures to be committed ({votes_count} out of {min_votes_for_commit}) + NotEnoughSignatures { + /// Current number of signatures + votes_count: usize, + /// Minimal required number of signatures + min_votes_for_commit: usize, + }, + /// The block doesn't contain an expected signature. The expected signature may belong to the leader or to the current peer + SignatureMissing, + /// Found signature that does not correspond to block payload + UnknownSignature, + /// The block doesn't have proxy tail signature + ProxyTailMissing, + /// The block doesn't have leader signature + LeaderMissing, }
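`NotEnoughSignatures` encodes the BFT quorum rule that `verify_signatures` applies further down: a block commits only with at least 2f + 1 votes, where f is the maximum number of faulty peers a network of n = 3f + 1 can tolerate. A minimal sketch of that arithmetic follows; the helper is hypothetical and only mirrors what `Topology::min_votes_for_commit` provides in the codebase:

```rust
/// Hypothetical stand-in for `Topology::min_votes_for_commit`:
/// a network of n peers tolerates f = (n - 1) / 3 faulty peers,
/// so committing a block takes 2f + 1 votes.
fn min_votes_for_commit(n_peers: usize) -> usize {
    let max_faults = (n_peers - 1) / 3;
    2 * max_faults + 1
}

fn main() {
    // The 7-peer topology used by the tests below: f = 2, so 5 votes commit a block.
    assert_eq!(min_votes_for_commit(7), 5);
    // A single peer is its own quorum, matching the consensus-not-required test.
    assert_eq!(min_votes_for_commit(1), 1);
}
```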
-/// Transaction data is permanently recorded in chunks called -/// blocks. -#[derive(Debug, Clone, Decode, Encode)] -pub struct PendingBlock { - /// Block header - pub header: BlockHeader, - /// Array of transactions. - pub transactions: Vec<TransactionValue>, - /// Event recommendations. - pub event_recommendations: Vec<Event>, - /// Signatures of peers which approved this block - pub signatures: SignaturesOf<PendingBlock>, -} +/// Builder for blocks +#[derive(Debug, Clone)] +pub struct BlockBuilder<B>(B); -/// Builder for `PendingBlock` -pub struct BlockBuilder<'world> { - /// Block's transactions. - pub transactions: Vec<AcceptedTransaction>, - /// Block's event recommendations. - pub event_recommendations: Vec<Event>, - /// The view change index this block was committed with. Produced by consensus. - pub view_change_index: u64, - /// The topology this block was committed with. Produced by consensus. - pub committed_with_topology: Topology, - /// The keypair used to sign this block. - pub key_pair: KeyPair, - /// The world state to be used when validating the block. - pub wsv: &'world mut WorldStateView, -} +mod pending { + use iroha_data_model::transaction::TransactionValue; -impl BlockBuilder<'_> { - /// Create a new [`PendingBlock`] from transactions. - pub fn build(self) -> PendingBlock { - let timestamp = crate::current_time().as_millis(); - let height = self.wsv.height() + 1; - let previous_block_hash = self.wsv.latest_block_hash(); - let transaction_validator = self.wsv.transaction_validator(); - // TODO: Need to check if the `transactions` vector is empty. It shouldn't be allowed. - - let mut header = BlockHeader { - timestamp, - consensus_estimation: DEFAULT_CONSENSUS_ESTIMATION_MS, - height, - view_change_index: self.view_change_index, - previous_block_hash, - transactions_hash: None, - rejected_transactions_hash: None, - committed_with_topology: self.committed_with_topology.sorted_peers, - }; - - let mut txs = Vec::new(); - - for tx in self.transactions { - match transaction_validator.validate(tx, self.wsv) { - Ok(transaction) => txs.push(TransactionValue { - value: transaction, - error: None, - }), - Err((transaction, error)) => { - iroha_logger::warn!( - reason = %error, - caused_by = ?error.source(), - "Transaction validation failed", - ); - txs.push(TransactionValue { - value: transaction, - error: Some(error), - }); - } + use super::*; + + /// First stage in the life-cycle of a [`Block`]. + /// In the beginning the block is assumed to be verified and to contain only accepted transactions. + /// Additionally the block must retain events emitted during the execution of on-chain logic during + /// the previous round, which might then be processed by the trigger system. + #[derive(Debug, Clone)] + pub struct Pending { + /// The topology at the time of block commit. + commit_topology: Topology, + /// Collection of transactions which have been accepted. + /// Transactions will be validated when the block is chained. + transactions: Vec<AcceptedTransaction>, + /// Event recommendations for use in triggers and off-chain work + event_recommendations: Vec<Event>, + } + + impl BlockBuilder<Pending> { + /// Create [`Self`] + /// + /// # Panics + /// + /// if the given list of transactions is empty + #[inline] + pub fn new( + transactions: Vec<AcceptedTransaction>, + commit_topology: Topology, + event_recommendations: Vec<Event>, + ) -> Self { + assert!(!transactions.is_empty(), "Empty block created"); + + Self(Pending { + commit_topology, + transactions, + event_recommendations, + }) + } + + fn make_header( + previous_height: u64, + previous_block_hash: Option<HashOf<SignedBlock>>, + view_change_index: u64, + transactions: &[TransactionValue], + ) -> BlockHeader { + BlockHeader { + timestamp_ms: iroha_data_model::current_time() + .as_millis() + .try_into() + .expect("Time should fit into u64"), + consensus_estimation_ms: DEFAULT_CONSENSUS_ESTIMATION_MS, + height: previous_height + 1, + view_change_index, + previous_block_hash, + transactions_hash: transactions + .iter() + .map(TransactionValue::hash) + .collect::<MerkleTree<_>>() + .hash(), } } - header.transactions_hash = txs - .iter() - .filter(|tx| tx.error.is_none()) - .map(|tx| tx.value.hash()) - .collect::<MerkleTree<_>>() - .hash(); - header.rejected_transactions_hash = txs - .iter() - .filter(|tx| tx.error.is_some()) - .map(|tx| tx.value.hash()) - .collect::<MerkleTree<_>>() - .hash(); - // TODO: Validate Event recommendations somehow? 
- - let signature = SignatureOf::from_hash( - self.key_pair, - HashOf::from_untyped_unchecked(Hash::new(header.payload())), - ) - .expect("Signing of new block failed."); - let signatures = SignaturesOf::from(signature); - - PendingBlock { - header, - transactions: txs, - event_recommendations: self.event_recommendations, - signatures, + + fn categorize_transactions( + transactions: Vec<AcceptedTransaction>, + wsv: &mut WorldStateView, + ) -> Vec<TransactionValue> { + transactions + .into_iter() + .map(|tx| match wsv.transaction_executor().validate(tx, wsv) { + Ok(tx) => TransactionValue { + value: tx, + error: None, + }, + Err((tx, error)) => { + iroha_logger::warn!( + reason = %error, + caused_by = ?error.source(), + "Transaction validation failed", + ); + TransactionValue { + value: tx, + error: Some(error), + } + } + }) + .collect() + } + + /// Chain the block with the existing blockchain. + /// + /// Upon executing this method, the current timestamp is stored in the block header. + pub fn chain( + self, + view_change_index: u64, + wsv: &mut WorldStateView, + ) -> BlockBuilder<Chained> { + let transactions = Self::categorize_transactions(self.0.transactions, wsv); + + BlockBuilder(Chained(BlockPayload { + header: Self::make_header( + wsv.height(), + wsv.latest_block_hash(), + view_change_index, + &transactions, + ), + transactions, + commit_topology: self.0.commit_topology.ordered_peers, + event_recommendations: self.0.event_recommendations, + })) } } } -impl PendingBlock { - const fn is_genesis(&self) -> bool { - self.header.height == 1 - } +mod chained { + use super::*; - /// Calculate the partial hash of the current block. - pub fn partial_hash(&self) -> HashOf<Self> { - HashOf::from_untyped_unchecked(Hash::new(self.header.payload())) + /// When a [`Pending`] block is chained with the blockchain it becomes a [`Chained`] block. + #[derive(Debug, Clone)] + pub struct Chained(pub(super) BlockPayload); + + impl BlockBuilder<Chained> { + /// Sign this block and get [`SignedBlock`]. + /// + /// # Errors + /// + /// Fails if signature generation fails + pub fn sign(self, key_pair: KeyPair) -> Result<ValidBlock, iroha_crypto::error::Error> { + let signature = SignatureOf::new(key_pair, &self.0 .0)?; + + Ok(ValidBlock( + SignedBlockV1 { + payload: self.0 .0, + signatures: SignaturesOf::from(signature), + } + .into(), + )) + } } } - /// Return signatures that are verified with the `hash` of this block, - /// removing all other signatures. - #[inline] - pub fn retain_verified_signatures(&mut self) -> impl Iterator<Item = &SignatureOf<Self>> { - self.signatures.retain_verified_by_hash(self.partial_hash()) - } +mod valid { + use super::*; + use crate::sumeragi::network_topology::Role; - /// Commit block to the store. - /// When calling this function, the user is responsible for the validity of the block signatures. - /// Preference should be given to [`Self::commit`], where signature verification is built in. - #[inline] - pub fn commit_unchecked(self) -> CommittedBlock { - let Self { - header, - transactions, - event_recommendations, - signatures, - } = self; - - CommittedBlock { - event_recommendations, - header, - transactions, - signatures: signatures.transmute(), - } - } + /// Block that was validated and accepted + #[derive(Debug, Clone)] + #[repr(transparent)] + pub struct ValidBlock(pub(super) SignedBlock); - /// Verify signatures and commit block to the store. 
- /// - /// # Errors - /// - /// Not enough signatures - #[inline] - pub fn commit( - mut self, - topology: &Topology, - ) -> Result { - let hash = self.partial_hash(); - if let Err(err) = topology.verify_signatures(&mut self.signatures, hash) { - return Err((self, err.into())); + impl ValidBlock { + pub(crate) fn payload(&self) -> &BlockPayload { + self.0.payload() + } + pub(crate) fn signatures(&self) -> &SignaturesOf { + self.0.signatures() } - Ok(self.commit_unchecked()) - } + /// Validate a block against the current state of the world. + /// + /// # Errors + /// + /// - Block is empty + /// - There is a mismatch between candidate block height and actual blockchain height + /// - There is a mismatch between candidate block previous block hash and actual latest block hash + /// - Block has committed transactions + /// - Block header transaction hashes don't match with computed transaction hashes + /// - Error during validation of individual transactions + /// - Topology field is incorrect + pub fn validate( + block: SignedBlock, + topology: &Topology, + wsv: &mut WorldStateView, + ) -> Result { + let actual_commit_topology = &block.payload().commit_topology; + let expected_commit_topology = &topology.ordered_peers; + + if actual_commit_topology != expected_commit_topology { + let actual_commit_topology = actual_commit_topology.clone(); + + return Err(( + block, + BlockValidationError::TopologyMismatch { + expected: expected_commit_topology.clone(), + actual: actual_commit_topology, + }, + )); + } - /// Add additional signatures for [`SignedBlock`]. - /// - /// # Errors - /// Fails if signature generation fails - pub fn sign(mut self, key_pair: KeyPair) -> Result { - SignatureOf::from_hash(key_pair, self.partial_hash()) - .map(|signature| { - self.signatures.insert(signature); - self - }) - .map_err(BlockSignError::Sign) - } + if !block.payload().header.is_genesis() + && topology + .filter_signatures_by_roles(&[Role::Leader], block.signatures()) + .is_empty() + { + return Err((block, SignatureVerificationError::LeaderMissing.into())); + } - /// Add additional signature for [`SignedBlock`] - /// - /// # Errors - /// Fails if given signature doesn't match block hash - pub fn add_signature(&mut self, signature: SignatureOf) -> Result<(), BlockSignError> { - signature - .verify_hash(self.partial_hash()) - .map(|_| { - self.signatures.insert(signature); - }) - .map_err(BlockSignError::AddSignature) - } + let expected_block_height = wsv.height() + 1; + let actual_height = block.payload().header.height; + + if expected_block_height != actual_height { + return Err(( + block, + BlockValidationError::LatestBlockHeightMismatch { + expected: expected_block_height, + actual: actual_height, + }, + )); + } - /// Create dummy [`ValidBlock`]. Used in tests - /// - /// # Panics - /// If generating keys or block signing fails. 
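Putting the staged types together, the two life-cycles from the module documentation look roughly as follows. This is an illustrative sketch only: it assumes the APIs introduced in this diff (`BlockBuilder::{new, chain}`, `sign`, `ValidBlock::{validate, commit}`), elides setup, and panics where real code would feed errors back into consensus:

```rust
use iroha_core::{
    block::{BlockBuilder, CommittedBlock, ValidBlock},
    prelude::*, // assumed to re-export AcceptedTransaction and WorldStateView
    sumeragi::network_topology::Topology,
};
use iroha_crypto::KeyPair;
use iroha_data_model::block::SignedBlock;

// Path 1: block produced by this node.
// BlockBuilder<Pending> -> BlockBuilder<Chained> -> ValidBlock -> CommittedBlock
fn produce(
    txs: Vec<AcceptedTransaction>,
    topology: &Topology,
    key_pair: KeyPair,
    wsv: &mut WorldStateView,
) -> CommittedBlock {
    BlockBuilder::new(txs, topology.clone(), Vec::new())
        .chain(0, wsv) // stamps height, previous hash and the transaction merkle root
        .sign(key_pair)
        .expect("signing failed")
        // In a real round the block gathers peer signatures before this point.
        .commit(topology)
        .unwrap_or_else(|(_block, error)| panic!("{error}"))
}

// Path 2: block received from a peer.
// SignedBlock -> ValidBlock -> CommittedBlock
fn receive(block: SignedBlock, topology: &Topology, wsv: &mut WorldStateView) -> CommittedBlock {
    ValidBlock::validate(block, topology, wsv)
        .unwrap_or_else(|(_block, error)| panic!("{error}"))
        .commit(topology)
        .unwrap_or_else(|(_block, error)| panic!("{error}"))
}
```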
- #[allow(clippy::restriction)] - #[cfg(test)] - pub fn new_dummy() -> Self { - let timestamp = crate::current_time().as_millis(); - - let header = BlockHeader { - timestamp, - consensus_estimation: DEFAULT_CONSENSUS_ESTIMATION_MS, - height: 1, - view_change_index: 0, - previous_block_hash: None, - transactions_hash: None, - rejected_transactions_hash: None, - committed_with_topology: Vec::new(), - }; - - let key_pair = KeyPair::generate().unwrap(); - let signature = SignatureOf::from_hash(key_pair, HashOf::new(&header).transmute()) - .expect("Signing of new block failed."); - let signatures = SignaturesOf::from(signature); - - Self { - header, - transactions: Vec::new(), - event_recommendations: Vec::new(), - signatures, - } - } -} + let expected_previous_block_hash = wsv.latest_block_hash(); + let actual_block_hash = block.payload().header.previous_block_hash; + + if expected_previous_block_hash != actual_block_hash { + return Err(( + block, + BlockValidationError::LatestBlockHashMismatch { + expected: expected_previous_block_hash, + actual: actual_block_hash, + }, + )); + } -/// This sealed trait represents the ability to revalidate a block. Should be -/// implemented for both [`PendingBlock`] and [`VersionedCommittedBlock`]. -/// Public users should only use this trait's extensions [`InGenesis`] and -/// [`InBlock`]. -#[sealed] -pub trait Revalidate: Sized { - /// # Errors - /// - When the block is deemed invalid. - fn revalidate(&self, wsv: &mut WorldStateView) -> Result<(), BlockRevalidationError>; - - /// Return whether or not the block contains transactions already committed. - fn has_committed_transactions(&self, wsv: &WorldStateView) -> bool; -} + if block + .payload() + .transactions + .iter() + .any(|tx| wsv.has_transaction(tx.hash())) + { + return Err((block, BlockValidationError::HasCommittedTransactions)); + } -#[sealed] -impl Revalidate for PendingBlock { - /// Revalidate a block against the current state of the world. 
- /// - /// # Errors - /// - Block is empty - /// - Block has committed transactions - /// - There is a mismatch between candidate block height and actual blockchain height - /// - There is a mismatch between candidate block previous block hash and actual latest block hash - /// - Block header transaction hashes don't match with computed transaction hashes - /// - Error during revalidation of individual transactions - #[allow(clippy::too_many_lines)] - fn revalidate(&self, wsv: &mut WorldStateView) -> Result<(), BlockRevalidationError> { - let latest_block_hash = wsv.latest_block_hash(); - let block_height = wsv.height(); - let transaction_validator = wsv.transaction_validator(); - - if self.transactions.is_empty() { - return Err(BlockRevalidationError::Empty); - } + if let Err(error) = Self::validate_transactions(&block, wsv) { + return Err((block, error.into())); + } - if self.has_committed_transactions(wsv) { - return Err(BlockRevalidationError::HasCommittedTransactions); + let SignedBlock::V1(block) = block; + Ok(ValidBlock( + SignedBlockV1 { + payload: block.payload, + signatures: block.signatures, + } + .into(), + )) } - if latest_block_hash != self.header.previous_block_hash { - return Err(BlockRevalidationError::LatestBlockHashMismatch { - expected: latest_block_hash, - actual: self.header.previous_block_hash, - }); - } + fn validate_transactions( + block: &SignedBlock, + wsv: &mut WorldStateView, + ) -> Result<(), TransactionValidationError> { + let is_genesis = block.payload().header.is_genesis(); - if block_height + 1 != self.header.height { - return Err(BlockRevalidationError::LatestBlockHeightMismatch { - expected: block_height + 1, - actual: self.header.height, - }); - } + block.payload() + .transactions + .iter() + // TODO: Unnecessary clone? + .cloned() + .try_for_each(|TransactionValue{value, error}| { + let transaction_executor = wsv.transaction_executor(); + let limits = &transaction_executor.transaction_limits; + + let tx = if is_genesis { + AcceptedTransaction::accept_genesis(GenesisTransaction(value)) + } else { + AcceptedTransaction::accept(value, limits)? + }; - revalidate_hashes( - &self.transactions, - self.header.transactions_hash, - self.header.rejected_transactions_hash, - )?; + if error.is_some() { + match transaction_executor.validate(tx, wsv) { + Err(rejected_transaction) => Ok(rejected_transaction), + Ok(_) => Err(TransactionValidationError::RejectedIsValid), + }?; + } else { + transaction_executor.validate(tx, wsv).map_err(|(_tx, error)| { + TransactionValidationError::NotValid(error) + })?; + } - revalidate_transactions( - &self.transactions, - wsv, - transaction_validator, - self.is_genesis(), - )?; + Ok(()) + }) + } - Ok(()) - } + /// The manipulation of the topology relies upon all peers seeing the same signature set. + /// Therefore we must clear the signatures and accept what the proxy tail giveth. + /// + /// # Errors + /// + /// - Not enough signatures + /// - Not signed by proxy tail + pub(crate) fn commit_with_signatures( + mut self, + topology: &Topology, + signatures: SignaturesOf, + ) -> Result { + if topology + .filter_signatures_by_roles(&[Role::Leader], &signatures) + .is_empty() + { + return Err((self, SignatureVerificationError::LeaderMissing.into())); + } - /// Check if a block has transactions that are already in the blockchain. 
- fn has_committed_transactions(&self, wsv: &WorldStateView) -> bool { - self.transactions - .iter() - .any(|tx| wsv.has_transaction(tx.value.hash())) - } -} + if !self.signatures().is_subset(&signatures) { + return Err((self, SignatureVerificationError::SignatureMissing.into())); + } + + if !self.0.replace_signatures(signatures) { + return Err((self, SignatureVerificationError::UnknownSignature.into())); + } -#[sealed] -impl Revalidate for VersionedCommittedBlock { - /// Revalidate a block against the current state of the world. - /// - /// # Errors - /// - Block is empty - /// - Block has committed transactions - /// - There is a mismatch between candidate block height and actual blockchain height - /// - There is a mismatch between candidate block previous block hash and actual latest block hash - /// - Block header transaction hashes don't match with computed transaction hashes - /// - Error during revalidation of individual transactions - #[allow(clippy::too_many_lines)] - fn revalidate(&self, wsv: &mut WorldStateView) -> Result<(), BlockRevalidationError> { - let latest_block_hash = wsv.latest_block_hash(); - let block_height = wsv.height(); - let transaction_validator = wsv.transaction_validator(); - let is_genesis = block_height == 0; - - if self.has_committed_transactions(wsv) { - return Err(BlockRevalidationError::HasCommittedTransactions); + self.commit(topology) } - match self { - VersionedCommittedBlock::V1(block) => { - if block.transactions.is_empty() { - return Err(BlockRevalidationError::Empty); + /// Verify signatures and commit block to the store. + /// + /// # Errors + /// + /// - Not enough signatures + /// - Not signed by proxy tail + pub fn commit( + self, + topology: &Topology, + ) -> Result { + if !self.payload().header.is_genesis() { + if let Err(err) = self.verify_signatures(topology) { + return Err((self, err.into())); } + } - if latest_block_hash != block.header.previous_block_hash { - return Err(BlockRevalidationError::LatestBlockHashMismatch { - expected: latest_block_hash, - actual: block.header.previous_block_hash, - }); - } + Ok(CommittedBlock(self.0)) + } + + /// Add additional signatures for [`Self`]. + /// + /// # Errors + /// + /// If signature generation fails + pub fn sign(self, key_pair: KeyPair) -> Result { + self.0.sign(key_pair).map(ValidBlock) + } - if block_height + 1 != block.header.height { - return Err(BlockRevalidationError::LatestBlockHeightMismatch { - expected: block_height + 1, - actual: block.header.height, + /// Add additional signature for [`Self`] + /// + /// # Errors + /// + /// If given signature doesn't match block hash + pub fn add_signature( + &mut self, + signature: SignatureOf, + ) -> Result<(), iroha_crypto::error::Error> { + self.0.add_signature(signature) + } + + #[cfg(test)] + pub(crate) fn new_dummy() -> Self { + BlockBuilder(Chained(BlockPayload { + header: BlockHeader { + timestamp_ms: 0, + consensus_estimation_ms: DEFAULT_CONSENSUS_ESTIMATION_MS, + height: 2, + view_change_index: 0, + previous_block_hash: None, + transactions_hash: None, + }, + transactions: Vec::new(), + commit_topology: UniqueVec::new(), + event_recommendations: Vec::new(), + })) + .sign(KeyPair::generate().unwrap()) + .unwrap() + } + + /// Check if block's signatures meet requirements for given topology. + /// + /// In order for block to be considered valid there should be at least $2f + 1$ signatures (including proxy tail and leader signature) where f is maximum number of faulty nodes. 
+ /// For further information please refer to the [whitepaper](docs/source/iroha_2_whitepaper.md) section 2.8 consensus. + /// + /// # Errors + /// - Not enough signatures + /// - Missing proxy tail signature + fn verify_signatures(&self, topology: &Topology) -> Result<(), SignatureVerificationError> { + // TODO: Should the peer that serves genesis have a fixed role of ProxyTail in topology? + if !self.payload().header.is_genesis() + && topology.is_consensus_required().is_some() + && topology + .filter_signatures_by_roles(&[Role::ProxyTail], self.signatures()) + .is_empty() + { + return Err(SignatureVerificationError::ProxyTailMissing); + } + + #[allow(clippy::collapsible_else_if)] + if self.payload().header.is_genesis() { + // At genesis round we blindly take on the network topology from the genesis block. + } else { + let roles = [ + Role::ValidatingPeer, + Role::Leader, + Role::ProxyTail, + Role::ObservingPeer, + ]; + + let votes_count = topology + .filter_signatures_by_roles(&roles, self.signatures()) + .len(); + if votes_count < topology.min_votes_for_commit() { + return Err(SignatureVerificationError::NotEnoughSignatures { + votes_count, + min_votes_for_commit: topology.min_votes_for_commit(), }); } + } - if !is_genesis { - // Recrate topology with witch block must be committed at given view change index - // And then verify committed_with_topology field and block signatures - let topology = { - let last_committed_block = wsv - .latest_block_ref() - .expect("Not in genesis round so must have at least genesis block"); - let new_peers = wsv.peers_ids().iter().cloned().collect(); - let view_change_index = block - .header - .view_change_index - .try_into() - .map_err(|_| BlockRevalidationError::ViewChangeIndexTooLarge)?; - Topology::recreate_topology( - &last_committed_block, - view_change_index, - new_peers, - ) - }; + Ok(()) + } + } - if topology.sorted_peers != block.header.committed_with_topology { - return Err(BlockRevalidationError::TopologyMismatch { - expected: topology.sorted_peers, - actual: block.header.committed_with_topology.clone(), - }); - } + impl From for SignedBlock { + fn from(source: ValidBlock) -> Self { + source.0 + } + } - topology.verify_signatures( - &mut block.signatures.clone(), - HashOf::from_untyped_unchecked(block.partial_hash().internal), - )?; - } + #[cfg(test)] + mod tests { + use super::*; + use crate::sumeragi::network_topology::test_peers; + + #[test] + fn signature_verification_ok() { + let key_pairs = core::iter::repeat_with(|| { + KeyPair::generate().expect("Failed to generate key pair") + }) + .take(7) + .collect::>(); + let mut key_pairs_iter = key_pairs.iter(); + let peers = test_peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; + let topology = Topology::new(peers); + + let mut block = ValidBlock::new_dummy(); + let payload = block.payload().clone(); + key_pairs + .iter() + .map(|key_pair| { + SignatureOf::new(key_pair.clone(), &payload).expect("Failed to sign") + }) + .try_for_each(|signature| block.add_signature(signature)) + .expect("Failed to add signatures"); - revalidate_hashes( - &block.transactions, - block.header.transactions_hash, - block.header.rejected_transactions_hash, - )?; + assert_eq!(block.verify_signatures(&topology), Ok(())); + } - revalidate_transactions( - &block.transactions, - wsv, - transaction_validator, - block.is_genesis(), - )?; + #[test] + fn signature_verification_consensus_not_required_ok() { + let key_pairs = core::iter::repeat_with(|| { + KeyPair::generate().expect("Failed to generate key pair") + }) + .take(1) + 
.collect::<Vec<_>>(); + let mut key_pairs_iter = key_pairs.iter(); + let peers = test_peers![0,: key_pairs_iter]; + let topology = Topology::new(peers); + + let mut block = ValidBlock::new_dummy(); + let payload = block.payload().clone(); + key_pairs + .iter() + .enumerate() + .map(|(_, key_pair)| { + SignatureOf::new(key_pair.clone(), &payload).expect("Failed to sign") + }) + .try_for_each(|signature| block.add_signature(signature)) + .expect("Failed to add signatures"); + + assert_eq!(block.verify_signatures(&topology), Ok(())); + } - Ok(()) - } + /// Check requirement of having at least $2f + 1$ signatures in $3f + 1$ network + #[test] + fn signature_verification_not_enough_signatures() { + let key_pairs = core::iter::repeat_with(|| { + KeyPair::generate().expect("Failed to generate key pair") + }) + .take(7) + .collect::<Vec<_>>(); + let mut key_pairs_iter = key_pairs.iter(); + let peers = test_peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; + let topology = Topology::new(peers); + + let mut block = ValidBlock::new_dummy(); + let payload = block.payload().clone(); + let proxy_tail_signature = + SignatureOf::new(key_pairs[4].clone(), &payload).expect("Failed to sign"); + block + .add_signature(proxy_tail_signature) + .expect("Failed to add signature"); + + assert_eq!( + block.verify_signatures(&topology), + Err(SignatureVerificationError::NotEnoughSignatures { + votes_count: 1, + min_votes_for_commit: topology.min_votes_for_commit(), + }) + ) } - /// Check if a block has transactions that are already in the blockchain. - fn has_committed_transactions(&self, wsv: &WorldStateView) -> bool { - match self { - VersionedCommittedBlock::V1(block) => block - .transactions + /// Check requirement of having proxy tail signature + #[test] + fn signature_verification_miss_proxy_tail_signature() { + let key_pairs = core::iter::repeat_with(|| { + KeyPair::generate().expect("Failed to generate key pair") + }) + .take(7) + .collect::<Vec<_>>(); + let mut key_pairs_iter = key_pairs.iter(); + let peers = test_peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; + let topology = Topology::new(peers); + + let mut block = ValidBlock::new_dummy(); + let payload = block.payload().clone(); + key_pairs .iter() - .any(|tx| wsv.has_transaction(tx.value.hash())), + .enumerate() + .filter(|(i, _)| *i != 4) // Skip proxy tail + .map(|(_, key_pair)| SignatureOf::new(key_pair.clone(), &payload).expect("Failed to sign")) + .try_for_each(|signature| block.add_signature(signature)) + .expect("Failed to add signatures"); + + assert_eq!( + block.verify_signatures(&topology), + Err(SignatureVerificationError::ProxyTailMissing) + ) } } } -/// Revalidate merkle tree root hashes of the transaction -fn revalidate_hashes( - transactions: &[TransactionValue], - transactions_hash: Option<HashOf<MerkleTree<VersionedSignedTransaction>>>, - rejected_transactions_hash: Option<HashOf<MerkleTree<VersionedSignedTransaction>>>, -) -> Result<(), BlockRevalidationError> { - // Validate that header transactions hashes are matched with actual hashes - transactions - .iter() - .filter(|tx| tx.error.is_none()) - .map(|tx| tx.value.hash()) - .collect::<MerkleTree<_>>() - .hash() - .eq(&transactions_hash) - .then_some(()) - .ok_or_else(|| BlockRevalidationError::TransactionHashMismatch)?; - - transactions - .iter() - .filter(|tx| tx.error.is_some()) - .map(|tx| tx.value.hash()) - .collect::<MerkleTree<_>>() - .hash() - .eq(&rejected_transactions_hash) - .then_some(()) - .ok_or_else(|| BlockRevalidationError::RejectedTransactionHashMismatch)?; - Ok(()) -} +mod commit { + use super::*; -/// Revalidate transactions to ensure that valid transactions indeed valid and invalid are still invalid -fn 
revalidate_transactions( - transactions: &[TransactionValue], - wsv: &mut WorldStateView, - transaction_validator: TransactionValidator, - is_genesis: bool, -) -> Result<(), TransactionRevalidationError> { - // Check that valid transactions are still valid - for tx in transactions.iter().cloned() { - if tx.error.is_some() { - let _rejected_tx = if is_genesis { - Ok(AcceptedTransaction::accept_genesis(GenesisTransaction( - tx.value, - ))) - } else { - AcceptedTransaction::accept(tx.value, &transaction_validator.transaction_limits) - } - .map_err(TransactionRevalidationError::Accept) - .and_then(|tx| match transaction_validator.validate(tx, wsv) { - Err(rejected_transaction) => Ok(rejected_transaction), - Ok(_) => Err(TransactionRevalidationError::RejectedIsValid), - })?; - } else { - let tx = if is_genesis { - Ok(AcceptedTransaction::accept_genesis(GenesisTransaction( - tx.value, - ))) - } else { - AcceptedTransaction::accept(tx.value, &transaction_validator.transaction_limits) - } - .map_err(TransactionRevalidationError::Accept)?; + /// Represents a block accepted by consensus. + /// Every [`Self`] will have a different height. + #[derive(Debug, Clone)] + // TODO: Make it pub(super) at most + pub struct CommittedBlock(pub(crate) SignedBlock); - transaction_validator - .validate(tx, wsv) - .map_err(|(_tx, error)| error) - .map_err(TransactionRevalidationError::NotValid)?; + impl CommittedBlock { + /// Calculate block hash + pub fn hash(&self) -> HashOf { + self.0.hash() + } + /// Get block payload + pub fn payload(&self) -> &BlockPayload { + self.0.payload() + } + /// Get block signatures + pub fn signatures(&self) -> &SignaturesOf { + self.0.signatures() } } - Ok(()) -} + impl CommittedBlock { + pub(crate) fn produce_events(&self) -> Vec { + let tx = self.payload().transactions.iter().map(|tx| { + let status = tx.error.as_ref().map_or_else( + || PipelineStatus::Committed, + |error| PipelineStatus::Rejected(error.clone().into()), + ); -impl From<&PendingBlock> for Vec { - fn from(block: &PendingBlock) -> Self { - block - .transactions - .iter() - .map(|transaction| -> Event { PipelineEvent { entity_kind: PipelineEntityKind::Transaction, - status: PipelineStatus::Validating, - hash: transaction.payload().hash().into(), + status, + hash: tx.payload().hash().into(), } - .into() - }) - .chain([PipelineEvent { + }); + let current_block = core::iter::once(PipelineEvent { entity_kind: PipelineEntityKind::Block, - status: PipelineStatus::Validating, - hash: block.partial_hash().into(), - } - .into()]) - .collect() + status: PipelineStatus::Committed, + hash: self.hash().into(), + }); + + tx.chain(current_block).collect() + } + } + + impl From for ValidBlock { + fn from(source: CommittedBlock) -> Self { + ValidBlock(source.0) + } + } + + impl From for SignedBlock { + fn from(source: CommittedBlock) -> Self { + source.0 + } + } + + // Invariants of [`CommittedBlock`] can't be violated through immutable reference + impl AsRef for CommittedBlock { + fn as_ref(&self) -> &SignedBlock { + &self.0 + } } } #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use std::str::FromStr as _; use iroha_data_model::prelude::*; use super::*; - use crate::{kura::Kura, smartcontracts::isi::Registrable as _}; + use crate::{kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _}; #[test] pub fn committed_and_valid_block_hashes_are_equal() { - let valid_block = PendingBlock::new_dummy(); - let committed_block = valid_block.clone().commit_unchecked(); + let valid_block = 
ValidBlock::new_dummy(); + let topology = Topology::new(UniqueVec::new()); + let committed_block = valid_block.clone().commit(&topology).unwrap(); assert_eq!( - *valid_block.partial_hash(), - committed_block.partial_hash().internal + valid_block.payload().hash(), + committed_block.payload().hash() ) } - #[test] - fn should_reject_due_to_repetition() { + #[tokio::test] + async fn should_reject_due_to_repetition() { // Predefined world state let alice_id = AccountId::from_str("alice@wonderland").expect("Valid"); let alice_keys = KeyPair::generate().expect("Valid"); @@ -621,17 +727,18 @@ mod tests { let domain_id = DomainId::from_str("wonderland").expect("Valid"); let mut domain = Domain::new(domain_id).build(&alice_id); assert!(domain.add_account(account).is_none()); - let world = World::with([domain], Vec::new()); + let world = World::with([domain], UniqueVec::new()); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world, kura); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world, kura, query_handle); // Creating an instruction let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset_definition = - RegisterBox::new(AssetDefinition::quantity(asset_definition_id)); + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); // Making two transactions that have the same instruction - let transaction_limits = &wsv.transaction_validator().transaction_limits; + let transaction_limits = &wsv.transaction_executor().transaction_limits; let tx = TransactionBuilder::new(alice_id) .with_instructions([create_asset_definition]) .sign(alice_keys.clone()) @@ -640,25 +747,21 @@ mod tests { // Creating a block of two identical transactions and validating it let transactions = vec![tx.clone(), tx]; - let valid_block = BlockBuilder { - transactions, - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair: alice_keys, - wsv: &mut wsv, - } - .build(); + let topology = Topology::new(UniqueVec::new()); + let valid_block = BlockBuilder::new(transactions, topology, Vec::new()) + .chain(0, &mut wsv) + .sign(alice_keys) + .expect("Valid"); // The first transaction should be confirmed - assert!(valid_block.transactions[0].error.is_none()); + assert!(valid_block.payload().transactions[0].error.is_none()); // The second transaction should be rejected - assert!(valid_block.transactions[1].error.is_some()); + assert!(valid_block.payload().transactions[1].error.is_some()); } - #[test] - fn tx_order_same_in_validation_and_revalidation() { + #[tokio::test] + async fn tx_order_same_in_validation_and_revalidation() { // Predefined world state let alice_id = AccountId::from_str("alice@wonderland").expect("Valid"); let alice_keys = KeyPair::generate().expect("Valid"); @@ -667,17 +770,18 @@ mod tests { let domain_id = DomainId::from_str("wonderland").expect("Valid"); let mut domain = Domain::new(domain_id).build(&alice_id); assert!(domain.add_account(account).is_none()); - let world = World::with([domain], Vec::new()); + let world = World::with([domain], UniqueVec::new()); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world, kura); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world, kura, query_handle); // Creating an instruction let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); let create_asset_definition = - 
RegisterBox::new(AssetDefinition::quantity(asset_definition_id.clone())); + RegisterExpr::new(AssetDefinition::quantity(asset_definition_id.clone())); // Making two transactions that have the same instruction - let transaction_limits = &wsv.transaction_validator().transaction_limits; + let transaction_limits = &wsv.transaction_executor().transaction_limits; let tx = TransactionBuilder::new(alice_id.clone()) .with_instructions([create_asset_definition]) .sign(alice_keys.clone()) @@ -687,12 +791,12 @@ mod tests { let quantity: u32 = 200; let fail_quantity: u32 = 20; - let fail_mint = MintBox::new( + let fail_mint = MintExpr::new( fail_quantity.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id.clone(), alice_id.clone())), ); - let succeed_mint = MintBox::new( + let succeed_mint = MintExpr::new( quantity.to_value(), IdBox::AssetId(AssetId::new(asset_definition_id, alice_id.clone())), ); @@ -711,27 +815,21 @@ mod tests { // Creating a block of two identical transactions and validating it let transactions = vec![tx0, tx, tx2]; - let valid_block = BlockBuilder { - transactions, - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair: alice_keys, - wsv: &mut wsv.clone(), - } - .build(); + let topology = Topology::new(UniqueVec::new()); + let valid_block = BlockBuilder::new(transactions, topology, Vec::new()) + .chain(0, &mut wsv) + .sign(alice_keys) + .expect("Valid"); // The first transaction should fail - assert!(valid_block.transactions[0].error.is_some()); + assert!(valid_block.payload().transactions[0].error.is_some()); // The third transaction should succeed - assert!(valid_block.transactions[2].error.is_none()); - - valid_block.revalidate(&mut wsv).unwrap(); + assert!(valid_block.payload().transactions[2].error.is_none()); } - #[test] - fn failed_transactions_revert() { + #[tokio::test] + async fn failed_transactions_revert() { // Predefined world state let alice_id = AccountId::from_str("alice@wonderland").expect("Valid"); let alice_keys = KeyPair::generate().expect("Valid"); @@ -743,20 +841,21 @@ mod tests { domain.add_account(account).is_none(), "`alice@wonderland` already exist in the blockchain" ); - let world = World::with([domain], Vec::new()); + let world = World::with([domain], UniqueVec::new()); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world, kura); - let transaction_limits = &wsv.transaction_validator().transaction_limits; + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world, kura, query_handle); + let transaction_limits = &wsv.transaction_executor().transaction_limits; let domain_id = DomainId::from_str("domain").expect("Valid"); - let create_domain = RegisterBox::new(Domain::new(domain_id)); + let create_domain = RegisterExpr::new(Domain::new(domain_id)); let asset_definition_id = AssetDefinitionId::from_str("coin#domain").expect("Valid"); - let create_asset = RegisterBox::new(AssetDefinition::quantity(asset_definition_id)); - let instructions_fail: [InstructionBox; 2] = [ + let create_asset = RegisterExpr::new(AssetDefinition::quantity(asset_definition_id)); + let instructions_fail: [InstructionExpr; 2] = [ create_domain.clone().into(), - FailBox::new("Always fail").into(), + Fail::new("Always fail").into(), ]; - let instructions_accept: [InstructionBox; 2] = [create_domain.into(), create_asset.into()]; + let instructions_accept: [InstructionExpr; 2] = [create_domain.into(), create_asset.into()]; let tx_fail = 
TransactionBuilder::new(alice_id.clone()) .with_instructions(instructions_fail) .sign(alice_keys.clone()) @@ -770,25 +869,21 @@ mod tests { // Creating a block of where first transaction must fail and second one fully executed let transactions = vec![tx_fail, tx_accept]; - let valid_block = BlockBuilder { - transactions, - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair: alice_keys, - wsv: &mut wsv, - } - .build(); + let topology = Topology::new(UniqueVec::new()); + let valid_block = BlockBuilder::new(transactions, topology, Vec::new()) + .chain(0, &mut wsv) + .sign(alice_keys) + .expect("Valid"); // The first transaction should be rejected assert!( - valid_block.transactions[0].error.is_some(), - "The first transaction should be rejected, as it contains `FailBox`." + valid_block.payload().transactions[0].error.is_some(), + "The first transaction should be rejected, as it contains `Fail`." ); // The second transaction should be accepted assert!( - valid_block.transactions[1].error.is_none(), + valid_block.payload().transactions[1].error.is_none(), "The second transaction should be accepted." ); } diff --git a/core/src/block_sync.rs b/core/src/block_sync.rs index 9eefb40334a..1f8d8fe1fcb 100644 --- a/core/src/block_sync.rs +++ b/core/src/block_sync.rs @@ -1,18 +1,12 @@ //! This module contains structures and messages for synchronization of blocks between peers. -#![allow( - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] use std::{fmt::Debug, sync::Arc, time::Duration}; use iroha_config::block_sync::Configuration; use iroha_crypto::HashOf; -use iroha_data_model::{block::VersionedCommittedBlock, prelude::*}; +use iroha_data_model::{block::SignedBlock, prelude::*}; use iroha_logger::prelude::*; use iroha_macro::*; use iroha_p2p::Post; -use iroha_version::prelude::*; use parity_scale_codec::{Decode, Encode}; use tokio::sync::mpsc; @@ -44,8 +38,8 @@ pub struct BlockSynchronizer { gossip_period: Duration, block_batch_size: u32, network: IrohaNetwork, - latest_hash: Option>, - previous_hash: Option>, + latest_hash: Option>, + previous_hash: Option>, } impl BlockSynchronizer { @@ -136,38 +130,13 @@ pub mod message { use super::*; use crate::sumeragi::view_change::ProofChain; - declare_versioned_with_scale!(VersionedMessage 1..2, Debug, Clone, iroha_macro::FromVariant); - - impl VersionedMessage { - /// Convert from `&VersionedMessage` to V1 reference - pub const fn as_v1(&self) -> &Message { - match self { - Self::V1(v1) => v1, - } - } - - /// Convert from `&mut VersionedMessage` to V1 mutable reference - pub fn as_mut_v1(&mut self) -> &mut Message { - match self { - Self::V1(v1) => v1, - } - } - - /// Performs the conversion from `VersionedMessage` to V1 - pub fn into_v1(self) -> Message { - match self { - Self::V1(v1) => v1, - } - } - } - /// Get blocks after some block #[derive(Debug, Clone, Decode, Encode)] pub struct GetBlocksAfter { /// Hash of latest available block - pub latest_hash: Option>, + pub latest_hash: Option>, /// Hash of second to latest block - pub previous_hash: Option>, + pub previous_hash: Option>, /// Peer id pub peer_id: PeerId, } @@ -175,8 +144,8 @@ pub mod message { impl GetBlocksAfter { /// Construct [`GetBlocksAfter`]. 
pub const fn new( - latest_hash: Option>, - previous_hash: Option>, + latest_hash: Option>, + previous_hash: Option>, peer_id: PeerId, ) -> Self { Self { @@ -191,20 +160,19 @@ pub mod message { #[derive(Debug, Clone, Decode, Encode)] pub struct ShareBlocks { /// Blocks - pub blocks: Vec, + pub blocks: Vec, /// Peer id pub peer_id: PeerId, } impl ShareBlocks { /// Construct [`ShareBlocks`]. - pub const fn new(blocks: Vec, peer_id: PeerId) -> Self { + pub const fn new(blocks: Vec, peer_id: PeerId) -> Self { Self { blocks, peer_id } } } /// Message's variants that are used by peers to communicate in the process of consensus. - #[version_with_scale(version = 1, versioned_alias = "VersionedMessage")] #[derive(Debug, Clone, Decode, Encode, FromVariant)] pub enum Message { /// Request for blocks after the block with `Hash` for the peer with `PeerId`. @@ -249,7 +217,7 @@ pub mod message { .take(1 + block_sync.block_batch_size as usize) .map_while(|height| block_sync.kura.get_block_by_height(height)) .skip_while(|block| Some(block.hash()) == *latest_hash) - .map(|block| VersionedCommittedBlock::clone(&block)) + .map(|block| SignedBlock::clone(&block)) .collect::>(); if blocks.is_empty() { @@ -280,7 +248,7 @@ pub mod message { #[iroha_futures::telemetry_future] #[log("TRACE")] pub async fn send_to(self, network: &IrohaNetwork, peer: PeerId) { - let data = NetworkMessage::BlockSync(Box::new(VersionedMessage::from(self))); + let data = NetworkMessage::BlockSync(Box::new(self)); let message = Post { data, peer_id: peer.clone(), diff --git a/core/src/validator.rs b/core/src/executor.rs similarity index 61% rename from core/src/validator.rs rename to core/src/executor.rs index 22fec3ec897..971f7dd7867 100644 --- a/core/src/validator.rs +++ b/core/src/executor.rs @@ -1,12 +1,13 @@ -//! Structures and impls related to *runtime* `Validator`s processing. +//! Structures and impls related to *runtime* `Executor`s processing. use derive_more::DebugCustom; use iroha_data_model::{ account::AccountId, - isi::InstructionBox, + executor as data_model_executor, + isi::InstructionExpr, query::QueryBox, - transaction::{Executable, VersionedSignedTransaction}, - validator as data_model_validator, ValidationFail, + transaction::{Executable, SignedTransaction}, + ValidationFail, }; use iroha_logger::trace; use serde::{ @@ -27,15 +28,17 @@ impl From for ValidationFail { match call_error { ExecutionLimitsExceeded(_) => Self::TooComplex, - HostExecution(error) | Other(error) => Self::InternalError(error.to_string()), + HostExecution(error) | Other(error) => { + Self::InternalError(format!("{error:#}")) + } } } - _ => Self::InternalError(err.to_string()), + _ => Self::InternalError(format!("{err:#}")), } } } -/// Error used in [`migrate()`](Validator::migrate). +/// Error used in [`migrate()`](Executor::migrate). #[derive(Debug, thiserror::Error)] pub enum MigrationError { /// Error during WASM blob loading or runtime preparation. @@ -43,40 +46,40 @@ pub enum MigrationError { Wasm(#[from] wasm::error::Error), /// Error returned by entrypoint during execution. #[error("Entrypoint returned error: {0}")] - EntrypointExecution(data_model_validator::MigrationError), + EntrypointExecution(data_model_executor::MigrationError), } -/// Validator that verifies that operation is valid and executes it. +/// Executor that verifies that operation is valid and executes it. /// /// Executing is done in order to verify dependent instructions in transaction. /// So in fact it's more like an **Executor**, and it probably will be renamed soon. 
/// /// Can be upgraded with [`Upgrade`](iroha_data_model::isi::Upgrade) instruction. #[derive(Debug, Default, Clone, Serialize)] -pub enum Validator { - /// Initial validator that allows all operations and performs no permission checking. +pub enum Executor { + /// Initial executor that allows all operations and performs no permission checking. #[default] Initial, - /// User-provided validator with arbitrary logic. - UserProvided(UserProvidedValidator), + /// User-provided executor with arbitrary logic. + UserProvided(UserProvidedExecutor), } -/// Validator provided by user. +/// Executor provided by user. /// /// Used to not to leak private data to the user. #[derive(Debug, Clone, Serialize)] #[serde(transparent)] -pub struct UserProvidedValidator(LoadedValidator); +pub struct UserProvidedExecutor(LoadedExecutor); -impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Validator> { - type Value = Validator; +impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Executor> { + type Value = Executor; fn deserialize(self, deserializer: D) -> Result where D: Deserializer<'de>, { - struct ValidatorVisitor<'l> { - loader: &'l WasmSeed<'l, Validator>, + struct ExecutorVisitor<'l> { + loader: &'l WasmSeed<'l, Executor>, } #[derive(Deserialize)] @@ -86,8 +89,8 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Validator> { UserProvided, } - impl<'de> Visitor<'de> for ValidatorVisitor<'_> { - type Value = Validator; + impl<'de> Visitor<'de> for ExecutorVisitor<'_> { + type Value = Executor; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { formatter.write_str("an enum variant") @@ -100,12 +103,12 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Validator> { match data.variant()? { ("Initial", variant) => { variant.unit_variant()?; - Ok(Validator::Initial) + Ok(Executor::Initial) } ("UserProvided", variant) => { let loaded = - variant.newtype_variant_seed(self.loader.cast::())?; - Ok(Validator::UserProvided(UserProvidedValidator(loaded))) + variant.newtype_variant_seed(self.loader.cast::())?; + Ok(Executor::UserProvided(UserProvidedExecutor(loaded))) } (other, _) => Err(serde::de::Error::unknown_variant( other, @@ -116,26 +119,26 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Validator> { } deserializer.deserialize_enum( - "Validator", + "Executor", &["Initial", "UserProvided"], - ValidatorVisitor { loader: &self }, + ExecutorVisitor { loader: &self }, ) } } -impl Validator { - /// Validate [`VersionedSignedTransaction`]. +impl Executor { + /// Validate [`SignedTransaction`]. /// /// # Errors /// /// - Failed to prepare runtime for WASM execution; /// - Failed to execute the entrypoint of the WASM blob; - /// - Validator denied the operation. + /// - Executor denied the operation. 
pub fn validate_transaction( &self, wsv: &mut WorldStateView, authority: &AccountId, - transaction: VersionedSignedTransaction, + transaction: SignedTransaction, ) -> Result<(), ValidationFail> { trace!("Running transaction validation"); @@ -150,51 +153,51 @@ impl Validator { } Ok(()) } - Self::UserProvided(UserProvidedValidator(loaded_validator)) => { + Self::UserProvided(UserProvidedExecutor(loaded_executor)) => { let runtime = - wasm::RuntimeBuilder::::new() + wasm::RuntimeBuilder::::new() .with_engine(wsv.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs .with_configuration(wsv.config.wasm_runtime_config) .build()?; - runtime.execute_validator_validate_transaction( + runtime.execute_executor_validate_transaction( wsv, authority, - &loaded_validator.module, + &loaded_executor.module, transaction, )? } } } - /// Validate [`InstructionBox`]. + /// Validate [`InstructionExpr`]. /// /// # Errors /// /// - Failed to prepare runtime for WASM execution; /// - Failed to execute the entrypoint of the WASM blob; - /// - Validator denied the operation. + /// - Executor denied the operation. pub fn validate_instruction( &self, wsv: &mut WorldStateView, authority: &AccountId, - instruction: InstructionBox, + instruction: InstructionExpr, ) -> Result<(), ValidationFail> { trace!("Running instruction validation"); match self { Self::Initial => instruction.execute(authority, wsv).map_err(Into::into), - Self::UserProvided(UserProvidedValidator(loaded_validator)) => { + Self::UserProvided(UserProvidedExecutor(loaded_executor)) => { let runtime = - wasm::RuntimeBuilder::::new() + wasm::RuntimeBuilder::::new() .with_engine(wsv.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs .with_configuration(wsv.config.wasm_runtime_config) .build()?; - runtime.execute_validator_validate_instruction( + runtime.execute_executor_validate_instruction( wsv, authority, - &loaded_validator.module, + &loaded_executor.module, instruction, )? } @@ -207,7 +210,7 @@ impl Validator { /// /// - Failed to prepare runtime for WASM execution; /// - Failed to execute the entrypoint of the WASM blob; - /// - Validator denied the operation. + /// - Executor denied the operation. pub fn validate_query( &self, wsv: &WorldStateView, @@ -218,96 +221,96 @@ impl Validator { match self { Self::Initial => Ok(()), - Self::UserProvided(UserProvidedValidator(loaded_validator)) => { - let runtime = wasm::RuntimeBuilder::::new() + Self::UserProvided(UserProvidedExecutor(loaded_executor)) => { + let runtime = wasm::RuntimeBuilder::::new() .with_engine(wsv.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs .with_configuration(wsv.config.wasm_runtime_config) .build()?; - runtime.execute_validator_validate_query( + runtime.execute_executor_validate_query( wsv, authority, - &loaded_validator.module, + &loaded_executor.module, query, )? } } } - /// Migrate validator to a new user-provided one. + /// Migrate executor to a new user-provided one. /// - /// Execute `migrate()` entrypoint of the `raw_validator` and set `self` to - /// [`UserProvided`](Validator::UserProvided) with `raw_validator`. + /// Execute `migrate()` entrypoint of the `raw_executor` and set `self` to + /// [`UserProvided`](Executor::UserProvided) with `raw_executor`. /// /// # Errors /// - /// - Failed to load `raw_validator`; + /// - Failed to load `raw_executor`; /// - Failed to prepare runtime for WASM execution; /// - Failed to execute entrypoint of the WASM blob. 
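The benches earlier in this diff show how a new executor reaches `migrate` in practice: the compiled WASM is wrapped in an `Executor` and submitted through an upgrade instruction. A rough sketch under those assumptions follows; the file path is illustrative, and `UpgradeExpr` and `WasmSmartContract` are assumed to be re-exported by the prelude, as the benches use them:

```rust
use iroha_data_model::{executor::Executor, prelude::*};

/// Build the upgrade instruction that, once executed, ends up in `Executor::migrate`.
/// Reading configs/peer/executor.wasm mirrors what core/benches/validation.rs does.
fn upgrade_executor_instruction() -> std::io::Result<UpgradeExpr> {
    let wasm = std::fs::read("configs/peer/executor.wasm")?;
    Ok(UpgradeExpr::new(Executor::new(
        WasmSmartContract::from_compiled(wasm),
    )))
}
```

In the benches this instruction is applied directly with `.execute(&authority, &mut wsv)`; on a live network it would be submitted inside a transaction like any other instruction.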
pub fn migrate( &mut self, - raw_validator: data_model_validator::Validator, + raw_executor: data_model_executor::Executor, wsv: &mut WorldStateView, authority: &AccountId, ) -> Result<(), MigrationError> { - trace!("Running validator migration"); + trace!("Running executor migration"); - let loaded_validator = LoadedValidator::load(&wsv.engine, raw_validator)?; + let loaded_executor = LoadedExecutor::load(&wsv.engine, raw_executor)?; - let runtime = wasm::RuntimeBuilder::::new() + let runtime = wasm::RuntimeBuilder::::new() .with_engine(wsv.engine.clone()) // Cloning engine is cheap, see [`wasmtime::Engine`] docs .with_configuration(wsv.config.wasm_runtime_config) .build()?; runtime - .execute_validator_migration(wsv, authority, &loaded_validator.module)? + .execute_executor_migration(wsv, authority, &loaded_executor.module)? .map_err(MigrationError::EntrypointExecution)?; - *self = Self::UserProvided(UserProvidedValidator(loaded_validator)); + *self = Self::UserProvided(UserProvidedExecutor(loaded_executor)); Ok(()) } } -/// [`Validator`] with [`Module`](wasmtime::Module) for execution. +/// [`Executor`] with [`Module`](wasmtime::Module) for execution. /// -/// Creating a [`wasmtime::Module`] is expensive, so we do it once on [`migrate()`](Validator::migrate) +/// Creating a [`wasmtime::Module`] is expensive, so we do it once on [`migrate()`](Executor::migrate) /// step and reuse it later on validating steps. #[derive(DebugCustom, Clone, Serialize)] -#[debug(fmt = "LoadedValidator {{ module: }}")] -struct LoadedValidator { +#[debug(fmt = "LoadedExecutor {{ module: }}")] +struct LoadedExecutor { #[serde(skip)] module: wasmtime::Module, - raw_validator: data_model_validator::Validator, + raw_executor: data_model_executor::Executor, } -impl LoadedValidator { +impl LoadedExecutor { pub fn load( engine: &wasmtime::Engine, - raw_validator: data_model_validator::Validator, + raw_executor: data_model_executor::Executor, ) -> Result { Ok(Self { - module: wasm::load_module(engine, &raw_validator.wasm)?, - raw_validator, + module: wasm::load_module(engine, &raw_executor.wasm)?, + raw_executor, }) } } -impl<'de> DeserializeSeed<'de> for WasmSeed<'_, LoadedValidator> { - type Value = LoadedValidator; +impl<'de> DeserializeSeed<'de> for WasmSeed<'_, LoadedExecutor> { + type Value = LoadedExecutor; fn deserialize(self, deserializer: D) -> Result where D: serde::Deserializer<'de>, { - struct LoadedValidatorVisitor<'l> { - loader: &'l WasmSeed<'l, LoadedValidator>, + struct LoadedExecutorVisitor<'l> { + loader: &'l WasmSeed<'l, LoadedExecutor>, } - impl<'de> Visitor<'de> for LoadedValidatorVisitor<'_> { - type Value = LoadedValidator; + impl<'de> Visitor<'de> for LoadedExecutorVisitor<'_> { + type Value = LoadedExecutor; fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { - formatter.write_str("struct LoadedValidator") + formatter.write_str("struct LoadedExecutor") } fn visit_map(self, mut map: M) -> Result @@ -315,19 +318,19 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, LoadedValidator> { M: MapAccess<'de>, { while let Some(key) = map.next_key::()? 
{ - if key.as_str() == "raw_validator" { - let validator: data_model_validator::Validator = map.next_value()?; - return Ok(LoadedValidator::load(self.loader.engine, validator).unwrap()); + if key.as_str() == "raw_executor" { + let executor: data_model_executor::Executor = map.next_value()?; + return Ok(LoadedExecutor::load(self.loader.engine, executor).unwrap()); } } - Err(serde::de::Error::missing_field("raw_validator")) + Err(serde::de::Error::missing_field("raw_executor")) } } deserializer.deserialize_struct( - "LoadedValidator", - &["raw_validator"], - LoadedValidatorVisitor { loader: &self }, + "LoadedExecutor", + &["raw_executor"], + LoadedExecutorVisitor { loader: &self }, ) } } diff --git a/core/src/gossiper.rs b/core/src/gossiper.rs index 34c22ca1b59..7f1ea21b690 100644 --- a/core/src/gossiper.rs +++ b/core/src/gossiper.rs @@ -3,7 +3,7 @@ use std::{sync::Arc, time::Duration}; use iroha_config::sumeragi::Configuration; -use iroha_data_model::transaction::VersionedSignedTransaction; +use iroha_data_model::transaction::SignedTransaction; use iroha_p2p::Broadcast; use parity_scale_codec::{Decode, Encode}; use tokio::sync::mpsc; @@ -76,7 +76,6 @@ impl TransactionGossiper { async fn run(mut self, mut message_receiver: mpsc::Receiver) { let mut gossip_period = tokio::time::interval(self.gossip_period); - #[allow(clippy::arithmetic_side_effects)] loop { tokio::select! { _ = gossip_period.tick() => self.gossip_transactions(), @@ -140,7 +139,7 @@ impl TransactionGossiper { #[derive(Decode, Encode, Debug, Clone)] pub struct TransactionGossip { /// Batch of transactions. - pub txs: Vec, + pub txs: Vec, } impl TransactionGossip { diff --git a/core/src/kura.rs b/core/src/kura.rs index 0a648f36095..671b0c958c8 100644 --- a/core/src/kura.rs +++ b/core/src/kura.rs @@ -1,29 +1,32 @@ //! Translates to warehouse. File-system and persistence-related //! logic. [`Kura`] is the main entity which should be used to store -//! new [`Block`](`crate::block::VersionedCommittedBlock`)s on the +//! new [`Block`](`crate::block::SignedBlock`)s on the //! blockchain. -#![allow(clippy::std_instead_of_alloc, clippy::arithmetic_side_effects)] use std::{ fmt::Debug, fs, - io::{Read, Seek, SeekFrom, Write}, + io::{BufWriter, Read, Seek, SeekFrom, Write}, path::{Path, PathBuf}, sync::Arc, }; use iroha_config::kura::Mode; -use iroha_crypto::HashOf; -use iroha_data_model::block::VersionedCommittedBlock; +use iroha_crypto::{Hash, HashOf}; +use iroha_data_model::block::SignedBlock; use iroha_logger::prelude::*; use iroha_version::scale::{DecodeVersioned, EncodeVersioned}; +use parity_scale_codec::DecodeAll; use parking_lot::Mutex; -use crate::handler::ThreadHandler; +use crate::{block::CommittedBlock, handler::ThreadHandler}; const INDEX_FILE_NAME: &str = "blocks.index"; const DATA_FILE_NAME: &str = "blocks.data"; +const HASHES_FILE_NAME: &str = "blocks.hashes"; const LOCK_FILE_NAME: &str = "kura.lock"; +const SIZE_OF_BLOCK_HASH: u64 = Hash::LENGTH as u64; + /// The interface of Kura subsystem #[derive(Debug)] pub struct Kura { @@ -34,12 +37,7 @@ pub struct Kura { block_store: Mutex, /// The array of block hashes and a slot for an arc of the block. This is normally recovered from the index file. #[allow(clippy::type_complexity)] - block_data: Mutex< - Vec<( - HashOf, - Option>, - )>, - >, + block_data: Mutex, Option>)>>, /// Path to file for plain text blocks. 
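The `WasmSeed` deserialization above deserves a note: a `wasmtime::Module` cannot be deserialized directly, so only the raw executor is persisted and the module is recompiled against the engine carried by the seed. Below is a minimal sketch of the same serde `DeserializeSeed` pattern; the `Engine`/`Compiled` pair is a hypothetical stand-in for the wasmtime types, not iroha API.

use serde::de::{DeserializeSeed, Deserializer};
use serde::Deserialize;

// Stand-ins for wasmtime::Engine and the compiled module (assumptions, not iroha types).
struct Engine;
struct Compiled(Vec<u8>);

impl Engine {
    // Pretend "compilation"; in the real code this is wasm::load_module.
    fn compile(&self, raw: &[u8]) -> Compiled {
        Compiled(raw.to_vec())
    }
}

// Only the raw bytes are (de)serializable.
#[derive(Deserialize)]
struct RawExecutor {
    wasm: Vec<u8>,
}

struct LoadedExecutor {
    module: Compiled,
    raw: RawExecutor,
}

// The seed carries the engine so deserialization can rebuild the derived state.
struct Seed<'e> {
    engine: &'e Engine,
}

impl<'de> DeserializeSeed<'de> for Seed<'_> {
    type Value = LoadedExecutor;

    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Deserialize the plain data first, then recompile the module.
        let raw = RawExecutor::deserialize(deserializer)?;
        let module = self.engine.compile(&raw.wasm);
        Ok(LoadedExecutor { module, raw })
    }
}

fn main() {
    let engine = Engine;
    let json = r#"{ "wasm": [0, 97, 115, 109] }"#;
    let mut de = serde_json::Deserializer::from_str(json);
    let loaded = Seed { engine: &engine }.deserialize(&mut de).unwrap();
    assert_eq!(loaded.module.0, loaded.raw.wasm);
}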
block_plain_text_path: Option, } @@ -113,24 +111,70 @@ impl Kura { /// - data in file storage is invalid or corrupted #[iroha_logger::log(skip_all, name = "kura_init")] pub fn init(self: &Arc) -> Result { - let block_store = self.block_store.lock(); + let mut block_store = self.block_store.lock(); let block_index_count: usize = block_store .read_index_count()? .try_into() .expect("We don't have 4 billion blocks."); + + let block_hashes = match self.mode { + Mode::Fast => Kura::init_fast_mode(&block_store, block_index_count).or_else(|error| { + warn!(%error, "Hashes file is broken. Falling back to strict init mode."); + Kura::init_strict_mode(&mut block_store, block_index_count) + }), + Mode::Strict => Kura::init_strict_mode(&mut block_store, block_index_count), + }?; + + let block_count = block_hashes.len(); + info!(mode=?self.mode, block_count, "Kura init complete"); + + // The none value is set in order to indicate that the blocks exist on disk but + // are not yet loaded. + *self.block_data.lock() = block_hashes.into_iter().map(|hash| (hash, None)).collect(); + Ok(BlockCount(block_count)) + } + + fn init_fast_mode( + block_store: &BlockStore, + block_index_count: usize, + ) -> Result>, Error> { + let block_hashes_count = block_store + .read_hashes_count()? + .try_into() + .expect("We don't have 4 billion blocks."); + if block_hashes_count == block_index_count { + block_store.read_block_hashes(0, block_hashes_count) + } else { + Err(Error::HashesFileHeightMismatch) + } + } + + fn init_strict_mode( + block_store: &mut BlockStore, + block_index_count: usize, + ) -> Result>, Error> { + let mut block_hashes = Vec::with_capacity(block_index_count); + let mut block_indices = vec![BlockIndex::default(); block_index_count]; block_store.read_block_indices(0, &mut block_indices)?; - let mut block_hashes: Vec> = Vec::new(); + let mut previous_block_hash = None; for block in block_indices { // This is re-allocated every iteration. This could cause a problem. let mut block_data_buffer = vec![0_u8; block.length.try_into()?]; match block_store.read_block_data(block.start, &mut block_data_buffer) { - Ok(_) => match VersionedCommittedBlock::decode_all_versioned(&block_data_buffer) { + Ok(_) => match SignedBlock::decode_all_versioned(&block_data_buffer) { Ok(decoded_block) => { - block_hashes.push(decoded_block.hash()); + if previous_block_hash != decoded_block.payload().header.previous_block_hash + { + error!("Block has wrong previous block hash. Not reading any blocks beyond this height."); + break; + } + let decoded_block_hash = decoded_block.hash(); + block_hashes.push(decoded_block_hash); + previous_block_hash = Some(decoded_block_hash); } Err(error) => { error!(?error, "Encountered malformed block. Not reading any blocks beyond this height."); @@ -143,16 +187,12 @@ impl Kura { } } } - let block_count = block_hashes.len(); - info!(block_count, "Kura init complete"); - // The none value is set in order to indicate that the blocks exist on disk but - // are not yet loaded. 
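Strict init above now threads `previous_block_hash` through the loop and stops at the first block whose header does not point at its predecessor, keeping the verified prefix instead of failing outright. A compressed, self-contained sketch of that chain walk (decoding and file I/O elided, raw 32-byte arrays standing in for the block hashes):

// Each decoded block knows its own hash and the hash its header claims for the previous block.
struct Block {
    hash: [u8; 32],
    previous_block_hash: Option<[u8; 32]>,
}

// Walk the chain, keeping every hash up to (not including) the first broken link.
fn collect_verified_hashes(blocks: &[Block]) -> Vec<[u8; 32]> {
    let mut hashes = Vec::with_capacity(blocks.len());
    let mut previous = None;

    for block in blocks {
        if block.previous_block_hash != previous {
            // Same policy as init_strict_mode: stop rather than error out,
            // so the node can still serve the verified prefix.
            eprintln!("block has wrong previous block hash; truncating here");
            break;
        }
        hashes.push(block.hash);
        previous = Some(block.hash);
    }
    hashes
}

fn main() {
    let genesis = Block { hash: [1; 32], previous_block_hash: None };
    let good = Block { hash: [2; 32], previous_block_hash: Some([1; 32]) };
    let broken = Block { hash: [3; 32], previous_block_hash: Some([9; 32]) };

    assert_eq!(collect_verified_hashes(&[genesis, good, broken]).len(), 2);
}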
- *self.block_data.lock() = block_hashes.into_iter().map(|hash| (hash, None)).collect(); - Ok(BlockCount(block_count)) + block_store.overwrite_block_hashes(&block_hashes)?; + + Ok(block_hashes) } - #[allow(clippy::expect_used, clippy::cognitive_complexity, clippy::panic)] #[iroha_logger::log(skip_all)] fn kura_receive_blocks_loop( kura: &Kura, @@ -227,9 +267,7 @@ impl Kura { } for block in blocks_to_be_written { - let serialized_block: Vec = block.encode_versioned(); - - if let Err(error) = block_store_guard.append_block_to_chain(&serialized_block) { + if let Err(error) = block_store_guard.append_block_to_chain(&block) { error!(?error, "Failed to store block"); panic!("Kura has encountered a fatal IO error."); } @@ -238,8 +276,7 @@ impl Kura { } /// Get the hash of the block at the provided height. - #[allow(clippy::expect_used)] - pub fn get_block_hash(&self, block_height: u64) -> Option> { + pub fn get_block_hash(&self, block_height: u64) -> Option> { let hash_data_guard = self.block_data.lock(); if block_height == 0 || block_height > hash_data_guard.len() as u64 { return None; @@ -251,7 +288,7 @@ impl Kura { } /// Search through blocks for the height of the block with the given hash. - pub fn get_block_height_by_hash(&self, hash: &HashOf) -> Option { + pub fn get_block_height_by_hash(&self, hash: &HashOf) -> Option { self.block_data .lock() .iter() @@ -260,10 +297,9 @@ impl Kura { } /// Get a reference to block by height, loading it from disk if needed. - #[allow(clippy::expect_used)] // The below lint suggests changing the code into something that does not compile due // to the borrow checker. - pub fn get_block_by_height(&self, block_height: u64) -> Option> { + pub fn get_block_by_height(&self, block_height: u64) -> Option> { let mut data_array_guard = self.block_data.lock(); if block_height == 0 || block_height > data_array_guard.len() as u64 { return None; @@ -286,8 +322,7 @@ impl Kura { block_store .read_block_data(start, &mut block_buf) .expect("Failed to read block data."); - let block = VersionedCommittedBlock::decode_all_versioned(&block_buf) - .expect("Failed to decode block"); + let block = SignedBlock::decode_all_versioned(&block_buf).expect("Failed to decode block"); let block_arc = Arc::new(block); data_array_guard[block_number].1 = Some(Arc::clone(&block_arc)); @@ -299,10 +334,7 @@ impl Kura { /// Internally this function searches linearly for the block's height and /// then calls `get_block_by_height`. If you know the height of the block, /// call `get_block_by_height` directly. - pub fn get_block_by_hash( - &self, - block_hash: &HashOf, - ) -> Option> { + pub fn get_block_by_hash(&self, block_hash: &HashOf) -> Option> { let index = self .block_data .lock() @@ -313,17 +345,17 @@ impl Kura { } /// Put a block in kura's in memory block store. - pub fn store_block(&self, block: VersionedCommittedBlock) { - self.block_data - .lock() - .push((block.hash(), Some(Arc::new(block)))); + pub fn store_block(&self, block: CommittedBlock) { + let block = Arc::new(SignedBlock::from(block)); + self.block_data.lock().push((block.hash(), Some(block))); } /// Replace the block in `Kura`'s in memory block store. 
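`get_block_by_height` above leans on the `(hash, Option<Arc<_>>)` layout that `init` sets up: the hash column is always populated, while block bodies start as `None` and are filled in on first access. A sketch of that lazy cache shape, with the disk read reduced to a closure:

use std::sync::Arc;

// Hash is always known after init; the block body is loaded from disk on demand.
struct LazyChain<B> {
    entries: Vec<([u8; 32], Option<Arc<B>>)>,
    load_from_disk: Box<dyn Fn(usize) -> B>,
}

impl<B> LazyChain<B> {
    // Heights are 1-based, as in Kura; 0 means "no block".
    fn get_block_by_height(&mut self, height: u64) -> Option<Arc<B>> {
        if height == 0 || height > self.entries.len() as u64 {
            return None;
        }
        let idx = height as usize - 1;
        if let Some(block) = &self.entries[idx].1 {
            return Some(Arc::clone(block));
        }
        let block = Arc::new((self.load_from_disk)(idx));
        self.entries[idx].1 = Some(Arc::clone(&block));
        Some(block)
    }
}

fn main() {
    let mut chain = LazyChain {
        entries: vec![([0; 32], None)],
        load_from_disk: Box::new(|idx| format!("block #{idx} decoded from blocks.data")),
    };
    // First call loads and caches; second call hits the cache.
    let a = chain.get_block_by_height(1).unwrap();
    let b = chain.get_block_by_height(1).unwrap();
    assert!(Arc::ptr_eq(&a, &b));
}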
- pub fn replace_top_block(&self, block: VersionedCommittedBlock) { + pub fn replace_top_block(&self, block: CommittedBlock) { + let block = Arc::new(SignedBlock::from(block)); let mut data = self.block_data.lock(); data.pop(); - data.push((block.hash(), Some(Arc::new(block)))); + data.push((block.hash(), Some(block))); } } @@ -368,20 +400,20 @@ impl BlockStore { /// /// # Panics /// * if you pass in `LockStatus::Unlocked` and it is unable to lock the block store. - pub fn new(path: &Path, already_locked: LockStatus) -> Self { + pub fn new(store_path: &Path, already_locked: LockStatus) -> Self { if matches!(already_locked, LockStatus::Unlocked) { - let path = path.join(LOCK_FILE_NAME); + let lock_path = store_path.join(LOCK_FILE_NAME); if let Err(e) = fs::File::options() .read(true) .write(true) .create_new(true) - .open(path.clone()) + .open(&lock_path) { match e.kind() { - std::io::ErrorKind::AlreadyExists => Err(Error::Locked(path)), + std::io::ErrorKind::AlreadyExists => Err(Error::Locked(lock_path)), std::io::ErrorKind::NotFound => { - match std::fs::create_dir_all(&path) - .map_err(|e| Error::MkDir(e, path.clone())) + match std::fs::create_dir_all(store_path) + .map_err(|e| Error::MkDir(e, store_path.to_path_buf())) { Err(e) => Err(e), Ok(_) => { @@ -389,22 +421,22 @@ impl BlockStore { .read(true) .write(true) .create_new(true) - .open(path.clone()) + .open(&lock_path) { - Err(Error::IO(e, path)) + Err(Error::IO(e, lock_path)) } else { Ok(()) } } } } - _ => Err(Error::IO(e, path)), + _ => Err(Error::IO(e, lock_path)), } .expect("Kura must be able to lock the blockstore"); } } BlockStore { - path_to_blockchain: path.to_path_buf(), + path_to_blockchain: store_path.to_path_buf(), } } @@ -422,15 +454,12 @@ impl BlockStore { let mut index_file = std::fs::OpenOptions::new() .read(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; let start_location = start_block_height * (2 * std::mem::size_of::() as u64); let block_count = dest_buffer.len(); if start_location + (2 * std::mem::size_of::() as u64) * block_count as u64 - > index_file - .metadata() - .map_err(|e| Error::IO(e, path.clone()))? - .len() + > index_file.metadata().add_err_context(&path)?.len() { return Err(Error::OutOfBoundsBlockRead { start_block_height, @@ -439,25 +468,23 @@ impl BlockStore { } index_file .seek(SeekFrom::Start(start_location)) - .map_err(|e| Error::IO(e, path.clone()))?; - let mut buffer = [0; core::mem::size_of::()]; + .add_err_context(&path)?; // (start, length), (start,length) ... for current_buffer in dest_buffer.iter_mut() { + let mut buffer = [0; core::mem::size_of::()]; + *current_buffer = BlockIndex { start: { - index_file - .read_exact(&mut buffer) - .map_err(|e| Error::IO(e, path.clone()))?; + index_file.read_exact(&mut buffer).add_err_context(&path)?; u64::from_le_bytes(buffer) }, length: { - index_file - .read_exact(&mut buffer) - .map_err(|e| Error::IO(e, path.clone()))?; + index_file.read_exact(&mut buffer).add_err_context(&path)?; u64::from_le_bytes(buffer) }, }; } + Ok(()) } @@ -492,12 +519,71 @@ impl BlockStore { let index_file = std::fs::OpenOptions::new() .read(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path.clone()))?; - Ok(index_file.metadata().map_err(|e| Error::IO(e, path))?.len() + .add_err_context(&path)?; + Ok(index_file.metadata().add_err_context(&path)?.len() / (2 * std::mem::size_of::() as u64)) // Each entry is 16 bytes. } + /// Read a series of block hashes from the block hashes file + /// + /// # Errors + /// IO Error. 
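The `store_path`/`lock_path` renames in `BlockStore::new` above are more than cosmetic: the removed lines shadowed `path` with the lock-file path and then passed that same value to `create_dir_all`, creating a directory where the lock file should go. The locking itself is the usual `create_new` trick, sketched here with error returns instead of the `expect`:

use std::{fs, io, path::Path};

// Take an exclusive lock on a store directory by creating a lock file that
// must not already exist. `create_new` is atomic: the open fails with
// AlreadyExists if another process won the race.
fn lock_store(store_path: &Path) -> io::Result<()> {
    let lock_path = store_path.join("kura.lock");
    match fs::File::options()
        .read(true)
        .write(true)
        .create_new(true)
        .open(&lock_path)
    {
        Ok(_) => Ok(()),
        Err(e) if e.kind() == io::ErrorKind::NotFound => {
            // Parent directory is missing: create the *store* directory
            // (not the lock path) and retry once.
            fs::create_dir_all(store_path)?;
            fs::File::options()
                .read(true)
                .write(true)
                .create_new(true)
                .open(&lock_path)
                .map(drop)
        }
        Err(e) => Err(e),
    }
}

fn main() -> io::Result<()> {
    let dir = std::env::temp_dir().join("kura_lock_demo");
    let _ = fs::remove_dir_all(&dir);
    lock_store(&dir)?;
    // A second lock attempt must fail with AlreadyExists.
    assert_eq!(
        lock_store(&dir).unwrap_err().kind(),
        io::ErrorKind::AlreadyExists
    );
    Ok(())
}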
+ pub fn read_block_hashes( + &self, + start_block_height: u64, + block_count: usize, + ) -> Result>> { + let path = self.path_to_blockchain.join(HASHES_FILE_NAME); + let mut hashes_file = std::fs::OpenOptions::new() + .read(true) + .open(path.clone()) + .add_err_context(&path)?; + let start_location = start_block_height * SIZE_OF_BLOCK_HASH; + + if start_location + (SIZE_OF_BLOCK_HASH) * block_count as u64 + > hashes_file.metadata().add_err_context(&path)?.len() + { + return Err(Error::OutOfBoundsBlockRead { + start_block_height, + block_count, + }); + } + hashes_file + .seek(SeekFrom::Start(start_location)) + .add_err_context(&path)?; + + (0..block_count) + .map(|_| { + let mut buffer = [0; Hash::LENGTH]; + + hashes_file + .read_exact(&mut buffer) + .add_err_context(&path) + .and_then(|_| HashOf::decode_all(&mut buffer.as_slice()).map_err(Error::Codec)) + }) + .collect() + } + + /// Get the number of hashes in the hashes file, which is + /// calculated as the size of the hashes file in bytes divided by + /// `size_of(HashOf)`. + /// + /// # Errors + /// IO Error. + /// + /// The most common reason this function fails is + /// that you did not call `create_files_if_they_do_not_exist`. + #[allow(clippy::integer_division)] + pub fn read_hashes_count(&self) -> Result { + let path = self.path_to_blockchain.join(HASHES_FILE_NAME); + let hashes_file = std::fs::OpenOptions::new() + .read(true) + .open(path.clone()) + .add_err_context(&path)?; + Ok(hashes_file.metadata().add_err_context(&path)?.len() / SIZE_OF_BLOCK_HASH) + } + /// Read block data starting from the /// `start_location_in_data_file` in data file in order to fill /// `dest_buffer`. @@ -513,13 +599,12 @@ impl BlockStore { let mut data_file = std::fs::OpenOptions::new() .read(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; data_file .seek(SeekFrom::Start(start_location_in_data_file)) - .map_err(|e| Error::IO(e, path.clone()))?; - data_file - .read_exact(dest_buffer) - .map_err(|e| Error::IO(e, path))?; + .add_err_context(&path)?; + data_file.read_exact(dest_buffer).add_err_context(&path)?; + Ok(()) } @@ -535,29 +620,26 @@ impl BlockStore { .write(true) .create(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; let start_location = block_height * (2 * std::mem::size_of::() as u64); if start_location + (2 * std::mem::size_of::() as u64) - > index_file - .metadata() - .map_err(|e| Error::IO(e, path.clone()))? - .len() + > index_file.metadata().add_err_context(&path)?.len() { index_file .set_len(start_location + (2 * std::mem::size_of::() as u64)) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; } index_file .seek(SeekFrom::Start(start_location)) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; // block0 | block1 // start, length| start, length ... et cetera. 
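Both the index file and the new hashes file are flat arrays of fixed-width records, so a record's offset is just `height * record_size` and the record count is `file_len / record_size` (the integer division silently ignores a trailing partial record, as in `read_hashes_count`). A self-contained sketch of that layout for 32-byte hashes; the file name and constants are illustrative:

use std::{
    fs,
    io::{self, Read, Seek, SeekFrom, Write},
    path::Path,
};

const HASH_LEN: u64 = 32;

fn write_hash(path: &Path, height: u64, hash: &[u8; 32]) -> io::Result<()> {
    let mut file = fs::OpenOptions::new().write(true).create(true).open(path)?;
    let offset = height * HASH_LEN;
    // Extend the file first if writing past the current end.
    if offset + HASH_LEN > file.metadata()?.len() {
        file.set_len(offset + HASH_LEN)?;
    }
    file.seek(SeekFrom::Start(offset))?;
    file.write_all(hash)
}

fn read_hash(path: &Path, height: u64) -> io::Result<[u8; 32]> {
    let mut file = fs::File::open(path)?;
    file.seek(SeekFrom::Start(height * HASH_LEN))?;
    let mut buf = [0u8; 32];
    file.read_exact(&mut buf)?;
    Ok(buf)
}

fn hash_count(path: &Path) -> io::Result<u64> {
    Ok(fs::metadata(path)?.len() / HASH_LEN)
}

fn main() -> io::Result<()> {
    let path = std::env::temp_dir().join("blocks.hashes.demo");
    let _ = fs::remove_file(&path);
    write_hash(&path, 0, &[7; 32])?;
    write_hash(&path, 1, &[8; 32])?;
    assert_eq!(read_hash(&path, 1)?, [8; 32]);
    assert_eq!(hash_count(&path)?, 2);
    Ok(())
}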
index_file .write_all(&start.to_le_bytes()) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; index_file .write_all(&length.to_le_bytes()) - .map_err(|e| Error::IO(e, path))?; + .add_err_context(&path)?; Ok(()) } @@ -577,11 +659,9 @@ impl BlockStore { let index_file = std::fs::OpenOptions::new() .write(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; let new_byte_size = new_count * (2 * std::mem::size_of::() as u64); - index_file - .set_len(new_byte_size) - .map_err(|e| Error::IO(e, path))?; + index_file.set_len(new_byte_size).add_err_context(&path)?; Ok(()) } @@ -600,23 +680,69 @@ impl BlockStore { let mut data_file = std::fs::OpenOptions::new() .write(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; if start_location_in_data_file + block_data.len() as u64 - > data_file - .metadata() - .map_err(|e| Error::IO(e, path.clone()))? - .len() + > data_file.metadata().add_err_context(&path)?.len() { data_file .set_len(start_location_in_data_file + block_data.len() as u64) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; } data_file .seek(SeekFrom::Start(start_location_in_data_file)) - .map_err(|e| Error::IO(e, path.clone()))?; - data_file - .write_all(block_data) - .map_err(|e| Error::IO(e, path.clone()))?; + .add_err_context(&path)?; + data_file.write_all(block_data).add_err_context(&path)?; + Ok(()) + } + + /// Write the hash of a single block at the specified `block_height`. + /// If `block_height` is beyond the end of the index file, attempt to + /// extend the index file. + /// + /// # Errors + /// IO Error. + pub fn write_block_hash(&mut self, block_height: u64, hash: HashOf) -> Result<()> { + let path = self.path_to_blockchain.join(HASHES_FILE_NAME); + let mut hashes_file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .open(path.clone()) + .add_err_context(&path)?; + let start_location = block_height * SIZE_OF_BLOCK_HASH; + if start_location + SIZE_OF_BLOCK_HASH + > hashes_file.metadata().add_err_context(&path)?.len() + { + hashes_file + .set_len(start_location + SIZE_OF_BLOCK_HASH) + .add_err_context(&path)?; + } + hashes_file + .seek(SeekFrom::Start(start_location)) + .add_err_context(&path)?; + hashes_file + .write_all(hash.as_ref()) + .add_err_context(&path)?; + Ok(()) + } + + /// Write the hashes to the hashes file overwriting any previous hashes. + /// + /// # Errors + /// IO Error. 
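`write_block_hash` above and `append_block_to_chain` further down keep three files in lockstep: the serialized bytes go to the data file, a `(start, length)` pair to the index file, and the block hash to the hashes file. A condensed sketch of that bookkeeping over in-memory buffers, with a toy `BlockLike` trait standing in for `SignedBlock`:

// In-memory stand-ins for blocks.data, blocks.index and blocks.hashes.
#[derive(Default)]
struct Store {
    data: Vec<u8>,
    index: Vec<(u64, u64)>, // (start, length) per block
    hashes: Vec<[u8; 32]>,
}

trait BlockLike {
    fn encode(&self) -> Vec<u8>;
    fn hash(&self) -> [u8; 32];
}

impl Store {
    fn append_block_to_chain(&mut self, block: &impl BlockLike) {
        let bytes = block.encode();
        // A new block starts where the previous one ended.
        let start = self
            .index
            .last()
            .map_or(0, |(start, length)| start + length);

        self.data.extend_from_slice(&bytes);
        self.index.push((start, bytes.len() as u64));
        self.hashes.push(block.hash());
    }
}

struct DummyBlock(&'static [u8]);

impl BlockLike for DummyBlock {
    fn encode(&self) -> Vec<u8> {
        self.0.to_vec()
    }
    fn hash(&self) -> [u8; 32] {
        [self.0.len() as u8; 32] // not a real hash, just deterministic
    }
}

fn main() {
    let mut store = Store::default();
    store.append_block_to_chain(&DummyBlock(b"genesis"));
    store.append_block_to_chain(&DummyBlock(b"block two"));

    // Index, data and hashes stay in lockstep.
    assert_eq!(store.index, vec![(0, 7), (7, 9)]);
    assert_eq!(store.hashes.len(), 2);
}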
+ pub fn overwrite_block_hashes(&mut self, hashes: &[HashOf]) -> Result<()> { + let path = self.path_to_blockchain.join(HASHES_FILE_NAME); + let hashes_file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(path.clone()) + .add_err_context(&path)?; + let mut hashes_file = BufWriter::new(hashes_file); + for hash in hashes { + hashes_file + .write_all(hash.as_ref()) + .add_err_context(&path)?; + } Ok(()) } @@ -634,13 +760,19 @@ impl BlockStore { .write(true) .create(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path))?; + .add_err_context(&path)?; let path = self.path_to_blockchain.join(DATA_FILE_NAME); std::fs::OpenOptions::new() .write(true) .create(true) .open(path.clone()) - .map_err(|e| Error::IO(e, path))?; + .add_err_context(&path)?; + let path = self.path_to_blockchain.join(HASHES_FILE_NAME); + std::fs::OpenOptions::new() + .write(true) + .create(true) + .open(path.clone()) + .add_err_context(&path)?; Ok(()) } @@ -651,7 +783,8 @@ impl BlockStore { /// # Errors /// Fails if any of the required platform-specific functions /// fail. - pub fn append_block_to_chain(&mut self, block_data: &[u8]) -> Result<()> { + pub fn append_block_to_chain(&mut self, block: &SignedBlock) -> Result<()> { + let bytes = block.encode_versioned(); let new_block_height = self.read_index_count()?; let start_location_in_data_file = if new_block_height == 0 { 0 @@ -660,12 +793,13 @@ impl BlockStore { ultimate_block.start + ultimate_block.length }; - self.write_block_data(start_location_in_data_file, block_data)?; + self.write_block_data(start_location_in_data_file, &bytes)?; self.write_block_index( new_block_height, start_location_in_data_file, - block_data.len() as u64, + bytes.len() as u64, )?; + self.write_block_hash(new_block_height, block.hash())?; Ok(()) } @@ -680,7 +814,7 @@ pub enum Error { /// Failed to create the directory {1:?} MkDir(#[source] std::io::Error, PathBuf), /// Failed to serialize/deserialize block - Codec(#[from] iroha_version::error::Error), + Codec(#[from] parity_scale_codec::Error), /// Failed to allocate buffer Alloc(#[from] std::collections::TryReserveError), /// Tried reading block data out of bounds: {start_block_height}, {block_count} @@ -694,15 +828,31 @@ pub enum Error { Locked(PathBuf), /// Conversion of wide integer into narrow integer failed. 
This error cannot be caught at compile time at present IntConversion(#[from] std::num::TryFromIntError), + /// Number of blocks in the hashes file differs from the number in the index file + HashesFileHeightMismatch, +} + +trait AddErrContextExt { + type Context; + + fn add_err_context(self, context: &Self::Context) -> Result; +} + +impl AddErrContextExt for Result { + type Context = PathBuf; + + fn add_err_context(self, path: &Self::Context) -> Result { + self.map_err(|e| Error::IO(e, path.clone())) + } } -#[allow(clippy::unwrap_used)] #[cfg(test)] mod tests { use tempfile::TempDir; use super::*; + use crate::block::ValidBlock; fn indices(value: [(u64, u64); N]) -> [BlockIndex; N] { let mut ret = [BlockIndex { start: 0, length: 0, }; N]; @@ -802,29 +952,66 @@ mod tests { let mut block_store = BlockStore::new(dir.path(), LockStatus::Unlocked); block_store.create_files_if_they_do_not_exist().unwrap(); + let dummy_block = ValidBlock::new_dummy().into(); + let append_count = 35; for _ in 0..append_count { - block_store - .append_block_to_chain(b"A hypothetical block") - .unwrap(); + block_store.append_block_to_chain(&dummy_block).unwrap(); } assert_eq!(append_count, block_store.read_index_count().unwrap()); } + #[test] + fn append_block_to_chain_increases_hashes_count() { + let dir = tempfile::tempdir().unwrap(); + let mut block_store = BlockStore::new(dir.path(), LockStatus::Unlocked); + block_store.create_files_if_they_do_not_exist().unwrap(); + + let dummy_block = ValidBlock::new_dummy().into(); + + let append_count = 35; + for _ in 0..append_count { + block_store.append_block_to_chain(&dummy_block).unwrap(); + } + + assert_eq!(append_count, block_store.read_hashes_count().unwrap()); + } + + #[test] + fn append_block_to_chain_write_correct_hashes() { + let dir = tempfile::tempdir().unwrap(); + let mut block_store = BlockStore::new(dir.path(), LockStatus::Unlocked); + block_store.create_files_if_they_do_not_exist().unwrap(); + + let dummy_block = ValidBlock::new_dummy().into(); + + let append_count = 35; + for _ in 0..append_count { + block_store.append_block_to_chain(&dummy_block).unwrap(); + } + + let block_hashes = block_store.read_block_hashes(0, append_count).unwrap(); + + for hash in block_hashes { + assert_eq!(hash, dummy_block.hash()) + } + } + #[test] fn append_block_to_chain_places_blocks_correctly_in_data_file() { let dir = tempfile::tempdir().unwrap(); let mut block_store = BlockStore::new(dir.path(), LockStatus::Unlocked); block_store.create_files_if_they_do_not_exist().unwrap(); - let block_data = b"some block data"; + let dummy_block = ValidBlock::new_dummy().into(); let append_count = 35; for _ in 0..append_count { - block_store.append_block_to_chain(block_data).unwrap(); + block_store.append_block_to_chain(&dummy_block).unwrap(); } + let block_data = dummy_block.encode_versioned(); for i in 0..append_count { let BlockIndex { start, length } = block_store.read_block_index(i).unwrap(); assert_eq!(i * block_data.len() as u64, start); diff --git a/core/src/lib.rs index 4ba13ecfca6..e0c6109e31f 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -2,28 +2,30 @@ pub mod block; pub mod block_sync; +pub mod executor; pub mod gossiper; pub mod kura; pub mod modules; +pub mod query; pub mod queue; pub mod smartcontracts; pub mod snapshot; pub mod sumeragi; pub mod tx; -pub mod validator; pub mod wsv; use core::time::Duration; -use std::collections::{HashMap, HashSet}; +use std::collections::{BTreeSet, HashMap, HashSet}; use gossiper::TransactionGossip; use iroha_data_model::{permission::Permissions, prelude::*}; +use
iroha_primitives::unique_vec::UniqueVec; use parity_scale_codec::{Decode, Encode}; use tokio::sync::broadcast; use crate::{ - block_sync::message::VersionedMessage as BlockSyncMessage, prelude::*, - sumeragi::message::VersionedPacket as SumeragiPacket, + block_sync::message::Message as BlockSyncMessage, prelude::*, + sumeragi::message::MessagePacket as SumeragiPacket, }; /// The interval at which sumeragi checks if there are tx in the `queue`. @@ -33,20 +35,23 @@ pub const TX_RETRIEVAL_INTERVAL: Duration = Duration::from_millis(100); pub type IrohaNetwork = iroha_p2p::NetworkHandle; /// Ids of peers. -pub type PeersIds = HashSet<::Id>; +pub type PeersIds = UniqueVec; /// Parameters set. pub type Parameters = HashSet; /// API to work with collections of [`DomainId`] : [`Domain`] mappings. -pub type DomainsMap = HashMap<::Id, Domain>; +pub type DomainsMap = HashMap; /// API to work with a collections of [`RoleId`]: [`Role`] mappings. -pub type RolesMap = HashMap<::Id, Role>; +pub type RolesMap = HashMap; /// API to work with a collections of [`AccountId`] [`Permissions`] mappings. pub type PermissionTokensMap = HashMap; +/// API to work with a collections of [`AccountId`] to [`RoleId`] mappings. +pub type AccountRolesSet = BTreeSet; + /// Type of `Sender` which should be used for channels of `Event` messages. pub type EventsSender = broadcast::Sender; @@ -100,6 +105,62 @@ pub mod handler { } } +pub mod role { + //! Module with extension for [`RoleId`] to be stored inside wsv. + + use derive_more::Constructor; + use iroha_primitives::impl_as_dyn_key; + use serde::{Deserialize, Serialize}; + + use super::*; + + /// [`RoleId`] with owner [`AccountId`] attached to it. + #[derive( + Debug, + Clone, + Constructor, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Decode, + Encode, + Deserialize, + Serialize, + )] + pub struct RoleIdWithOwner { + /// [`AccountId`] of the owner. + pub account_id: AccountId, + /// [`RoleId`] of the given role. + pub role_id: RoleId, + } + + /// Reference to [`RoleIdWithOwner`]. + #[derive(Debug, Clone, Copy, Constructor, PartialEq, Eq, PartialOrd, Ord, Hash)] + pub struct RoleIdWithOwnerRef<'role> { + /// [`AccountId`] of the owner. + pub account_id: &'role AccountId, + /// [`RoleId`] of the given role. + pub role_id: &'role RoleId, + } + + impl AsRoleIdWithOwnerRef for RoleIdWithOwner { + fn as_key(&self) -> RoleIdWithOwnerRef<'_> { + RoleIdWithOwnerRef { + account_id: &self.account_id, + role_id: &self.role_id, + } + } + } + + impl_as_dyn_key! { + target: RoleIdWithOwner, + key: RoleIdWithOwnerRef<'_>, + trait: AsRoleIdWithOwnerRef + } +} + pub mod prelude { //! Re-exports important traits and types. Meant to be glob imported when using `Iroha`. 
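`impl_as_dyn_key!` above is what lets `AccountRolesSet` (a `BTreeSet<RoleIdWithOwner>`) be probed with two borrowed ids instead of a freshly cloned owned key, which the `Revoke<RoleId>` change further down relies on. A simplified version of the dyn-key trick with `String` ids in place of `AccountId`/`RoleId` (the real macro lives in `iroha_primitives`):

use std::borrow::Borrow;
use std::cmp::Ordering;
use std::collections::BTreeSet;

// Owned composite key; field order matters for the derived Ord.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
struct RoleIdWithOwner {
    account_id: String,
    role_id: String,
}

// Borrowed view of the same key.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct RoleIdWithOwnerRef<'a> {
    account_id: &'a str,
    role_id: &'a str,
}

trait AsKey {
    fn as_key(&self) -> RoleIdWithOwnerRef<'_>;
}

impl AsKey for RoleIdWithOwner {
    fn as_key(&self) -> RoleIdWithOwnerRef<'_> {
        RoleIdWithOwnerRef { account_id: &self.account_id, role_id: &self.role_id }
    }
}

impl AsKey for RoleIdWithOwnerRef<'_> {
    fn as_key(&self) -> RoleIdWithOwnerRef<'_> {
        *self
    }
}

// Both key forms can be borrowed as the same trait object...
impl<'a> Borrow<dyn AsKey + 'a> for RoleIdWithOwner {
    fn borrow(&self) -> &(dyn AsKey + 'a) {
        self
    }
}

// ...and the trait object orders consistently with the owned key.
impl PartialEq for dyn AsKey + '_ {
    fn eq(&self, other: &Self) -> bool {
        self.as_key() == other.as_key()
    }
}
impl Eq for dyn AsKey + '_ {}
impl PartialOrd for dyn AsKey + '_ {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for dyn AsKey + '_ {
    fn cmp(&self, other: &Self) -> Ordering {
        self.as_key().cmp(&other.as_key())
    }
}

fn main() {
    let mut set: BTreeSet<RoleIdWithOwner> = BTreeSet::new();
    set.insert(RoleIdWithOwner {
        account_id: "alice@wonderland".into(),
        role_id: "admin".into(),
    });

    // Lookup by borrowed parts; no owned key is built.
    let probe = RoleIdWithOwnerRef { account_id: "alice@wonderland", role_id: "admin" };
    assert!(set.contains(&probe as &dyn AsKey));
}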
@@ -113,3 +174,47 @@ pub mod prelude { wsv::{World, WorldStateView}, }; } + +#[cfg(test)] +mod tests { + use std::cmp::Ordering; + + use iroha_data_model::{account::AccountId, role::RoleId}; + + use crate::role::RoleIdWithOwner; + + #[test] + fn cmp_role_id_with_owner() { + let role_id_a: RoleId = "a".parse().expect("failed to parse RoleId"); + let role_id_b: RoleId = "b".parse().expect("failed to parse RoleId"); + let account_id_a: AccountId = "a@domain".parse().expect("failed to parse AccountId"); + let account_id_b: AccountId = "b@domain".parse().expect("failed to parse AccountId"); + + let mut role_ids_with_owner = Vec::new(); + for account_id in [&account_id_a, &account_id_b] { + for role_id in [&role_id_a, &role_id_b] { + role_ids_with_owner.push(RoleIdWithOwner { + role_id: role_id.clone(), + account_id: account_id.clone(), + }) + } + } + + for role_id_with_owner_1 in &role_ids_with_owner { + for role_id_with_owner_2 in &role_ids_with_owner { + match ( + role_id_with_owner_1.account_id.cmp(&role_id_with_owner_2.account_id), + role_id_with_owner_1.role_id.cmp(&role_id_with_owner_2.role_id), + ) { + // `AccountId` takes precedence in comparison; + // if `AccountId`s are equal, then the comparison is based on `RoleId`s + (Ordering::Equal, ordering) | (ordering, _) => assert_eq!( + role_id_with_owner_1.cmp(role_id_with_owner_2), + ordering, + "{role_id_with_owner_1:?} and {role_id_with_owner_2:?} are expected to be {ordering:?}" + ), + } + } + } + } +} diff --git a/cli/src/torii/cursor.rs b/core/src/query/cursor.rs similarity index 60% rename from cli/src/torii/cursor.rs rename to core/src/query/cursor.rs index a3d1aca610c..b1ef3393c8f 100644 --- a/cli/src/torii/cursor.rs +++ b/core/src/query/cursor.rs @@ -1,13 +1,19 @@ -use std::num::{NonZeroU64, NonZeroUsize}; +//! Module with cursor-based pagination functionality like [`Batched`]. -use crate::torii::{Error, Result}; +use std::num::{NonZeroU32, NonZeroU64}; +use derive_more::Display; +use parity_scale_codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; + +/// Trait for iterators that can be batched. pub trait Batch: IntoIterator + Sized { - fn batched(self, fetch_size: NonZeroUsize) -> Batched; + /// Pack iterator into batches of the given size. + fn batched(self, fetch_size: NonZeroU32) -> Batched; } impl Batch for I { - fn batched(self, batch_size: NonZeroUsize) -> Batched { + fn batched(self, batch_size: NonZeroU32) -> Batched { Batched { iter: self.into_iter(), batch_size, @@ -21,14 +27,24 @@ impl Batch for I { #[derive(Debug)] pub struct Batched { iter: I::IntoIter, - batch_size: NonZeroUsize, + batch_size: NonZeroU32, cursor: Option, } +/// Unknown cursor error. +/// +/// Happens when a client sends a cursor that doesn't match any cursor on the server.
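The `Batched` state above, together with the `next_batch` implementation that follows, forms a small handshake: the server hands out a cursor with each batch and the client must echo it back verbatim; anything else is an `UnknownCursor`. A minimal model of that contract, using plain `u64` positions and `u32` items:

// Server-side state for one live query.
struct Batched<I: Iterator<Item = u32>> {
    iter: I,
    batch_size: u32,
    cursor: Option<u64>, // position the client is expected to echo back
}

#[derive(Debug, PartialEq)]
struct UnknownCursor;

impl<I: Iterator<Item = u32>> Batched<I> {
    fn next_batch(&mut self, cursor: Option<u64>) -> Result<(Vec<u32>, Option<u64>), UnknownCursor> {
        // The client must present exactly the cursor handed out last time.
        if cursor != self.cursor {
            return Err(UnknownCursor);
        }
        let batch: Vec<u32> = self.iter.by_ref().take(self.batch_size as usize).collect();
        // Advance the cursor, or drop it when the iterator looks exhausted.
        self.cursor = if (batch.len() as u32) == self.batch_size {
            Some(self.cursor.unwrap_or(0) + u64::from(self.batch_size))
        } else {
            None
        };
        Ok((batch, self.cursor))
    }
}

fn main() {
    let mut live = Batched { iter: 0u32..5, batch_size: 2, cursor: Some(0) };

    let (first, cursor) = live.next_batch(Some(0)).unwrap();
    assert_eq!(first, vec![0, 1]);

    // Replaying a stale cursor is rejected.
    assert_eq!(live.next_batch(Some(0)), Err(UnknownCursor));

    let (second, _) = live.next_batch(cursor).unwrap();
    assert_eq!(second, vec![2, 3]);
}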
+#[derive(Debug, Display, thiserror::Error, Copy, Clone, Serialize, Deserialize, Encode, Decode)] +#[display(fmt = "Unknown cursor")] +pub struct UnknownCursor; + impl> Batched { - pub(crate) fn next_batch(&mut self, cursor: Option) -> Result<(I, Option)> { + pub(crate) fn next_batch( + &mut self, + cursor: Option, + ) -> Result<(I, Option), UnknownCursor> { if cursor != self.cursor { - return Err(Error::UnknownCursor); + return Err(UnknownCursor); } let mut batch_size = 0; @@ -36,7 +52,12 @@ impl> Batched { .iter .by_ref() .inspect(|_| batch_size += 1) - .take(self.batch_size.get()) + .take( + self.batch_size + .get() + .try_into() + .expect("`u32` should always fit into `usize`"), + ) .collect(); self.cursor = if let Some(cursor) = self.cursor { @@ -72,6 +93,7 @@ impl> Batched { )) } + /// Check if all values were drained from the iterator. pub fn is_depleted(&self) -> bool { self.cursor.is_none() } diff --git a/core/src/query/mod.rs b/core/src/query/mod.rs new file mode 100644 index 00000000000..9b6de03c3e9 --- /dev/null +++ b/core/src/query/mod.rs @@ -0,0 +1,5 @@ +//! This module contains [`LiveQueryStore`](store::LiveQueryStore) and helpers. + +pub mod cursor; +pub mod pagination; +pub mod store; diff --git a/cli/src/torii/pagination.rs b/core/src/query/pagination.rs similarity index 97% rename from cli/src/torii/pagination.rs rename to core/src/query/pagination.rs index e305edbaec1..d2cc87e88f8 100644 --- a/cli/src/torii/pagination.rs +++ b/core/src/query/pagination.rs @@ -1,3 +1,5 @@ +//! Module with [`Paginate`] iterator adaptor which provides [`paginate`] function. + use iroha_data_model::query::Pagination; /// Describes a collection to which pagination can be applied. diff --git a/core/src/query/store.rs b/core/src/query/store.rs new file mode 100644 index 00000000000..432f3ab142c --- /dev/null +++ b/core/src/query/store.rs @@ -0,0 +1,344 @@ +//! This module contains [`LiveQueryStore`] actor. + +use std::{ + cmp::Ordering, + collections::HashMap, + num::NonZeroU64, + time::{Duration, Instant}, +}; + +use iroha_config::live_query_store::Configuration; +use iroha_data_model::{ + asset::AssetValue, + query::{ + cursor::ForwardCursor, error::QueryExecutionFail, pagination::Pagination, sorting::Sorting, + FetchSize, QueryId, DEFAULT_FETCH_SIZE, MAX_FETCH_SIZE, + }, + BatchedResponse, BatchedResponseV1, HasMetadata, IdentifiableBox, ValidationFail, Value, +}; +use parity_scale_codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; +use tokio::sync::{mpsc, oneshot}; + +use super::{ + cursor::{Batch as _, Batched, UnknownCursor}, + pagination::Paginate as _, +}; +use crate::smartcontracts::query::LazyValue; + +/// Query service error. +#[derive(Debug, thiserror::Error, Copy, Clone, Serialize, Deserialize, Encode, Decode)] +pub enum Error { + /// Unknown cursor error. + #[error(transparent)] + UnknownCursor(#[from] UnknownCursor), + /// Connection with LiveQueryStore is closed. + #[error("Connection with LiveQueryStore is closed")] + ConnectionClosed, + /// Fetch size is too big.
+ #[error("Fetch size is too big")] + FetchSizeTooBig, +} + +#[allow(clippy::fallible_impl_from)] +impl From for ValidationFail { + fn from(error: Error) -> Self { + match error { + Error::UnknownCursor(_) => { + ValidationFail::QueryFailed(QueryExecutionFail::UnknownCursor) + } + Error::ConnectionClosed => { + panic!("Connection to `LiveQueryStore` was unexpectedly closed, this is a bug") + } + Error::FetchSizeTooBig => { + ValidationFail::QueryFailed(QueryExecutionFail::FetchSizeTooBig) + } + } + } +} + +/// Result type for [`LiveQueryStore`] methods. +pub type Result = std::result::Result; + +type LiveQuery = Batched>; + +/// Service which stores queries which might be non fully consumed by a client. +/// +/// Clients can handle their queries using [`LiveQueryStoreHandle`] +#[derive(Debug)] +pub struct LiveQueryStore { + queries: HashMap, + query_idle_time: Duration, +} + +impl LiveQueryStore { + /// Construct [`LiveQueryStore`] from configuration. + pub fn from_configuration(cfg: Configuration) -> Self { + Self { + queries: HashMap::default(), + query_idle_time: Duration::from_millis(cfg.query_idle_time_ms.into()), + } + } + + /// Construct [`LiveQueryStore`] for tests. + /// Default configuration will be used. + /// + /// Not marked as `#[cfg(test)]` because it is used in benches as well. + pub fn test() -> Self { + use iroha_config::base::proxy::Builder as _; + + LiveQueryStore::from_configuration( + iroha_config::live_query_store::ConfigurationProxy::default() + .build() + .expect("Failed to build LiveQueryStore configuration from proxy"), + ) + } + + /// Start [`LiveQueryStore`]. Requires a [`tokio::runtime::Runtime`] being run + /// as it will create new [`tokio::task`] and detach it. + /// + /// Returns a handle to interact with the service. + pub fn start(mut self) -> LiveQueryStoreHandle { + const ALL_HANDLERS_DROPPED: &str = + "All handler to LiveQueryStore are dropped. Shutting down..."; + + let (message_sender, mut message_receiver) = mpsc::channel(1); + + let mut idle_interval = tokio::time::interval(self.query_idle_time); + + tokio::task::spawn(async move { + loop { + tokio::select! { + _ = idle_interval.tick() => { + self.queries + .retain(|_, (_, last_access_time)| last_access_time.elapsed() <= self.query_idle_time); + }, + msg = message_receiver.recv() => { + let Some(msg) = msg else { + iroha_logger::info!("{ALL_HANDLERS_DROPPED}"); + break; + }; + + match msg { + Message::Insert(query_id, live_query) => { + self.insert(query_id, live_query) + } + Message::Remove(query_id, response_sender) => { + let live_query_opt = self.remove(&query_id); + let _ = response_sender.send(live_query_opt); + } + } + } + else => break, + } + tokio::task::yield_now().await; + } + }); + + LiveQueryStoreHandle { message_sender } + } + + fn insert(&mut self, query_id: QueryId, live_query: LiveQuery) { + self.queries.insert(query_id, (live_query, Instant::now())); + } + + fn remove(&mut self, query_id: &str) -> Option { + self.queries.remove(query_id).map(|(output, _)| output) + } +} + +enum Message { + Insert(QueryId, Batched>), + Remove(QueryId, oneshot::Sender>>>), +} + +/// Handle to interact with [`LiveQueryStore`]. +#[derive(Clone)] +pub struct LiveQueryStoreHandle { + message_sender: mpsc::Sender, +} + +impl LiveQueryStoreHandle { + /// Apply sorting and pagination to the query output. + /// + /// # Errors + /// + /// - Returns [`Error::ConnectionClosed`] if [`LiveQueryStore`] is dropped, + /// - Otherwise throws up query output handling errors. 
+ pub fn handle_query_output( + &self, + query_output: LazyValue<'_>, + sorting: &Sorting, + pagination: Pagination, + fetch_size: FetchSize, + ) -> Result> { + match query_output { + LazyValue::Value(batch) => { + let cursor = ForwardCursor::default(); + let result = BatchedResponseV1 { batch, cursor }; + Ok(result.into()) + } + LazyValue::Iter(iter) => { + let fetch_size = fetch_size.fetch_size.unwrap_or(DEFAULT_FETCH_SIZE); + if fetch_size > MAX_FETCH_SIZE { + return Err(Error::FetchSizeTooBig); + } + + let live_query = Self::apply_sorting_and_pagination(iter, sorting, pagination); + let query_id = uuid::Uuid::new_v4().to_string(); + + let curr_cursor = Some(0); + let live_query = live_query.batched(fetch_size); + self.construct_query_response(query_id, curr_cursor, live_query) + } + } + } + + /// Retrieve next batch of query output using `cursor`. + /// + /// # Errors + /// + /// - Returns [`Error::ConnectionClosed`] if [`LiveQueryStore`] is dropped, + /// - Otherwise throws up query output handling errors. + pub fn handle_query_cursor(&self, cursor: ForwardCursor) -> Result> { + let query_id = cursor.query_id.ok_or(UnknownCursor)?; + let live_query = self.remove(query_id.clone())?.ok_or(UnknownCursor)?; + + self.construct_query_response(query_id, cursor.cursor.map(NonZeroU64::get), live_query) + } + + /// Remove query from the storage if there is any. + /// + /// Returns `true` if query was removed, `false` otherwise. + /// + /// # Errors + /// + /// - Returns [`Error::ConnectionClosed`] if [`QueryService`] is dropped, + /// - Otherwise throws up query output handling errors. + pub fn drop_query(&self, query_id: QueryId) -> Result { + self.remove(query_id).map(|query_opt| query_opt.is_some()) + } + + fn insert(&self, query_id: QueryId, live_query: LiveQuery) -> Result<()> { + self.message_sender + .blocking_send(Message::Insert(query_id, live_query)) + .map_err(|_| Error::ConnectionClosed) + } + + fn remove(&self, query_id: QueryId) -> Result> { + let (sender, receiver) = oneshot::channel(); + + self.message_sender + .blocking_send(Message::Remove(query_id, sender)) + .or(Err(Error::ConnectionClosed))?; + + receiver.blocking_recv().or(Err(Error::ConnectionClosed)) + } + + fn construct_query_response( + &self, + query_id: QueryId, + curr_cursor: Option, + mut live_query: Batched>, + ) -> Result> { + let (batch, next_cursor) = live_query.next_batch(curr_cursor)?; + + if !live_query.is_depleted() { + self.insert(query_id.clone(), live_query)? 
+ } + + let query_response = BatchedResponseV1 { + batch: Value::Vec(batch), + cursor: ForwardCursor { + query_id: Some(query_id), + cursor: next_cursor, + }, + }; + + Ok(query_response.into()) + } + + fn apply_sorting_and_pagination( + iter: impl Iterator, + sorting: &Sorting, + pagination: Pagination, + ) -> Vec { + if let Some(key) = &sorting.sort_by_metadata_key { + let mut pairs: Vec<(Option, Value)> = iter + .map(|value| { + let key = match &value { + Value::Identifiable(IdentifiableBox::Asset(asset)) => match asset.value() { + AssetValue::Store(store) => store.get(key).cloned(), + _ => None, + }, + Value::Identifiable(v) => TryInto::<&dyn HasMetadata>::try_into(v) + .ok() + .and_then(|has_metadata| has_metadata.metadata().get(key)) + .cloned(), + _ => None, + }; + (key, value) + }) + .collect(); + pairs.sort_by( + |(left_key, _), (right_key, _)| match (left_key, right_key) { + (Some(l), Some(r)) => l.cmp(r), + (Some(_), None) => Ordering::Less, + (None, Some(_)) => Ordering::Greater, + (None, None) => Ordering::Equal, + }, + ); + pairs + .into_iter() + .map(|(_, val)| val) + .paginate(pagination) + .collect() + } else { + iter.paginate(pagination).collect() + } + } +} + +#[cfg(test)] +mod tests { + use std::num::NonZeroU32; + + use super::*; + + #[test] + fn query_message_order_preserved() { + let query_store = LiveQueryStore::test(); + let threaded_rt = tokio::runtime::Runtime::new().unwrap(); + let query_store_handle = threaded_rt.block_on(async { query_store.start() }); + + for i in 0..10_000 { + let pagination = Pagination::default(); + let fetch_size = FetchSize { + fetch_size: NonZeroU32::new(1), + }; + let sorting = Sorting::default(); + + let query_output = LazyValue::Iter(Box::new((0..100).map(|_| Value::Bool(false)))); + + let mut counter = 0; + + let (batch, mut cursor) = query_store_handle + .handle_query_output(query_output, &sorting, pagination, fetch_size) + .unwrap() + .into(); + let Value::Vec(v) = batch else { panic!("not expected result") }; + counter += v.len(); + + while cursor.cursor.is_some() { + let Ok(batched) = query_store_handle.handle_query_cursor(cursor) else { break }; + let (batch, new_cursor) = batched.into(); + let Value::Vec(v) = batch else { panic!("not expected result") }; + counter += v.len(); + + cursor = new_cursor; + } + + assert_eq!(counter, 100, "failed on {i} iteration"); + } + } +} diff --git a/core/src/queue.rs b/core/src/queue.rs index 681a899bb3c..b9b36f793e7 100644 --- a/core/src/queue.rs +++ b/core/src/queue.rs @@ -1,11 +1,4 @@ //! 
Module with queue actor -#![allow( - clippy::module_name_repetitions, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - use core::time::Duration; use std::collections::HashSet; @@ -14,12 +7,7 @@ use dashmap::{mapref::entry::Entry, DashMap}; use eyre::{Report, Result}; use iroha_config::queue::Configuration; use iroha_crypto::HashOf; -use iroha_data_model::{ - account::{Account, AccountId}, - evaluate::ExpressionEvaluator as _, - expression::{EvaluatesTo, Where}, - transaction::prelude::*, -}; +use iroha_data_model::{account::AccountId, transaction::prelude::*}; use iroha_logger::{debug, info, trace, warn}; use iroha_primitives::must_use::MustUse; use rand::seq::IteratorRandom; @@ -32,16 +20,17 @@ impl AcceptedTransaction { fn check_signature_condition(&self, wsv: &WorldStateView) -> Result> { let authority = &self.payload().authority; - let signatories = self + let transaction_signatories = self .signatures() .iter() .map(|signature| signature.public_key()) - .cloned(); + .cloned() + .collect(); wsv.map_account(authority, |account| { - wsv.evaluate(&check_signature_condition(account, signatories)) - .map(MustUse::new) - .map_err(Into::into) + Ok(account + .signature_check_condition + .check(&account.signatories, &transaction_signatories)) })? } @@ -51,29 +40,6 @@ impl AcceptedTransaction { } } -fn check_signature_condition( - account: &Account, - signatories: impl IntoIterator, -) -> EvaluatesTo { - let where_expr = Where::new(EvaluatesTo::new_evaluates_to_value( - *account.signature_check_condition.0.expression.clone(), - )) - .with_value( - iroha_data_model::account::ACCOUNT_SIGNATORIES_VALUE - .parse() - .expect("ACCOUNT_SIGNATORIES_VALUE should be valid."), - account.signatories.iter().cloned().collect::>(), - ) - .with_value( - iroha_data_model::account::TRANSACTION_SIGNATORIES_VALUE - .parse() - .expect("TRANSACTION_SIGNATORIES_VALUE should be valid."), - signatories.into_iter().collect::>(), - ); - - EvaluatesTo::new_unchecked(where_expr) -} - /// Lockfree queue for transactions /// /// Multiple producers, single consumer @@ -409,27 +375,25 @@ impl Queue { #[cfg(test)] mod tests { - #![allow(clippy::restriction, clippy::all, clippy::pedantic)] - use std::{str::FromStr, sync::Arc, thread, time::Duration}; use iroha_config::{base::proxy::Builder, queue::ConfigurationProxy}; - use iroha_data_model::{ - account::{ACCOUNT_SIGNATORIES_VALUE, TRANSACTION_SIGNATORIES_VALUE}, - prelude::*, - transaction::TransactionLimits, - }; + use iroha_data_model::{prelude::*, transaction::TransactionLimits}; use iroha_primitives::must_use::MustUse; use rand::Rng as _; + use tokio::test; use super::*; - use crate::{kura::Kura, smartcontracts::isi::Registrable as _, wsv::World, PeersIds}; + use crate::{ + kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, + wsv::World, PeersIds, + }; fn accepted_tx(account_id: &str, key: KeyPair) -> AcceptedTransaction { let message = std::iter::repeat_with(rand::random::) .take(16) .collect(); - let instructions = [FailBox { message }]; + let instructions = [Fail { message }]; let tx = TransactionBuilder::new(AccountId::from_str(account_id).expect("Valid")) .with_instructions(instructions) .sign(key) @@ -453,12 +417,14 @@ mod tests { } #[test] - fn push_tx() { + async fn push_tx() { let key_pair = KeyPair::generate().unwrap(); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( 
world_with_test_domains([key_pair.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { @@ -475,14 +441,16 @@ mod tests { } #[test] - fn push_tx_overflow() { + async fn push_tx_overflow() { let max_txs_in_queue = 10; let key_pair = KeyPair::generate().unwrap(); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( world_with_test_domains([key_pair.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { @@ -510,76 +478,26 @@ mod tests { } #[test] - fn push_tx_signature_condition_failure() { - let max_txs_in_queue = 10; - let key_pair = KeyPair::generate().unwrap(); - - let wsv = { - let domain_id = DomainId::from_str("wonderland").expect("Valid"); - let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); - let mut domain = Domain::new(domain_id.clone()).build(&account_id); - let mut account = Account::new(account_id.clone(), [key_pair.public_key().clone()]) - .build(&account_id); - // Cause `check_siganture_condition` failure by trying to convert `u32` to `bool` - account.signature_check_condition = - SignatureCheckCondition(EvaluatesTo::new_unchecked(0u32)); - assert!(domain.add_account(account).is_none()); - - let kura = Kura::blank_kura_for_testing(); - Arc::new(WorldStateView::new( - World::with([domain], PeersIds::new()), - kura.clone(), - )) - }; - - let queue = Queue::from_configuration(&Configuration { - transaction_time_to_live_ms: 100_000, - max_transactions_in_queue: max_txs_in_queue, - ..ConfigurationProxy::default() - .build() - .expect("Default queue config should always build") - }); - - assert!(matches!( - queue.push(accepted_tx("alice@wonderland", key_pair), &wsv), - Err(Failure { - err: Error::SignatureCondition { .. }, - .. 
- }) - )); - } - - #[test] - fn push_multisignature_tx() { + async fn push_multisignature_tx() { let max_txs_in_block = 2; let key_pairs = [KeyPair::generate().unwrap(), KeyPair::generate().unwrap()]; let kura = Kura::blank_kura_for_testing(); let wsv = { let domain_id = DomainId::from_str("wonderland").expect("Valid"); let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); - let mut domain = Domain::new(domain_id.clone()).build(&account_id); + let mut domain = Domain::new(domain_id).build(&account_id); let mut account = Account::new( account_id.clone(), key_pairs.iter().map(KeyPair::public_key).cloned(), ) .build(&account_id); - account.signature_check_condition = SignatureCheckCondition( - ContainsAll::new( - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str(TRANSACTION_SIGNATORIES_VALUE) - .expect("TRANSACTION_SIGNATORIES_VALUE should be valid."), - )), - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str(ACCOUNT_SIGNATORIES_VALUE) - .expect("ACCOUNT_SIGNATORIES_VALUE should be valid."), - )), - ) - .into(), - ); + account.signature_check_condition = SignatureCheckCondition::all_account_signatures(); assert!(domain.add_account(account).is_none()); + let query_handle = LiveQueryStore::test().start(); Arc::new(WorldStateView::new( World::with([domain], PeersIds::new()), - kura.clone(), + kura, + query_handle, )) }; @@ -590,7 +508,7 @@ mod tests { .build() .expect("Default queue config should always build") }); - let instructions: [InstructionBox; 0] = []; + let instructions: [InstructionExpr; 0] = []; let tx = TransactionBuilder::new("alice@wonderland".parse().expect("Valid")) .with_instructions(instructions); let tx_limits = TransactionLimits { @@ -600,7 +518,7 @@ mod tests { let fully_signed_tx: AcceptedTransaction = { let mut signed_tx = tx .clone() - .sign((&key_pairs[0]).clone()) + .sign(key_pairs[0].clone()) .expect("Failed to sign."); for key_pair in &key_pairs[1..] 
{ signed_tx = signed_tx.sign(key_pair.clone()).expect("Failed to sign"); @@ -647,13 +565,15 @@ mod tests { } #[test] - fn get_available_txs() { + async fn get_available_txs() { let max_txs_in_block = 2; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -674,12 +594,14 @@ mod tests { } #[test] - fn push_tx_already_in_blockchain() { + async fn push_tx_already_in_blockchain() { let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let mut wsv = WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, ); let tx = accepted_tx("alice@wonderland", alice_key); wsv.transactions.insert(tx.hash(), 1); @@ -701,13 +623,15 @@ mod tests { } #[test] - fn get_tx_drop_if_in_blockchain() { + async fn get_tx_drop_if_in_blockchain() { let max_txs_in_block = 2; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let mut wsv = WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, ); let tx = accepted_tx("alice@wonderland", alice_key); let queue = Queue::from_configuration(&Configuration { @@ -729,13 +653,15 @@ mod tests { } #[test] - fn get_available_txs_with_timeout() { + async fn get_available_txs_with_timeout() { let max_txs_in_block = 6; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 200, @@ -777,13 +703,15 @@ mod tests { // Queue should only drop transactions which are already committed or ttl expired. // Others should stay in the queue until that moment. 
#[test] - fn transactions_available_after_pop() { + async fn transactions_available_after_pop() { let max_txs_in_block = 2; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -811,13 +739,15 @@ mod tests { } #[test] - fn custom_expired_transaction_is_rejected() { + async fn custom_expired_transaction_is_rejected() { let max_txs_in_block = 2; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -826,7 +756,7 @@ mod tests { .build() .expect("Default queue config should always build") }); - let instructions = [FailBox { + let instructions = [Fail { message: "expired".to_owned(), }]; let mut tx = @@ -852,13 +782,15 @@ mod tests { } #[test] - fn concurrent_stress_test() { + async fn concurrent_stress_test() { let max_txs_in_block = 10; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, ); let queue = Arc::new(Queue::from_configuration(&Configuration { @@ -881,12 +813,9 @@ mod tests { while start_time.elapsed() < run_for { let tx = accepted_tx("alice@wonderland", alice_key.clone()); match queue_arc_clone.push(tx, &wsv_clone) { - Ok(()) => (), - Err(Failure { - err: Error::Full, .. - }) => (), - Err(Failure { - err: Error::MaximumTransactionsPerUser, + Ok(()) + | Err(Failure { + err: Error::Full | Error::MaximumTransactionsPerUser, .. }) => (), Err(Failure { err, .. 
}) => panic!("{err}"), @@ -898,7 +827,7 @@ mod tests { // Spawn a thread where we get_transactions_for_block and add them to WSV let get_txs_handle = { let queue_arc_clone = Arc::clone(&queue); - let mut wsv_clone = wsv.clone(); + let mut wsv_clone = wsv; thread::spawn(move || { while start_time.elapsed() < run_for { @@ -926,14 +855,16 @@ mod tests { } #[test] - fn push_tx_in_future() { + async fn push_tx_in_future() { let future_threshold_ms = 1000; let alice_key = KeyPair::generate().expect("Failed to generate keypair."); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); let wsv = Arc::new(WorldStateView::new( world_with_test_domains([alice_key.public_key().clone()]), - kura.clone(), + kura, + query_handle, )); let queue = Queue::from_configuration(&Configuration { @@ -946,8 +877,7 @@ mod tests { let mut tx = accepted_tx("alice@wonderland", alice_key); assert!(queue.push(tx.clone(), &wsv).is_ok()); // tamper timestamp - let VersionedSignedTransaction::V1(tx_ref) = &mut tx.0; - tx_ref.payload.creation_time_ms += 2 * future_threshold_ms; + tx.0.payload_mut().creation_time_ms += 2 * future_threshold_ms; assert!(matches!( queue.push(tx, &wsv), Err(Failure { @@ -959,7 +889,7 @@ mod tests { } #[test] - fn queue_throttling() { + async fn queue_throttling() { let alice_key_pair = KeyPair::generate().unwrap(); let bob_key_pair = KeyPair::generate().unwrap(); let kura = Kura::blank_kura_for_testing(); @@ -980,7 +910,8 @@ mod tests { assert!(domain.add_account(bob_account).is_none()); World::with([domain], PeersIds::new()) }; - let mut wsv = WorldStateView::new(world, kura.clone()); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world, kura, query_handle); let queue = Queue::from_configuration(&Configuration { transaction_time_to_live_ms: 100_000, @@ -1012,8 +943,7 @@ mod tests { err: Error::MaximumTransactionsPerUser }), ), - "Failed to match: {:?}", - result, + "Failed to match: {result:?}", ); // First push by Bob should be fine despite previous Alice error @@ -1033,14 +963,11 @@ mod tests { // After cleanup Alice and Bob pushes should work fine queue - .push( - accepted_tx("alice@wonderland", alice_key_pair.clone()), - &wsv, - ) + .push(accepted_tx("alice@wonderland", alice_key_pair), &wsv) .expect("Failed to push tx into queue"); queue - .push(accepted_tx("bob@wonderland", bob_key_pair.clone()), &wsv) + .push(accepted_tx("bob@wonderland", bob_key_pair), &wsv) .expect("Failed to push tx into queue"); } } diff --git a/core/src/smartcontracts/isi/account.rs b/core/src/smartcontracts/isi/account.rs index c7771229c72..508772dbd6b 100644 --- a/core/src/smartcontracts/isi/account.rs +++ b/core/src/smartcontracts/isi/account.rs @@ -1,7 +1,7 @@ //! This module contains implementations of smart-contract traits and instructions for [`Account`] structure //! and implementations of [`Query`]'s to [`WorldStateView`] about [`Account`]. 
-use iroha_data_model::{asset::AssetsMap, prelude::*, query::error::FindError, role::RoleIds}; +use iroha_data_model::{asset::AssetsMap, prelude::*, query::error::FindError}; use iroha_telemetry::metrics; use super::prelude::*; @@ -19,7 +19,6 @@ impl Registrable for iroha_data_model::account::NewAccount { assets: AssetsMap::default(), signature_check_condition: SignatureCheckCondition::default(), metadata: self.metadata, - roles: RoleIds::default(), } } } @@ -40,8 +39,8 @@ pub mod isi { }; use super::*; + use crate::role::{AsRoleIdWithOwnerRef, RoleIdWithOwner, RoleIdWithOwnerRef}; - #[allow(clippy::expect_used, clippy::unwrap_in_result)] impl Execute for Register { #[metrics(+"register_asset")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { @@ -54,7 +53,7 @@ pub mod isi { { assert_can_register(&asset_id.definition_id, wsv, &self.object.value)?; let asset = wsv - .asset_or_insert(&asset_id, self.object.value) + .asset_or_insert(asset_id.clone(), self.object.value) .expect("Account exists"); match asset.value { @@ -131,7 +130,7 @@ pub mod isi { } } - impl Execute for Mint { + impl Execute for Mint { #[metrics(+"mint_account_public_key")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; @@ -158,7 +157,7 @@ pub mod isi { } } - impl Execute for Burn { + impl Execute for Burn { #[metrics(+"burn_account_public_key")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; @@ -185,7 +184,7 @@ pub mod isi { } } - impl Execute for Mint { + impl Execute for Mint { #[metrics(+"mint_account_signature_check_condition")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; @@ -199,6 +198,21 @@ pub mod isi { } } + impl Execute for Transfer { + fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + wsv.asset_definition_mut(&self.object)?.owned_by = self.destination_id.clone(); + + wsv.emit_events(Some(AssetDefinitionEvent::OwnerChanged( + AssetDefinitionOwnerChanged { + asset_definition_id: self.object, + new_owner: self.destination_id, + }, + ))); + + Ok(()) + } + } + impl Execute for SetKeyValue { #[metrics(+"set_account_key_value")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { @@ -251,7 +265,7 @@ pub mod isi { } } - impl Execute for Grant { + impl Execute for Grant { #[metrics(+"grant_account_permission")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; @@ -290,14 +304,14 @@ pub mod isi { } } - impl Execute for Revoke { + impl Execute for Revoke { #[metrics(+"revoke_account_permission")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; let permission = self.object; // Check if account exists - wsv.account_mut(&account_id)?; + wsv.account(&account_id)?; if !wsv.remove_account_permission(&account_id, &permission) { return Err(FindError::PermissionToken(permission.definition_id).into()); @@ -314,7 +328,7 @@ pub mod isi { } } - impl Execute for Grant { + impl Execute for Grant { #[metrics(+"grant_account_role")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; @@ -330,18 +344,19 @@ pub mod isi { .into_iter() .map(|token| 
token.definition_id); - wsv.account_mut(&account_id) - .map_err(Error::from) - .and_then(|account| { - if !account.add_role(role_id.clone()) { - return Err(RepetitionError { - instruction_type: InstructionType::Grant, - id: IdBox::RoleId(role_id.clone()), - } - .into()); - } - Ok(()) - })?; + wsv.account(&account_id)?; + + if !wsv + .world + .account_roles + .insert(RoleIdWithOwner::new(account_id.clone(), role_id.clone())) + { + return Err(RepetitionError { + instruction_type: InstructionType::Grant, + id: IdBox::RoleId(role_id), + } + .into()); + } wsv.emit_events({ let account_id_clone = account_id.clone(); @@ -364,7 +379,7 @@ pub mod isi { } } - impl Execute for Revoke { + impl Execute for Revoke { #[metrics(+"revoke_account_role")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let account_id = self.destination_id; @@ -380,12 +395,13 @@ pub mod isi { .into_iter() .map(|token| token.definition_id); - wsv.account_mut(&account_id).and_then(|account| { - if !account.remove_role(&role_id) { - return Err(FindError::Role(role_id.clone())); - } - Ok(()) - })?; + if !wsv + .world + .account_roles + .remove::(&RoleIdWithOwnerRef::new(&account_id, &role_id)) + { + return Err(FindError::Role(role_id).into()); + } wsv.emit_events({ let account_id_clone = account_id.clone(); @@ -485,11 +501,8 @@ pub mod query { .wrap_err("Failed to evaluate account id") .map_err(|e| Error::Evaluate(e.to_string()))?; iroha_logger::trace!(%account_id, roles=?wsv.world.roles); - Ok(Box::new( - wsv.map_account(&account_id, |account| &account.roles)? - .iter() - .cloned(), - )) + wsv.account(&account_id)?; + Ok(Box::new(wsv.account_roles(&account_id).cloned())) } } diff --git a/core/src/smartcontracts/isi/asset.rs b/core/src/smartcontracts/isi/asset.rs index fd1485dfd31..6d5fd0ccda3 100644 --- a/core/src/smartcontracts/isi/asset.rs +++ b/core/src/smartcontracts/isi/asset.rs @@ -50,11 +50,10 @@ pub mod isi { wsv.increase_asset_total_amount(&asset_id.definition_id, 1_u32)?; } - wsv.asset_or_insert(&asset_id, Metadata::new())?; let asset_metadata_limits = wsv.config.asset_metadata_limits; + let asset = wsv.asset_or_insert(asset_id.clone(), Metadata::new())?; { - let asset = wsv.asset_mut(&asset_id)?; let store: &mut Metadata = asset .try_as_mut() .map_err(eyre::Error::from) @@ -67,7 +66,7 @@ pub mod isi { } wsv.emit_events(Some(AssetEvent::MetadataInserted(MetadataChanged { - target_id: asset_id.clone(), + target_id: asset_id, key: self.key, value: Box::new(self.value), }))); @@ -95,7 +94,7 @@ pub mod isi { }; wsv.emit_events(Some(AssetEvent::MetadataRemoved(MetadataChanged { - target_id: asset_id.clone(), + target_id: asset_id, key: self.key, value: Box::new(value), }))); @@ -104,26 +103,11 @@ pub mod isi { } } - impl Execute for Transfer { - fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - wsv.asset_definition_mut(&self.object.id)?.owned_by = self.destination_id.clone(); - - wsv.emit_events(Some(AssetDefinitionEvent::OwnerChanged( - AssetDefinitionOwnerChanged { - asset_definition_id: self.object.id, - new_owner: self.destination_id, - }, - ))); - - Ok(()) - } - } - macro_rules! 
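// Sketch of the role bookkeeping introduced above: roles no longer live on the
// `Account` value but in a world-level `account_roles` set of
// `RoleIdWithOwner { account_id, role_id }` entries, so grant and revoke become
// set insert/remove. Condensed from the Grant/Revoke hunks; error types as in the diff.
fn grant_role(wsv: &mut WorldStateView, account_id: AccountId, role_id: RoleId) -> Result<(), Error> {
    wsv.account(&account_id)?; // plain existence check replaces the old `account_mut`
    if !wsv
        .world
        .account_roles
        .insert(RoleIdWithOwner::new(account_id, role_id.clone()))
    {
        // the account already had this role
        return Err(RepetitionError {
            instruction_type: InstructionType::Grant,
            id: IdBox::RoleId(role_id),
        }
        .into());
    }
    Ok(())
}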
impl_mint { ($ty:ty, $metrics:literal) => { impl InnerMint for $ty {} - impl Execute for Mint { + impl Execute for Mint<$ty, Asset> { #[metrics(+$metrics)] fn execute( self, @@ -140,7 +124,7 @@ pub mod isi { ($ty:ty, $metrics:literal) => { impl InnerBurn for $ty {} - impl Execute for Burn { + impl Execute for Burn<$ty, Asset> { #[metrics(+$metrics)] fn execute( self, @@ -185,7 +169,7 @@ pub mod isi { /// Trait for blanket mint implementation. trait InnerMint { fn execute( - mint: Mint, + mint: Mint, _authority: &AccountId, wsv: &mut WorldStateView, ) -> Result<(), Error> @@ -209,12 +193,11 @@ pub mod isi { wsv, ::EXPECTED_VALUE_TYPE, )?; - wsv.asset_or_insert( - &asset_id, + let asset = wsv.asset_or_insert( + asset_id.clone(), ::DEFAULT_ASSET_VALUE, )?; let new_quantity = { - let asset = wsv.asset_mut(&asset_id)?; let quantity: &mut Self = asset .try_as_mut() .map_err(eyre::Error::from) @@ -232,7 +215,7 @@ pub mod isi { } wsv.emit_events(Some(AssetEvent::Added(AssetChanged { - asset_id: asset_id.clone(), + asset_id, amount: mint.object.into(), }))); @@ -243,7 +226,7 @@ pub mod isi { /// Trait for blanket burn implementation. trait InnerBurn { fn execute( - burn: Burn, + burn: Burn, _authority: &AccountId, wsv: &mut WorldStateView, ) -> Result<(), Error> @@ -309,21 +292,17 @@ pub mod isi { eyre::Error: From<>::Error>, Value: From, { - let source_id = &transfer.source_id; + let source_id = transfer.source_id; let destination_id = AssetId::new( source_id.definition_id.clone(), transfer.destination_id.clone(), ); - wsv.asset_or_insert( - &destination_id, - ::DEFAULT_ASSET_VALUE, - )?; { let account = wsv.account_mut(&source_id.account_id)?; let asset = account .assets - .get_mut(source_id) + .get_mut(&source_id) .ok_or_else(|| FindError::Asset(source_id.clone()))?; let quantity: &mut Self = asset .try_as_mut() @@ -333,13 +312,16 @@ pub mod isi { .checked_sub(transfer.object) .ok_or(MathError::NotEnoughQuantity)?; if asset.value.is_zero_value() { - assert!(account.remove_asset(source_id).is_some()); + assert!(account.remove_asset(&source_id).is_some()); } } + let destination_asset = wsv.asset_or_insert( + destination_id.clone(), + ::DEFAULT_ASSET_VALUE, + )?; let transfer_quantity = { - let asset = wsv.asset_mut(&destination_id)?; - let quantity: &mut Self = asset + let quantity: &mut Self = destination_asset .try_as_mut() .map_err(eyre::Error::from) .map_err(|e| Error::Conversion(e.to_string()))?; @@ -358,11 +340,11 @@ pub mod isi { wsv.emit_events([ AssetEvent::Removed(AssetChanged { - asset_id: source_id.clone(), + asset_id: source_id, amount: transfer.object.into(), }), AssetEvent::Added(AssetChanged { - asset_id: destination_id.clone(), + asset_id: destination_id, amount: transfer.object.into(), }), ]); @@ -437,7 +419,9 @@ pub mod query { use eyre::{Result, WrapErr as _}; use iroha_data_model::{ asset::{Asset, AssetDefinition}, - query::{asset::IsAssetDefinitionOwner, error::QueryExecutionFail as Error, MetadataValue}, + query::{ + asset::FindAssetDefinitionById, error::QueryExecutionFail as Error, MetadataValue, + }, }; use super::*; @@ -713,21 +697,4 @@ pub mod query { .map(Into::into) } } - - impl ValidQuery for IsAssetDefinitionOwner { - #[metrics("is_asset_definition_owner")] - fn execute(&self, wsv: &WorldStateView) -> Result { - let asset_definition_id = wsv - .evaluate(&self.asset_definition_id) - .wrap_err("Failed to get asset definition id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - let account_id = wsv - .evaluate(&self.account_id) - .wrap_err("Failed to get account 
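// The transfer rework above debits the source before the destination asset is
// created via `asset_or_insert`, and all arithmetic is checked so underflow and
// overflow surface as errors instead of panics. A self-contained sketch of that
// arithmetic shape (u32 stands in for the numeric asset value; the `Overflow`
// variant on the credit side is an assumption, only `NotEnoughQuantity` appears
// in the hunks above):
fn debit_then_credit(source: &mut u32, destination: &mut u32, amount: u32) -> Result<(), MathError> {
    *source = source
        .checked_sub(amount)
        .ok_or(MathError::NotEnoughQuantity)?; // debit first, as in the diff
    *destination = destination
        .checked_add(amount)
        .ok_or(MathError::Overflow)?; // credit the (possibly just-created) destination
    Ok(())
}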
id") - .map_err(|e| Error::Evaluate(e.to_string()))?; - - let entry = wsv.asset_definition(&asset_definition_id)?; - Ok(entry.owned_by == account_id) - } - } } diff --git a/core/src/smartcontracts/isi/block.rs b/core/src/smartcontracts/isi/block.rs index 017f03a4a6d..4f241372ef1 100644 --- a/core/src/smartcontracts/isi/block.rs +++ b/core/src/smartcontracts/isi/block.rs @@ -1,7 +1,7 @@ //! This module contains trait implementations related to block queries use eyre::{Result, WrapErr}; use iroha_data_model::{ - block::{BlockHeader, VersionedCommittedBlock}, + block::{BlockHeader, SignedBlock}, evaluate::ExpressionEvaluator, query::{ block::FindBlockHeaderByHash, @@ -17,9 +17,11 @@ impl ValidQuery for FindAllBlocks { fn execute<'wsv>( &self, wsv: &'wsv WorldStateView, - ) -> Result + 'wsv>, QueryExecutionFail> { + ) -> Result + 'wsv>, QueryExecutionFail> { Ok(Box::new( - wsv.all_blocks().rev().map(|block| Clone::clone(&*block)), + wsv.all_blocks() + .rev() + .map(|block| SignedBlock::clone(&block)), )) } } @@ -33,7 +35,7 @@ impl ValidQuery for FindAllBlockHeaders { Ok(Box::new( wsv.all_blocks() .rev() - .map(|block| block.as_v1().header.clone()), + .map(|block| block.payload().header.clone()), )) } } @@ -51,6 +53,6 @@ impl ValidQuery for FindBlockHeaderByHash { .find(|block| block.hash() == hash) .ok_or_else(|| QueryExecutionFail::Find(FindError::Block(hash)))?; - Ok(block.as_v1().header.clone()) + Ok(block.payload().header.clone()) } } diff --git a/core/src/smartcontracts/isi/domain.rs b/core/src/smartcontracts/isi/domain.rs index 2f82fec7c3c..b7930106a04 100644 --- a/core/src/smartcontracts/isi/domain.rs +++ b/core/src/smartcontracts/isi/domain.rs @@ -16,7 +16,7 @@ impl Registrable for iroha_data_model::domain::NewDomain { #[must_use] #[inline] - fn build(self, _authority: &AccountId) -> Self::Target { + fn build(self, authority: &AccountId) -> Self::Target { Self::Target { id: self.id, accounts: AccountsMap::default(), @@ -24,6 +24,7 @@ impl Registrable for iroha_data_model::domain::NewDomain { asset_total_quantities: AssetTotalQuantityMap::default(), metadata: self.metadata, logo: self.logo, + owned_by: authority.clone(), } } } @@ -283,6 +284,19 @@ pub mod isi { Ok(()) } } + + impl Execute for Transfer { + fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { + wsv.domain_mut(&self.object)?.owned_by = self.destination_id.clone(); + + wsv.emit_events(Some(DomainEvent::OwnerChanged(DomainOwnerChanged { + domain_id: self.object, + new_owner: self.destination_id, + }))); + + Ok(()) + } + } } /// Query module provides [`Query`] Domain related implementations. diff --git a/core/src/smartcontracts/isi/mod.rs b/core/src/smartcontracts/isi/mod.rs index a5ce89639ac..7f80bbfade1 100644 --- a/core/src/smartcontracts/isi/mod.rs +++ b/core/src/smartcontracts/isi/mod.rs @@ -1,11 +1,6 @@ //! This module contains enumeration of all possible Iroha Special -//! Instructions [`InstructionBox`], generic instruction types and related +//! Instructions [`InstructionExpr`], generic instruction types and related //! implementations. 
-#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] pub mod account; pub mod asset; pub mod block; @@ -36,7 +31,7 @@ pub trait Registrable { fn build(self, authority: &AccountId) -> Self::Target; } -impl Execute for InstructionBox { +impl Execute for InstructionExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { iroha_logger::debug!(isi=%self, "Executing"); @@ -44,7 +39,7 @@ impl Execute for InstructionBox { ($($isi:ident),+ $(,)?) => { match self { $( - InstructionBox::$isi(isi) => isi.execute(authority, wsv), )+ + InstructionExpr::$isi(isi) => isi.execute(authority, wsv), )+ } }; } @@ -72,7 +67,7 @@ impl Execute for InstructionBox { } } -impl Execute for RegisterBox { +impl Execute for RegisterExpr { #[iroha_logger::log(name = "register", skip_all, fields(id))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let object_id = wsv.evaluate(&self.object)?; @@ -88,15 +83,14 @@ impl Execute for RegisterBox { } RegistrableBox::Asset(object) => Register:: { object }.execute(authority, wsv), RegistrableBox::Trigger(object) => { - Register::> { object } - .execute(authority, wsv) + Register::> { object }.execute(authority, wsv) } RegistrableBox::Role(object) => Register:: { object }.execute(authority, wsv), } } } -impl Execute for UnregisterBox { +impl Execute for UnregisterExpr { #[iroha_logger::log(name = "unregister", skip_all, fields(id))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let object_id = wsv.evaluate(&self.object_id)?; @@ -115,8 +109,7 @@ impl Execute for UnregisterBox { IdBox::PeerId(object_id) => Unregister:: { object_id }.execute(authority, wsv), IdBox::RoleId(object_id) => Unregister:: { object_id }.execute(authority, wsv), IdBox::TriggerId(object_id) => { - Unregister::> { object_id } - .execute(authority, wsv) + Unregister::> { object_id }.execute(authority, wsv) } IdBox::PermissionTokenId(_) | IdBox::ParameterId(_) => { Err(Error::Evaluate(InstructionType::Unregister.into())) @@ -125,7 +118,7 @@ impl Execute for UnregisterBox { } } -impl Execute for MintBox { +impl Execute for MintExpr { #[iroha_logger::log(name = "Mint", skip_all, fields(destination))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; @@ -134,42 +127,42 @@ impl Execute for MintBox { iroha_logger::trace!(?object, %authority); match (destination_id, object) { (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Mint:: { + Mint:: { object, destination_id, } .execute(authority, wsv) } (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U128(object))) => { - Mint:: { + Mint:: { object, destination_id, } .execute(authority, wsv) } (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::Fixed(object))) => { - Mint:: { + Mint:: { object, destination_id, } .execute(authority, wsv) } (IdBox::AccountId(destination_id), Value::PublicKey(object)) => { - Mint:: { + Mint:: { object, destination_id, } .execute(authority, wsv) } (IdBox::AccountId(destination_id), Value::SignatureCheckCondition(object)) => { - Mint:: { + Mint:: { object, destination_id, } .execute(authority, wsv) } (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Mint::, u32> { + Mint::> { object, destination_id, } @@ -180,7 +173,7 @@ impl Execute for MintBox { } } -impl Execute for BurnBox { 
+impl Execute for BurnExpr { #[iroha_logger::log(name = "burn", skip_all, fields(destination))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; @@ -189,7 +182,7 @@ impl Execute for BurnBox { iroha_logger::trace!(?object, %authority); match (destination_id, object) { (IdBox::AssetId(destination_id), Value::Numeric(NumericValue::U32(object))) => { - Burn:: { + Burn:: { object, destination_id, } @@ -210,6 +203,13 @@ impl Execute for BurnBox { destination_id, } .execute(authority, wsv), + (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => { + Burn::> { + object, + destination_id, + } + .execute(authority, wsv) + } // TODO: Not implemented yet. // (IdBox::AccountId(account_id), Value::SignatureCheckCondition(condition)) => { // Burn::{condition, account_id}.execute(authority, wsv) @@ -219,35 +219,57 @@ impl Execute for BurnBox { } } -impl Execute for TransferBox { +impl Execute for TransferExpr { #[iroha_logger::log(name = "transfer", skip_all, fields(from, to))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let (IdBox::AssetId(source_id), IdBox::AccountId(destination_id)) = ( - wsv.evaluate(&self.source_id)?, - wsv.evaluate(&self.destination_id)?, - ) else { - return Err(Error::Evaluate(InstructionType::Transfer.into())); - }; - - let value = wsv.evaluate(&self.object)?; + let source_id = wsv.evaluate(&self.source_id)?; + let destination_id = wsv.evaluate(&self.destination_id)?; + let object = wsv.evaluate(&self.object)?; + iroha_logger::trace!(%object, %authority); Span::current().record("from", source_id.to_string()); Span::current().record("to", destination_id.to_string()); - iroha_logger::trace!(%value, %authority); - match value { - Value::Numeric(NumericValue::U32(object)) => Transfer { - source_id, - object, - destination_id, - } - .execute(authority, wsv), - Value::Numeric(NumericValue::U128(object)) => Transfer { + match (source_id, object, destination_id) { + ( + IdBox::AssetId(source_id), + Value::Numeric(value), + IdBox::AccountId(destination_id), + ) => match value { + NumericValue::U32(object) => Transfer { + source_id, + object, + destination_id, + } + .execute(authority, wsv), + NumericValue::U128(object) => Transfer { + source_id, + object, + destination_id, + } + .execute(authority, wsv), + NumericValue::Fixed(object) => Transfer { + source_id, + object, + destination_id, + } + .execute(authority, wsv), + _ => Err(Error::Evaluate(InstructionType::Transfer.into())), + }, + ( + IdBox::AccountId(source_id), + Value::Id(IdBox::AssetDefinitionId(object)), + IdBox::AccountId(destination_id), + ) => Transfer { source_id, object, destination_id, } .execute(authority, wsv), - Value::Numeric(NumericValue::Fixed(object)) => Transfer { + ( + IdBox::AccountId(source_id), + Value::Id(IdBox::DomainId(object)), + IdBox::AccountId(destination_id), + ) => Transfer { source_id, object, destination_id, @@ -258,7 +280,7 @@ impl Execute for TransferBox { } } -impl Execute for SetKeyValueBox { +impl Execute for SetKeyValueExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let key = wsv.evaluate(&self.key)?; let value = wsv.evaluate(&self.value)?; @@ -293,7 +315,7 @@ impl Execute for SetKeyValueBox { } } -impl Execute for RemoveKeyValueBox { +impl Execute for RemoveKeyValueExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let key = 
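// With the widened `TransferExpr` dispatch above, ownership of an asset definition
// or of a whole domain can be transferred between accounts, alongside the existing
// numeric asset transfers. A usage sketch; the builder signature
// `TransferExpr::new(source, object, destination)` and the ids are assumptions:
let transfer_domain_ownership = TransferExpr::new(
    IdBox::AccountId("alice@wonderland".parse()?), // current owner
    IdBox::DomainId("wonderland".parse()?),        // the object being handed over
    IdBox::AccountId("mouse@wonderland".parse()?), // new owner
);
transfer_domain_ownership.execute(&authority, &mut wsv)?;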
wsv.evaluate(&self.key)?; iroha_logger::trace!(?key, %authority); @@ -307,12 +329,15 @@ impl Execute for RemoveKeyValueBox { IdBox::AccountId(object_id) => { RemoveKeyValue:: { object_id, key }.execute(authority, wsv) } + IdBox::DomainId(object_id) => { + RemoveKeyValue:: { object_id, key }.execute(authority, wsv) + } _ => Err(Error::Evaluate(InstructionType::RemoveKeyValue.into())), } } } -impl Execute for Conditional { +impl Execute for ConditionalExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { iroha_logger::trace!(?self); if wsv.evaluate(&self.condition)? { @@ -324,7 +349,7 @@ impl Execute for Conditional { } } -impl Execute for Pair { +impl Execute for PairExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { iroha_logger::trace!(?self); @@ -334,7 +359,7 @@ impl Execute for Pair { } } -impl Execute for SequenceBox { +impl Execute for SequenceExpr { #[iroha_logger::log(skip_all, name = "Sequence", fields(count))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { Span::current().record("count", self.instructions.len()); @@ -346,7 +371,7 @@ impl Execute for SequenceBox { } } -impl Execute for FailBox { +impl Execute for Fail { fn execute(self, _authority: &AccountId, _wsv: &mut WorldStateView) -> Result<(), Error> { iroha_logger::trace!(?self); @@ -354,86 +379,78 @@ impl Execute for FailBox { } } -impl Execute for GrantBox { +impl Execute for GrantExpr { #[iroha_logger::log(name = "grant", skip_all, fields(object))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; Span::current().record("object", &object.to_string()); iroha_logger::trace!(%destination_id, %authority); - match (destination_id, object) { - (IdBox::AccountId(destination_id), Value::PermissionToken(object)) => { - Grant:: { - object, - destination_id, - } - .execute(authority, wsv) + match object { + Value::PermissionToken(object) => Grant:: { + object, + destination_id, } - (IdBox::AccountId(destination_id), Value::Id(IdBox::RoleId(object))) => { - Grant:: { - object, - destination_id, - } - .execute(authority, wsv) + .execute(authority, wsv), + Value::Id(IdBox::RoleId(object)) => Grant:: { + object, + destination_id, } + .execute(authority, wsv), _ => Err(Error::Evaluate(InstructionType::Grant.into())), } } } -impl Execute for RevokeBox { +impl Execute for RevokeExpr { #[iroha_logger::log(name = "revoke", skip_all, fields(object))] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let destination_id = wsv.evaluate(&self.destination_id)?; let object = wsv.evaluate(&self.object)?; Span::current().record("object", &object.to_string()); iroha_logger::trace!(?destination_id, ?object, %authority); - match (destination_id, object) { - (IdBox::AccountId(destination_id), Value::PermissionToken(object)) => { - Revoke:: { - object, - destination_id, - } - .execute(authority, wsv) + match object { + Value::PermissionToken(object) => Revoke:: { + object, + destination_id, } - (IdBox::AccountId(destination_id), Value::Id(IdBox::RoleId(object))) => { - Revoke:: { - object, - destination_id, - } - .execute(authority, wsv) + .execute(authority, wsv), + Value::Id(IdBox::RoleId(object)) => Revoke:: { + object, + destination_id, } + .execute(authority, wsv), _ => Err(Error::Evaluate(InstructionType::Revoke.into())), } } } -impl Execute for 
SetParameterBox { +impl Execute for SetParameterExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let parameter = wsv.evaluate(&self.parameter)?; SetParameter { parameter }.execute(authority, wsv) } } -impl Execute for NewParameterBox { +impl Execute for NewParameterExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let parameter = wsv.evaluate(&self.parameter)?; NewParameter { parameter }.execute(authority, wsv) } } -impl Execute for UpgradeBox { +impl Execute for UpgradeExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let object = wsv.evaluate(&self.object)?; match object { - UpgradableBox::Validator(object) => { - Upgrade:: { object }.execute(authority, wsv) + UpgradableBox::Executor(object) => { + Upgrade:: { object }.execute(authority, wsv) } } } } -impl Execute for LogBox { +impl Execute for LogExpr { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let level = wsv.evaluate(&self.level)?; let msg = wsv.evaluate(&self.msg)?; @@ -449,40 +466,40 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use core::str::FromStr as _; use std::sync::Arc; use iroha_crypto::KeyPair; + use tokio::test; use super::*; - use crate::{kura::Kura, wsv::World, PeersIds}; + use crate::{kura::Kura, query::store::LiveQueryStore, wsv::World, PeersIds}; fn wsv_with_test_domains(kura: &Arc) -> Result { let world = World::with([], PeersIds::new()); - let mut wsv = WorldStateView::new(world, kura.clone()); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world, kura.clone(), query_handle); let genesis_account_id = AccountId::from_str("genesis@genesis")?; let account_id = AccountId::from_str("alice@wonderland")?; let (public_key, _) = KeyPair::generate()?.into(); let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; - RegisterBox::new(Domain::new(DomainId::from_str("wonderland")?)) + RegisterExpr::new(Domain::new(DomainId::from_str("wonderland")?)) .execute(&genesis_account_id, &mut wsv)?; - RegisterBox::new(Account::new(account_id, [public_key])) + RegisterExpr::new(Account::new(account_id, [public_key])) .execute(&genesis_account_id, &mut wsv)?; - RegisterBox::new(AssetDefinition::store(asset_definition_id)) + RegisterExpr::new(AssetDefinition::store(asset_definition_id)) .execute(&genesis_account_id, &mut wsv)?; Ok(wsv) } #[test] - fn asset_store() -> Result<()> { + async fn asset_store() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let account_id = AccountId::from_str("alice@wonderland")?; let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let asset_id = AssetId::new(asset_definition_id, account_id.clone()); - SetKeyValueBox::new( + SetKeyValueExpr::new( IdBox::from(asset_id.clone()), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], @@ -505,11 +522,11 @@ mod tests { } #[test] - fn account_metadata() -> Result<()> { + async fn account_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let account_id = AccountId::from_str("alice@wonderland")?; - SetKeyValueBox::new( + SetKeyValueExpr::new( IdBox::from(account_id.clone()), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], @@ -533,12 +550,12 @@ mod tests { } #[test] - fn asset_definition_metadata() -> Result<()> { + async fn 
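// The hunks above belong to the repo-wide Validator -> Executor rename:
// `UpgradableBox::Executor` is executed as `Upgrade::<Executor>`, and success is
// signalled by `ExecutorEvent::Upgraded` (see the world.rs hunk further down).
// A usage sketch; `Executor::new`, `WasmSmartContract::from_compiled`, and the
// wasm source are assumptions, not taken from this diff:
let upgrade = UpgradeExpr::new(Executor::new(WasmSmartContract::from_compiled(wasm_bytes)));
upgrade.execute(&authority, &mut wsv)?;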
asset_definition_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let account_id = AccountId::from_str("alice@wonderland")?; - SetKeyValueBox::new( + SetKeyValueExpr::new( IdBox::from(definition_id.clone()), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], @@ -561,12 +578,12 @@ mod tests { } #[test] - fn domain_metadata() -> Result<()> { + async fn domain_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let domain_id = DomainId::from_str("wonderland")?; let account_id = AccountId::from_str("alice@wonderland")?; - SetKeyValueBox::new( + SetKeyValueExpr::new( IdBox::from(domain_id.clone()), Name::from_str("Bytes")?, vec![1_u32, 2_u32, 3_u32], @@ -589,14 +606,14 @@ mod tests { } #[test] - fn executing_unregistered_trigger_should_return_error() -> Result<()> { + async fn executing_unregistered_trigger_should_return_error() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let account_id = AccountId::from_str("alice@wonderland")?; let trigger_id = TriggerId::from_str("test_trigger_id")?; assert!(matches!( - ExecuteTriggerBox::new(trigger_id) + ExecuteTriggerExpr::new(trigger_id) .execute(&account_id, &mut wsv) .expect_err("Error expected"), Error::Find(_) @@ -606,7 +623,7 @@ mod tests { } #[test] - fn unauthorized_trigger_execution_should_return_error() -> Result<()> { + async fn unauthorized_trigger_execution_should_return_error() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let mut wsv = wsv_with_test_domains(&kura)?; let account_id = AccountId::from_str("alice@wonderland")?; @@ -618,14 +635,14 @@ mod tests { .expect("Failed to generate KeyPair") .into(); let register_account = - RegisterBox::new(Account::new(fake_account_id.clone(), [public_key])); + RegisterExpr::new(Account::new(fake_account_id.clone(), [public_key])); register_account.execute(&account_id, &mut wsv)?; // register the trigger - let register_trigger = RegisterBox::new(Trigger::new( + let register_trigger = RegisterExpr::new(Trigger::new( trigger_id.clone(), Action::new( - Vec::::new(), + Vec::::new(), Repeats::Indefinitely, account_id.clone(), TriggeringFilterBox::ExecuteTrigger(ExecuteTriggerEventFilter::new( @@ -638,11 +655,11 @@ mod tests { register_trigger.execute(&account_id, &mut wsv)?; // execute with the valid account - ExecuteTriggerBox::new(trigger_id.clone()).execute(&account_id, &mut wsv)?; + ExecuteTriggerExpr::new(trigger_id.clone()).execute(&account_id, &mut wsv)?; // execute with the fake account assert!(matches!( - ExecuteTriggerBox::new(trigger_id) + ExecuteTriggerExpr::new(trigger_id) .execute(&fake_account_id, &mut wsv) .expect_err("Error expected"), Error::InvariantViolation(_) diff --git a/core/src/smartcontracts/isi/query.rs b/core/src/smartcontracts/isi/query.rs index 21d97aec86c..19671e06587 100644 --- a/core/src/smartcontracts/isi/query.rs +++ b/core/src/smartcontracts/isi/query.rs @@ -1,10 +1,5 @@ //! Query functionality. The common error type is also defined here, //! alongside functions for converting them into HTTP responses. -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use eyre::Result; use iroha_data_model::{prelude::*, query::error::QueryExecutionFail as Error}; use parity_scale_codec::{Decode, Encode}; @@ -52,13 +47,13 @@ impl_lazy! 
{ iroha_data_model::query::MetadataValue, iroha_data_model::query::TransactionQueryOutput, iroha_data_model::permission::PermissionTokenSchema, - iroha_data_model::trigger::Trigger, + iroha_data_model::trigger::Trigger, } /// Query Request statefully validated on the Iroha node side. -#[derive(Debug, Decode, Encode)] +#[derive(Debug, Clone, Decode, Encode)] #[repr(transparent)] -pub struct ValidQueryRequest(VersionedSignedQuery); +pub struct ValidQueryRequest(SignedQuery); impl ValidQueryRequest { /// Validate query. @@ -67,10 +62,7 @@ impl ValidQueryRequest { /// - Account doesn't exist /// - Account doesn't have the correct public key /// - Account has incorrect permissions - pub fn validate( - query: VersionedSignedQuery, - wsv: &WorldStateView, - ) -> Result { + pub fn validate(query: SignedQuery, wsv: &WorldStateView) -> Result { let account_has_public_key = wsv .map_account(query.authority(), |account| { account.signatories.contains(query.signature().public_key()) @@ -82,7 +74,7 @@ impl ValidQueryRequest { )) .into()); } - wsv.validator() + wsv.executor() .validate_query(wsv, query.authority(), query.query().clone())?; Ok(Self(query)) } @@ -130,7 +122,6 @@ impl ValidQuery for QueryBox { FindAssetDefinitionById, FindAssetQuantityById, FindTotalAssetQuantityByAssetDefinitionId, - IsAssetDefinitionOwner, FindDomainById, FindBlockHeaderByHash, FindTransactionByHash, @@ -174,20 +165,18 @@ impl ValidQuery for QueryBox { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use std::str::FromStr as _; use iroha_crypto::{Hash, HashOf, KeyPair}; - use iroha_data_model::{ - block::VersionedCommittedBlock, query::error::FindError, transaction::TransactionLimits, - }; + use iroha_data_model::{query::error::FindError, transaction::TransactionLimits}; + use iroha_primitives::unique_vec::UniqueVec; use once_cell::sync::Lazy; + use tokio::test; use super::*; use crate::{ - block::*, kura::Kura, smartcontracts::isi::Registrable as _, tx::AcceptedTransaction, - wsv::World, PeersIds, + block::*, kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, + sumeragi::network_topology::Topology, tx::AcceptedTransaction, wsv::World, PeersIds, }; static ALICE_KEYS: Lazy = Lazy::new(|| KeyPair::generate().unwrap()); @@ -261,7 +250,8 @@ mod tests { invalid_tx_per_block: usize, ) -> Result { let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone()); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone(), query_handle); let limits = TransactionLimits { max_instruction_number: 1, @@ -275,14 +265,14 @@ mod tests { wsv.config.transaction_limits = limits; let valid_tx = { - let instructions: [InstructionBox; 0] = []; + let instructions: [InstructionExpr; 0] = []; let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions(instructions) .sign(ALICE_KEYS.clone())?; AcceptedTransaction::accept(tx, &limits)? 
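// The test hunks above and below replace the field-initializer `BlockBuilder`
// with a staged builder: collect transactions, chain onto the current WSV, sign,
// then commit against a topology. Condensed from the diff:
let topology = Topology::new(UniqueVec::new());
let block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new())
    .chain(0, &mut wsv)        // view-change index 0
    .sign(ALICE_KEYS.clone())? // sign with the test key pair
    .commit(&topology)
    .expect("Block is valid");
wsv.apply(&block)?;
kura.store_block(block);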
}; let invalid_tx = { - let isi = FailBox::new("fail"); + let isi = Fail::new("fail"); let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions([isi.clone(), isi]) .sign(ALICE_KEYS.clone())?; @@ -292,33 +282,22 @@ mod tests { let mut transactions = vec![valid_tx; valid_tx_per_block]; transactions.append(&mut vec![invalid_tx; invalid_tx_per_block]); - let first_block: VersionedCommittedBlock = BlockBuilder { - transactions: transactions.clone(), - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: crate::sumeragi::network_topology::Topology::new(vec![]), - key_pair: ALICE_KEYS.clone(), - wsv: &mut wsv.clone(), - } - .build() - .commit_unchecked() - .into(); + let topology = Topology::new(UniqueVec::new()); + let first_block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new()) + .chain(0, &mut wsv) + .sign(ALICE_KEYS.clone())? + .commit(&topology) + .expect("Block is valid"); wsv.apply(&first_block)?; kura.store_block(first_block); for _ in 1u64..blocks { - let block: VersionedCommittedBlock = BlockBuilder { - transactions: transactions.clone(), - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: crate::sumeragi::network_topology::Topology::new(vec![]), - key_pair: ALICE_KEYS.clone(), - wsv: &mut wsv.clone(), - } - .build() - .commit_unchecked() - .into(); + let block = BlockBuilder::new(transactions.clone(), topology.clone(), Vec::new()) + .chain(0, &mut wsv) + .sign(ALICE_KEYS.clone())? + .commit(&topology) + .expect("Block is valid"); wsv.apply(&block)?; kura.store_block(block); @@ -328,9 +307,10 @@ mod tests { } #[test] - fn asset_store() -> Result<()> { + async fn asset_store() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let wsv = WorldStateView::new(world_with_test_asset_with_metadata(), kura); + let query_handle = LiveQueryStore::test().start(); + let wsv = WorldStateView::new(world_with_test_asset_with_metadata(), kura, query_handle); let asset_definition_id = AssetDefinitionId::from_str("rose#wonderland")?; let asset_id = AssetId::new(asset_definition_id, ALICE_ID.clone()); @@ -344,9 +324,10 @@ mod tests { } #[test] - fn account_metadata() -> Result<()> { + async fn account_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let wsv = WorldStateView::new(world_with_test_account_with_metadata()?, kura); + let query_handle = LiveQueryStore::test().start(); + let wsv = WorldStateView::new(world_with_test_account_with_metadata()?, kura, query_handle); let bytes = FindAccountKeyValueByIdAndKey::new(ALICE_ID.clone(), Name::from_str("Bytes")?) 
.execute(&wsv)?; @@ -358,7 +339,7 @@ mod tests { } #[test] - fn find_all_blocks() -> Result<()> { + async fn find_all_blocks() -> Result<()> { let num_blocks = 100; let wsv = wsv_with_test_blocks_and_transactions(num_blocks, 1, 1)?; @@ -371,7 +352,7 @@ mod tests { } #[test] - fn find_all_block_headers() -> Result<()> { + async fn find_all_block_headers() -> Result<()> { let num_blocks = 100; let wsv = wsv_with_test_blocks_and_transactions(num_blocks, 1, 1)?; @@ -384,13 +365,13 @@ mod tests { } #[test] - fn find_block_header_by_hash() -> Result<()> { + async fn find_block_header_by_hash() -> Result<()> { let wsv = wsv_with_test_blocks_and_transactions(1, 1, 1)?; let block = wsv.all_blocks().last().expect("WSV is empty"); assert_eq!( FindBlockHeaderByHash::new(block.hash()).execute(&wsv)?, - block.as_v1().header + block.payload().header ); assert!( @@ -403,7 +384,7 @@ mod tests { } #[test] - fn find_all_transactions() -> Result<()> { + async fn find_all_transactions() -> Result<()> { let num_blocks = 100; let wsv = wsv_with_test_blocks_and_transactions(num_blocks, 1, 1)?; @@ -427,35 +408,31 @@ mod tests { } #[test] - fn find_transaction() -> Result<()> { + async fn find_transaction() -> Result<()> { let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone()); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_domains(), kura.clone(), query_handle); - let instructions: [InstructionBox; 0] = []; + let instructions: [InstructionExpr; 0] = []; let tx = TransactionBuilder::new(ALICE_ID.clone()) .with_instructions(instructions) .sign(ALICE_KEYS.clone())?; - let tx_limits = &wsv.transaction_validator().transaction_limits; + let tx_limits = &wsv.transaction_executor().transaction_limits; let va_tx = AcceptedTransaction::accept(tx, tx_limits)?; - let vcb: VersionedCommittedBlock = BlockBuilder { - transactions: vec![va_tx.clone()], - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: crate::sumeragi::network_topology::Topology::new(vec![]), - key_pair: ALICE_KEYS.clone(), - wsv: &mut wsv.clone(), - } - .build() - .commit_unchecked() - .into(); + let topology = Topology::new(UniqueVec::new()); + let vcb = BlockBuilder::new(vec![va_tx.clone()], topology.clone(), Vec::new()) + .chain(0, &mut wsv) + .sign(ALICE_KEYS.clone())? 
+ .commit(&topology) + .expect("Block is valid"); wsv.apply(&vcb)?; kura.store_block(vcb); let unapplied_tx = TransactionBuilder::new(ALICE_ID.clone()) - .with_instructions([UnregisterBox::new( + .with_instructions([UnregisterExpr::new( "account@domain".parse::().unwrap(), )]) .sign(ALICE_KEYS.clone())?; @@ -468,16 +445,13 @@ mod tests { let found_accepted = FindTransactionByHash::new(va_tx.hash()).execute(&wsv)?; if found_accepted.transaction.error.is_none() { - assert_eq!( - va_tx.hash().transmute(), - found_accepted.transaction.value.hash() - ) + assert_eq!(va_tx.hash(), found_accepted.transaction.hash()) } Ok(()) } #[test] - fn domain_metadata() -> Result<()> { + async fn domain_metadata() -> Result<()> { let kura = Kura::blank_kura_for_testing(); let wsv = { let mut metadata = Metadata::new(); @@ -498,7 +472,8 @@ mod tests { AssetDefinition::quantity(asset_definition_id).build(&ALICE_ID) ) .is_none()); - WorldStateView::new(World::with([domain], PeersIds::new()), kura) + let query_handle = LiveQueryStore::test().start(); + WorldStateView::new(World::with([domain], PeersIds::new()), kura, query_handle) }; let domain_id = DomainId::from_str("wonderland")?; diff --git a/core/src/smartcontracts/isi/triggers/mod.rs b/core/src/smartcontracts/isi/triggers/mod.rs index 6a9a1a6ad8c..7c814b6fe47 100644 --- a/core/src/smartcontracts/isi/triggers/mod.rs +++ b/core/src/smartcontracts/isi/triggers/mod.rs @@ -22,9 +22,8 @@ pub mod isi { use super::{super::prelude::*, *}; - impl Execute for Register> { + impl Execute for Register> { #[metrics(+"register_trigger")] - #[allow(clippy::expect_used)] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let new_trigger = self.object; @@ -82,7 +81,7 @@ pub mod isi { } } - impl Execute for Unregister> { + impl Execute for Unregister> { #[metrics(+"unregister_trigger")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let trigger_id = self.object_id.clone(); @@ -101,7 +100,7 @@ pub mod isi { } } - impl Execute for Mint, u32> { + impl Execute for Mint> { #[metrics(+"mint_trigger_repetitions")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let id = self.destination_id; @@ -133,7 +132,7 @@ pub mod isi { } } - impl Execute for Burn, u32> { + impl Execute for Burn> { #[metrics(+"burn_trigger_repetitions")] fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let trigger = self.destination_id; @@ -155,7 +154,7 @@ pub mod isi { } } - impl Execute for ExecuteTriggerBox { + impl Execute for ExecuteTriggerExpr { #[metrics(+"execute_trigger")] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let id = wsv.evaluate(&self.trigger_id)?; @@ -177,7 +176,7 @@ pub mod isi { if allow_execute { Ok(()) } else { - // TODO: We should check authority on Runtime Validator level + // TODO: We should check authority on Runtime Executor level // so currently the error message is not exhaustive Err(Error::InvariantViolation(String::from( "Trigger can't be executed manually", @@ -199,7 +198,7 @@ pub mod query { use iroha_data_model::{ events::TriggeringFilterBox, query::{error::QueryExecutionFail as Error, MetadataValue}, - trigger::{OptimizedExecutable, Trigger, TriggerId}, + trigger::{Trigger, TriggerId}, }; use super::*; @@ -217,29 +216,19 @@ pub mod query { impl ValidQuery for FindTriggerById { #[metrics(+"find_trigger_by_id")] - fn execute( - &self, - wsv: &WorldStateView, - ) -> Result, 
Error> { + fn execute(&self, wsv: &WorldStateView) -> Result, Error> { let id = wsv .evaluate(&self.id) .map_err(|e| Error::Evaluate(format!("Failed to evaluate trigger id. {e}")))?; iroha_logger::trace!(%id); - // Can't use just `ActionTrait::clone_and_box` cause this will trigger lifetime mismatch + // Can't use just `LoadedActionTrait::clone_and_box` cause this will trigger lifetime mismatch #[allow(clippy::redundant_closure_for_method_calls)] - let Action { - executable: loaded_executable, - repeats, - authority, - filter, - metadata, - } = wsv + let loaded_action = wsv .triggers() .inspect_by_id(&id, |action| action.clone_and_box()) .ok_or_else(|| Error::Find(FindError::Trigger(id.clone())))?; - let action = - Action::new(loaded_executable, repeats, authority, filter).with_metadata(metadata); + let action = wsv.triggers().get_original_action(loaded_action); // TODO: Should we redact the metadata if the account is not the authority/owner? Ok(Trigger::new(id, action)) @@ -274,32 +263,22 @@ pub mod query { fn execute<'wsv>( &self, wsv: &'wsv WorldStateView, - ) -> eyre::Result< - Box> + 'wsv>, - Error, - > { + ) -> eyre::Result> + 'wsv>, Error> + { let domain_id = wsv .evaluate(&self.domain_id) .map_err(|e| Error::Evaluate(format!("Failed to evaluate domain id. {e}")))?; - Ok(Box::new(wsv.triggers().inspect_by_domain_id( - &domain_id, - |trigger_id, action| { - let Action { - executable: loaded_executable, - repeats, - authority, - filter, - metadata, - } = action.clone_and_box(); - - Trigger::new( - trigger_id.clone(), - Action::new(loaded_executable, repeats, authority, filter) - .with_metadata(metadata), - ) - }, - ))) + Ok(Box::new( + wsv.triggers() + .inspect_by_domain_id(&domain_id, |trigger_id, action| { + (trigger_id.clone(), action.clone_and_box()) + }) + .map(|(trigger_id, action)| { + let action = wsv.triggers().get_original_action(action); + Trigger::new(trigger_id, action) + }), + )) } } } diff --git a/core/src/smartcontracts/isi/triggers/set.rs b/core/src/smartcontracts/isi/triggers/set.rs index 1897ae48081..3cd20738837 100644 --- a/core/src/smartcontracts/isi/triggers/set.rs +++ b/core/src/smartcontracts/isi/triggers/set.rs @@ -17,7 +17,8 @@ use iroha_data_model::{ isi::error::{InstructionExecutionError, MathError}, prelude::*, query::error::FindError, - trigger::{action::ActionTrait, OptimizedExecutable, Trigger, WasmInternalRepr}, + transaction::WasmSmartContract, + trigger::Trigger, }; use serde::{ de::{DeserializeSeed, MapAccess, Visitor}, @@ -38,8 +39,98 @@ pub enum Error { /// Result type for [`Set`] operations. pub type Result = core::result::Result; -/// Type of action with pre-loaded executable. -pub type LoadedAction = Action; +/// Same as [`Action`](`iroha_data_model::trigger::Action`) but with +/// executable in pre-loaded form +#[derive(Clone, Debug)] +pub struct LoadedAction { + /// The executable linked to this action in loaded form + executable: LoadedExecutable, + /// The repeating scheme of the action. It's kept as part of the + /// action and not inside the [`Trigger`] type, so that further + /// sanity checking can be done. + pub repeats: Repeats, + /// Account executing this action + pub authority: AccountId, + /// Defines events which trigger the `Action` + pub filter: F, + /// Metadata used as persistent storage for trigger data. 
+ pub metadata: Metadata, +} + +/// Trait common for all `LoadedAction`s +pub trait LoadedActionTrait { + /// Get action executable + fn executable(&self) -> &LoadedExecutable; + + /// Get action repeats enum + fn repeats(&self) -> &Repeats; + + /// Set action repeats + fn set_repeats(&mut self, repeats: Repeats); + + /// Get action technical account + fn authority(&self) -> &AccountId; + + /// Get action metadata + fn metadata(&self) -> &Metadata; + + /// Check if action is mintable. + fn mintable(&self) -> bool; + + /// Convert action to a boxed representation + fn into_boxed(self) -> LoadedAction; + + /// Same as [`into_boxed()`](LoadedActionTrait::into_boxed) but clones `self` + fn clone_and_box(&self) -> LoadedAction; +} + +impl + Clone> LoadedActionTrait for LoadedAction { + fn executable(&self) -> &LoadedExecutable { + &self.executable + } + + fn repeats(&self) -> &iroha_data_model::trigger::action::Repeats { + &self.repeats + } + + fn set_repeats(&mut self, repeats: iroha_data_model::trigger::action::Repeats) { + self.repeats = repeats; + } + + fn authority(&self) -> &AccountId { + &self.authority + } + + fn metadata(&self) -> &Metadata { + &self.metadata + } + + fn mintable(&self) -> bool { + self.filter.mintable() + } + + fn into_boxed(self) -> LoadedAction { + let Self { + executable, + repeats, + authority, + filter, + metadata, + } = self; + + LoadedAction { + executable, + repeats, + authority, + filter: filter.into(), + metadata, + } + } + + fn clone_and_box(&self) -> LoadedAction { + self.clone().into_boxed() + } +} /// Specialized structure that maps event filters to Triggers. // NB: `Set` has custom `Serialize` and `DeserializeSeed` implementations @@ -57,7 +148,7 @@ pub struct Set { /// Trigger ids with type of events they process ids: HashMap, /// Original [`WasmSmartContract`]s by [`TriggerId`] for querying purposes. - original_contracts: HashMap, + original_contracts: HashMap, WasmSmartContract>, /// List of actions that should be triggered by events provided by `handle_*` methods. /// Vector is used to save the exact triggers order. matched_ids: Vec<(Event, TriggerId)>, @@ -85,7 +176,7 @@ impl Serialize for TriggersWithContext<'_, F> { { let mut map = serializer.serialize_map(Some(self.triggers.len()))?; for (id, action) in self.triggers.iter() { - let action = self.set.get_original_action(id, action.clone()); + let action = self.set.get_original_action(action.clone()); map.serialize_entry(&id, &action)?; } map.end() @@ -145,7 +236,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> { while let Some(key) = map.next_key::()? 
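// In the `Set` rework above, original WASM blobs are stored once per content hash
// (`HashOf<WasmSmartContract>`) rather than once per trigger id, so triggers that
// share a contract no longer duplicate the blob. The registration path, condensed
// from `add_to` below, with the extraction-stripped generics left implicit:
let hash = HashOf::new(&bytes);
let loaded = LoadedExecutable::Wasm(LoadedWasm {
    module: wasm::load_module(engine, &bytes)?,
    blob_hash: hash,
});
// keep the original bytes around, keyed by hash, to answer queries later
self.original_contracts.insert(hash, bytes);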
{ match key.as_str() { "data_triggers" => { - let triggers: HashMap> = + let triggers: HashMap> = map.next_value()?; for (id, action) in triggers { set.add_data_trigger(self.loader.engine, Trigger::new(id, action)) @@ -153,10 +244,8 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> { } } "pipeline_triggers" => { - let triggers: HashMap< - TriggerId, - Action, - > = map.next_value()?; + let triggers: HashMap> = + map.next_value()?; for (id, action) in triggers { set.add_pipeline_trigger( self.loader.engine, @@ -166,7 +255,7 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> { } } "time_triggers" => { - let triggers: HashMap> = + let triggers: HashMap> = map.next_value()?; for (id, action) in triggers { set.add_time_trigger(self.loader.engine, Trigger::new(id, action)) @@ -174,10 +263,8 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, Set> { } } "by_call_triggers" => { - let triggers: HashMap< - TriggerId, - Action, - > = map.next_value()?; + let triggers: HashMap> = + map.next_value()?; for (id, action) in triggers { set.add_by_call_trigger( self.loader.engine, @@ -227,7 +314,7 @@ impl Set { pub fn add_data_trigger( &mut self, engine: &wasmtime::Engine, - trigger: Trigger, + trigger: Trigger, ) -> Result { self.add_to(engine, trigger, TriggeringEventType::Data, |me| { &mut me.data_triggers @@ -245,7 +332,7 @@ impl Set { pub fn add_pipeline_trigger( &mut self, engine: &wasmtime::Engine, - trigger: Trigger, + trigger: Trigger, ) -> Result { self.add_to(engine, trigger, TriggeringEventType::Pipeline, |me| { &mut me.pipeline_triggers @@ -263,7 +350,7 @@ impl Set { pub fn add_time_trigger( &mut self, engine: &wasmtime::Engine, - trigger: Trigger, + trigger: Trigger, ) -> Result { self.add_to(engine, trigger, TriggeringEventType::Time, |me| { &mut me.time_triggers @@ -281,7 +368,7 @@ impl Set { pub fn add_by_call_trigger( &mut self, engine: &wasmtime::Engine, - trigger: Trigger, + trigger: Trigger, ) -> Result { self.add_to(engine, trigger, TriggeringEventType::ExecuteTrigger, |me| { &mut me.by_call_triggers @@ -298,7 +385,7 @@ impl Set { fn add_to( &mut self, engine: &wasmtime::Engine, - trigger: Trigger, + trigger: Trigger, event_type: TriggeringEventType, map: impl FnOnce(&mut Self) -> &mut HashMap>, ) -> Result { @@ -317,12 +404,13 @@ impl Set { let loaded_executable = match executable { Executable::Wasm(bytes) => { + let hash = HashOf::new(&bytes); let loaded = LoadedExecutable::Wasm(LoadedWasm { module: wasm::load_module(engine, &bytes)?, - blob_hash: HashOf::new(&bytes), + blob_hash: hash, }); // Store original executable representation to respond to queries with. 
- self.original_contracts.insert(trigger_id.clone(), bytes); + self.original_contracts.insert(hash, bytes); loaded } Executable::Instructions(instructions) => LoadedExecutable::Instructions(instructions), @@ -345,16 +433,17 @@ impl Set { /// Returns `None` if there's no [`Trigger`] /// with specified `id` that has WASM executable #[inline] - pub fn get_original_contract(&self, id: &TriggerId) -> Option<&WasmSmartContract> { - self.original_contracts.get(id) + pub fn get_original_contract( + &self, + hash: &HashOf, + ) -> Option<&WasmSmartContract> { + self.original_contracts.get(hash) } - fn get_original_action( - &self, - id: &TriggerId, - action: LoadedAction, - ) -> Action { - let Action { + /// Convert [`LoadedAction`] to original [`Action`] by retrieving original + /// [`WasmSmartContract`] if applicable + pub fn get_original_action(&self, action: LoadedAction) -> Action { + let LoadedAction { executable, repeats, authority, @@ -363,9 +452,9 @@ impl Set { } = action; let original_executable = match executable { - LoadedExecutable::Wasm(_) => { + LoadedExecutable::Wasm(LoadedWasm { ref blob_hash, .. }) => { let original_wasm = self - .get_original_contract(id) + .get_original_contract(blob_hash) .cloned() .expect("No original smartcontract saved for trigger. This is a bug."); Executable::Wasm(original_wasm) @@ -388,6 +477,43 @@ impl Set { self.ids.keys() } + /// Get [`LoadedExecutable`] for given [`TriggerId`]. + /// Returns `None` if `id` is not in the set. + pub fn get_executable(&self, id: &TriggerId) -> Option<&LoadedExecutable> { + let event_type = self.ids.get(id)?; + + Some(match event_type { + TriggeringEventType::Data => { + &self + .data_triggers + .get(id) + .expect("`Set::data_triggers` doesn't contain required id. This is a bug") + .executable + } + TriggeringEventType::Pipeline => { + &self + .pipeline_triggers + .get(id) + .expect("`Set::pipeline_triggers` doesn't contain required id. This is a bug") + .executable + } + TriggeringEventType::Time => { + &self + .time_triggers + .get(id) + .expect("`Set::time_triggers` doesn't contain required id. This is a bug") + .executable + } + TriggeringEventType::ExecuteTrigger => { + &self + .by_call_triggers + .get(id) + .expect("`Set::by_call_triggers` doesn't contain required id. This is a bug") + .executable + } + }) + } + /// Apply `f` to triggers that belong to the given [`DomainId`] /// /// Return an empty list if [`Set`] doesn't contain any triggers belonging to [`DomainId`]. @@ -397,7 +523,7 @@ impl Set { f: F, ) -> impl Iterator + '_ where - F: Fn(&TriggerId, &dyn ActionTrait) -> R, + F: Fn(&TriggerId, &dyn LoadedActionTrait) -> R, { let domain_id = domain_id.clone(); @@ -440,7 +566,7 @@ impl Set { /// Return [`None`] if [`Set`] doesn't contain the trigger with the given `id`. pub fn inspect_by_id(&self, id: &TriggerId, f: F) -> Option where - F: Fn(&dyn ActionTrait) -> R, + F: Fn(&dyn LoadedActionTrait) -> R, { let event_type = self.ids.get(id).copied()?; @@ -474,7 +600,7 @@ impl Set { /// Return [`None`] if [`Set`] doesn't contain the trigger with the given `id`. pub fn inspect_by_id_mut(&mut self, id: &TriggerId, f: F) -> Option where - F: Fn(&mut dyn ActionTrait) -> R, + F: Fn(&mut dyn LoadedActionTrait) -> R, { let event_type = self.ids.get(id).copied()?; @@ -507,32 +633,49 @@ impl Set { /// /// Return `false` if [`Set`] doesn't contain the trigger with the given `id`. 
pub fn remove(&mut self, id: &TriggerId) -> bool { - self.original_contracts.remove(id); - self.ids - .remove(id) - .map(|event_type| match event_type { - TriggeringEventType::Data => self - .data_triggers - .remove(id) - .map(|_| ()) - .expect("`Set::data_triggers` doesn't contain required id. This is a bug"), - TriggeringEventType::Pipeline => { - self.pipeline_triggers.remove(id).map(|_| ()).expect( - "`Set::pipeline_triggers` doesn't contain required id. This is a bug", - ) - } - TriggeringEventType::Time => self - .time_triggers - .remove(id) - .map(|_| ()) - .expect("`Set::time_triggers` doesn't contain required id. This is a bug"), - TriggeringEventType::ExecuteTrigger => { - self.by_call_triggers.remove(id).map(|_| ()).expect( - "`Set::by_call_triggers` doesn't contain required id. This is a bug", - ) - } - }) - .is_some() + // Used in a map that requires this signature + #[allow(clippy::needless_pass_by_value)] + fn extract_blob_hash(action: LoadedAction) -> Option> { + match action.executable { + LoadedExecutable::Wasm(LoadedWasm { blob_hash, .. }) => Some(blob_hash), + LoadedExecutable::Instructions(_) => None, + } + } + + let Some(event_type) = self.ids.remove(id) else { + return false; + }; + + let blob_hash = match event_type { + TriggeringEventType::Data => self + .data_triggers + .remove(id) + .map(extract_blob_hash) + .expect("`Set::data_triggers` doesn't contain required id. This is a bug"), + TriggeringEventType::Pipeline => self + .pipeline_triggers + .remove(id) + .map(extract_blob_hash) + .expect("`Set::pipeline_triggers` doesn't contain required id. This is a bug"), + TriggeringEventType::Time => self + .time_triggers + .remove(id) + .map(extract_blob_hash) + .expect("`Set::time_triggers` doesn't contain required id. This is a bug"), + TriggeringEventType::ExecuteTrigger => self + .by_call_triggers + .remove(id) + .map(extract_blob_hash) + .expect("`Set::by_call_triggers` doesn't contain required id. This is a bug"), + }; + + if let Some(blob_hash) = blob_hash { + self.original_contracts + .remove(&blob_hash) + .expect("`Set::original_contracts` doesn't contain required hash. This is a bug"); + } + + true } /// Check if [`Set`] contains `id`. 
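// New in the hunks above: `Set::get_executable` resolves a trigger id through the
// id -> event-type index and then the matching per-event-type map. A dispatch
// sketch; the surrounding runtime plumbing is elided:
match set.get_executable(&trigger_id) {
    Some(LoadedExecutable::Wasm(_)) => { /* hand the pre-compiled module to the wasm runtime */ }
    Some(LoadedExecutable::Instructions(_isi)) => { /* execute each InstructionExpr in order */ }
    None => { /* unknown trigger id */ }
}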
@@ -718,7 +861,7 @@ pub enum LoadedExecutable { /// Loaded WASM Wasm(LoadedWasm), /// Vector of ISI - Instructions(Vec), + Instructions(Vec), } impl core::fmt::Debug for LoadedExecutable { @@ -735,24 +878,6 @@ impl core::fmt::Debug for LoadedExecutable { } } -impl From for OptimizedExecutable { - fn from(executable: LoadedExecutable) -> Self { - match executable { - LoadedExecutable::Wasm(LoadedWasm { module, blob_hash }) => { - OptimizedExecutable::WasmInternalRepr(WasmInternalRepr { - serialized: module - .serialize() - .expect("Serialization of optimized wasm module should always succeed"), - blob_hash, - }) - } - LoadedExecutable::Instructions(instructions) => { - OptimizedExecutable::Instructions(instructions) - } - } - } -} - /// [`Set::mod_repeats()`] error #[derive(Debug, Clone, thiserror::Error, displaydoc::Display)] pub enum ModRepeatsError { diff --git a/core/src/smartcontracts/isi/tx.rs b/core/src/smartcontracts/isi/tx.rs index dbc85541346..b33fa69f7f5 100644 --- a/core/src/smartcontracts/isi/tx.rs +++ b/core/src/smartcontracts/isi/tx.rs @@ -5,7 +5,7 @@ use std::sync::Arc; use eyre::{Result, WrapErr}; use iroha_crypto::HashOf; use iroha_data_model::{ - block::VersionedCommittedBlock, + block::SignedBlock, evaluate::ExpressionEvaluator, prelude::*, query::{ @@ -18,11 +18,11 @@ use iroha_telemetry::metrics; use super::*; -pub(crate) struct BlockTransactionIter(Arc, usize); -pub(crate) struct BlockTransactionRef(Arc, usize); +pub(crate) struct BlockTransactionIter(Arc, usize); +pub(crate) struct BlockTransactionRef(Arc, usize); impl BlockTransactionIter { - fn new(block: Arc) -> Self { + fn new(block: Arc) -> Self { Self(block, 0) } } @@ -31,9 +31,7 @@ impl Iterator for BlockTransactionIter { type Item = BlockTransactionRef; fn next(&mut self) -> Option { - let block = self.0.as_v1(); - - if self.1 < block.transactions.len() { + if self.1 < self.0.payload().transactions.len() { let res = Some(BlockTransactionRef(Arc::clone(&self.0), self.1)); self.1 += 1; @@ -45,17 +43,15 @@ impl Iterator for BlockTransactionIter { } impl BlockTransactionRef { - fn block_hash(&self) -> HashOf { + fn block_hash(&self) -> HashOf { self.0.hash() } fn authority(&self) -> &AccountId { - let block = self.0.as_v1(); - - &block.transactions[self.1].payload().authority + &self.0.payload().transactions[self.1].payload().authority } fn value(&self) -> TransactionValue { - self.0.as_v1().transactions[self.1].clone() + self.0.payload().transactions[self.1].clone() } } @@ -115,9 +111,9 @@ impl ValidQuery for FindTransactionByHash { .ok_or_else(|| FindError::Transaction(tx_hash))?; let block_hash = block.hash(); - let block = block.as_v1(); block + .payload() .transactions .iter() .find(|transaction| transaction.value.hash() == tx_hash) diff --git a/core/src/smartcontracts/isi/world.rs b/core/src/smartcontracts/isi/world.rs index 681599efc33..64199fd9eb8 100644 --- a/core/src/smartcontracts/isi/world.rs +++ b/core/src/smartcontracts/isi/world.rs @@ -33,7 +33,7 @@ pub mod isi { let peer_id = self.object.id; let world = wsv.world_mut(); - if !world.trusted_peers_ids.insert(peer_id.clone()) { + if !world.trusted_peers_ids.push(peer_id.clone()) { return Err(RepetitionError { instruction_type: InstructionType::Register, id: IdBox::PeerId(peer_id), @@ -52,9 +52,11 @@ pub mod isi { fn execute(self, _authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let peer_id = self.object_id; let world = wsv.world_mut(); - if !world.trusted_peers_ids.remove(&peer_id) { + let Some(index) = 
world.trusted_peers_ids.iter().position(|id| id == &peer_id) else { return Err(FindError::Peer(peer_id).into()); - } + }; + + world.trusted_peers_ids.remove(index); wsv.emit_events(Some(PeerEvent::Removed(peer_id))); @@ -144,21 +146,17 @@ pub mod isi { fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { let role_id = self.object_id; - let mut accounts_with_role = vec![]; - for domain in wsv.domains().values() { - let account_ids = domain.accounts.values().filter_map(|account| { - if account.roles.contains(&role_id) { - return Some(account.id().clone()); - } - - None - }); - - accounts_with_role.extend(account_ids); - } + let accounts_with_role = wsv + .world + .account_roles + .iter() + .filter(|role| role.role_id.eq(&role_id)) + .map(|role| &role.account_id) + .cloned() + .collect::>(); for account_id in accounts_with_role { - let revoke: Revoke = Revoke { + let revoke = Revoke { object: role_id.clone(), destination_id: account_id, }; @@ -216,16 +214,16 @@ pub mod isi { } } - impl Execute for Upgrade { - #[metrics(+"upgrade_validator")] + impl Execute for Upgrade { + #[metrics(+"upgrade_executor")] fn execute(self, authority: &AccountId, wsv: &mut WorldStateView) -> Result<(), Error> { - let raw_validator = self.object; + let raw_executor = self.object; - // Cloning validator to avoid multiple mutable borrows of `wsv`. + // Cloning executor to avoid multiple mutable borrows of `wsv`. // Also it's a cheap operation. - let mut upgraded_validator = wsv.validator().clone(); - upgraded_validator - .migrate(raw_validator, wsv, authority) + let mut upgraded_executor = wsv.executor().clone(); + upgraded_executor + .migrate(raw_executor, wsv, authority) .map_err(|migration_error| { InvalidParameterError::Wasm(format!( "{:?}", @@ -233,9 +231,9 @@ pub mod isi { )) })?; - wsv.world_mut().validator = upgraded_validator; + wsv.world_mut().executor = upgraded_executor; - wsv.emit_events(std::iter::once(ValidatorEvent::Upgraded)); + wsv.emit_events(std::iter::once(ExecutorEvent::Upgraded)); Ok(()) } diff --git a/core/src/smartcontracts/mod.rs b/core/src/smartcontracts/mod.rs index 056dc8a6ab8..05d0195defd 100644 --- a/core/src/smartcontracts/mod.rs +++ b/core/src/smartcontracts/mod.rs @@ -77,7 +77,7 @@ impl iroha_data_model::evaluate::Context for Context<'_> { .map(|value| match value { LazyValue::Value(value) => value, // NOTE: This will only be executed when evaluating an expression for an - // instruction, i.e. it will only be executed from the validator/executor. + // instruction, i.e. it will only be executed from the executor. LazyValue::Iter(iter) => Value::Vec(iter.collect()), }) .map_err(Into::into) diff --git a/core/src/smartcontracts/wasm.rs b/core/src/smartcontracts/wasm.rs index ba9aef57c0e..014960c147f 100644 --- a/core/src/smartcontracts/wasm.rs +++ b/core/src/smartcontracts/wasm.rs @@ -1,11 +1,10 @@ //! This module contains logic related to executing smartcontracts via //! `WebAssembly` VM Smartcontracts can be written in Rust, compiled //! 
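// `trusted_peers_ids` above switches from a set (`insert`/`remove`) to a
// `UniqueVec` (`push`/positional `remove`), so unregistering a peer is now a
// linear scan for its index. Condensed from the Unregister<Peer> hunk:
let Some(index) = world.trusted_peers_ids.iter().position(|id| id == &peer_id) else {
    return Err(FindError::Peer(peer_id).into());
};
world.trusted_peers_ids.remove(index); // positional removal keeps the remaining order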
to wasm format and submitted in a transaction -#![allow(clippy::doc_link_with_quotes, clippy::arithmetic_side_effects)] use error::*; -use import_traits::{ - ExecuteOperations as _, GetValidatorPayloads as _, SetPermissionTokenSchema as _, +use import::traits::{ + ExecuteOperations as _, GetExecutorPayloads as _, SetPermissionTokenSchema as _, }; use iroha_config::{ base::proxy::Builder, @@ -13,64 +12,104 @@ use iroha_config::{ }; use iroha_data_model::{ account::AccountId, - isi::InstructionBox, + executor::{self, MigrationResult}, + isi::InstructionExpr, permission::PermissionTokenSchema, prelude::*, - validator::{self, MigrationResult}, - wasm::{export, import, payloads}, - Level as LogLevel, ValidationFail, + query::{QueryBox, QueryId, QueryRequest, QueryWithParameters}, + smart_contract::{ + payloads::{self, Validate}, + SmartContractQueryRequest, + }, + BatchedResponse, Level as LogLevel, ValidationFail, }; use iroha_logger::debug; // NOTE: Using error_span so that span info is logged on every event use iroha_logger::{error_span as wasm_log_span, prelude::tracing::Span}; use iroha_wasm_codec::{self as codec, WasmUsize}; -use state::{Wsv as _, WsvMut as _}; use wasmtime::{ Caller, Config, Engine, Linker, Module, Store, StoreLimits, StoreLimitsBuilder, TypedFunc, }; -use self::state::Authority; -use super::query::LazyValue; use crate::{ - smartcontracts::{Execute, ValidQuery as _}, + query::store::LiveQueryStoreHandle, + smartcontracts::{wasm::state::ValidateQueryOperation, Execute}, wsv::WorldStateView, + ValidQuery as _, }; -mod import_traits { - //! Traits which some [Runtime]s should implement to import functions from Iroha to WASM +/// Name of the exported memory +const WASM_MEMORY: &str = "memory"; +const WASM_MODULE: &str = "iroha"; + +mod export { + pub const EXECUTE_ISI: &str = "execute_instruction"; + pub const EXECUTE_QUERY: &str = "execute_query"; + pub const GET_SMART_CONTRACT_PAYLOAD: &str = "get_smart_contract_payload"; + pub const GET_TRIGGER_PAYLOAD: &str = "get_trigger_payload"; + pub const GET_MIGRATE_PAYLOAD: &str = "get_migrate_payload"; + pub const GET_VALIDATE_TRANSACTION_PAYLOAD: &str = "get_validate_transaction_payload"; + pub const GET_VALIDATE_INSTRUCTION_PAYLOAD: &str = "get_validate_instruction_payload"; + pub const GET_VALIDATE_QUERY_PAYLOAD: &str = "get_validate_query_payload"; + pub const SET_PERMISSION_TOKEN_SCHEMA: &str = "set_permission_token_schema"; + + pub const DBG: &str = "dbg"; + pub const LOG: &str = "log"; +} - use super::*; +mod import { + pub const SMART_CONTRACT_MAIN: &str = "_iroha_smart_contract_main"; + pub const SMART_CONTRACT_ALLOC: &str = "_iroha_smart_contract_alloc"; + pub const SMART_CONTRACT_DEALLOC: &str = "_iroha_smart_contract_dealloc"; - pub trait ExecuteOperations { - /// Execute `query` on host - #[codec::wrap_trait_fn] - fn execute_query(query: QueryBox, state: &S) -> Result; + pub const TRIGGER_MAIN: &str = "_iroha_trigger_main"; - /// Execute `instruction` on host - #[codec::wrap_trait_fn] - fn execute_instruction( - instruction: InstructionBox, - state: &mut S, - ) -> Result<(), ValidationFail>; - } + pub const EXECUTOR_VALIDATE_TRANSACTION: &str = "_iroha_executor_validate_transaction"; + pub const EXECUTOR_VALIDATE_INSTRUCTION: &str = "_iroha_executor_validate_instruction"; + pub const EXECUTOR_VALIDATE_QUERY: &str = "_iroha_executor_validate_query"; + pub const EXECUTOR_MIGRATE: &str = "_iroha_executor_migrate"; - pub trait GetValidatorPayloads { - #[codec::wrap_trait_fn] - fn get_migrate_payload(state: &S) -> 
payloads::Migrate; + pub mod traits { + //! Traits which some [Runtime]s should implement to import functions from Iroha to WASM - #[codec::wrap_trait_fn] - fn get_validate_transaction_payload(state: &S) -> payloads::ValidateTransaction; + use iroha_data_model::{query::QueryBox, smart_contract::payloads::Validate}; - #[codec::wrap_trait_fn] - fn get_validate_instruction_payload(state: &S) -> payloads::ValidateInstruction; + use super::super::*; - #[codec::wrap_trait_fn] - fn get_validate_query_payload(state: &S) -> payloads::ValidateQuery; - } + pub trait ExecuteOperations { + /// Execute `query` on host + #[codec::wrap_trait_fn] + fn execute_query( + query_request: SmartContractQueryRequest, + state: &mut S, + ) -> Result, ValidationFail>; + + /// Execute `instruction` on host + #[codec::wrap_trait_fn] + fn execute_instruction( + instruction: InstructionExpr, + state: &mut S, + ) -> Result<(), ValidationFail>; + } + + pub trait GetExecutorPayloads { + #[codec::wrap_trait_fn] + fn get_migrate_payload(state: &S) -> payloads::Migrate; + + #[codec::wrap_trait_fn] + fn get_validate_transaction_payload(state: &S) -> Validate; + + #[codec::wrap_trait_fn] + fn get_validate_instruction_payload(state: &S) -> Validate; - pub trait SetPermissionTokenSchema { - #[codec::wrap_trait_fn] - fn set_permission_token_schema(schema: PermissionTokenSchema, state: &mut S); + #[codec::wrap_trait_fn] + fn get_validate_query_payload(state: &S) -> Validate; + } + + pub trait SetPermissionTokenSchema { + #[codec::wrap_trait_fn] + fn set_permission_token_schema(schema: PermissionTokenSchema, state: &mut S); + } } } @@ -81,9 +120,15 @@ pub mod error { /// `WebAssembly` execution error type #[derive(Debug, thiserror::Error, displaydoc::Display)] + #[ignore_extra_doc_attributes] pub enum Error { /// Runtime initialization failure Initialization(#[source] WasmtimeError), + /// Runtime finalization failure. + /// + /// Currently only [`crate::query::store::Error`] might fail in this case. + /// [`From`] is not implemented to force users to explicitly wrap this error. + Finalization(#[source] crate::query::store::Error), /// Failed to load module ModuleLoading(#[source] WasmtimeError), /// Module could not be instantiated @@ -186,7 +231,9 @@ pub mod error { Trap::StackOverflow | Trap::MemoryOutOfBounds | Trap::TableOutOfBounds - | Trap::IndirectCallToNull => Self::ExecutionLimitsExceeded(err), + | Trap::IndirectCallToNull + | Trap::OutOfFuel + | Trap::Interrupt => Self::ExecutionLimitsExceeded(err), _ => Self::Other(err), }, None => Self::HostExecution(err), @@ -230,17 +277,30 @@ fn create_config() -> Result { Ok(config) } +/// Remove all executed queries from the query storage. +fn forget_all_executed_queries( + query_handle: &LiveQueryStoreHandle, + executed_queries: impl IntoIterator, +) -> Result<()> { + for query_id in executed_queries { + let _ = query_handle + .drop_query(query_id) + .map_err(Error::Finalization)?; + } + Ok(()) +} + /// Limits checker for smartcontracts. #[derive(Copy, Clone)] -struct LimitsValidator { +struct LimitsExecutor { /// Number of instructions in the smartcontract instruction_count: u64, /// Max allowed number of instructions in the smartcontract max_instruction_count: u64, } -impl LimitsValidator { - /// Create new [`LimitsValidator`] +impl LimitsExecutor { + /// Create new [`LimitsExecutor`] pub fn new(max_instruction_count: u64) -> Self { Self { instruction_count: 0, @@ -268,6 +328,10 @@ impl LimitsValidator { pub mod state { //! 
All supported states for [`Runtime`](super::Runtime)
 
+    use std::collections::HashSet;
+
+    use derive_more::Constructor;
+
     use super::*;
 
     /// Construct [`StoreLimits`] from [`Configuration`]
@@ -289,258 +353,210 @@ pub mod state {
             .build()
     }
 
-    /// Common data for states
-    pub struct Common<'wrld> {
+    /// State for most common operations.
+    /// Generic over borrowed [`WorldStateView`] type and specific executable state.
+    pub struct CommonState<W, S> {
         pub(super) authority: AccountId,
         pub(super) store_limits: StoreLimits,
-        pub(super) wsv: &'wrld mut WorldStateView,
         /// Span inside of which all logs are recorded for this smart contract
         pub(super) log_span: Span,
+        pub(super) executed_queries: HashSet<QueryId>,
+        /// Borrowed [`WorldStateView`] kind
+        pub(super) wsv: W,
+        /// Concrete state for specific executable
+        pub(super) specific_state: S,
     }
 
-    impl<'wrld> Common<'wrld> {
-        /// Create new [`Common`]
+    impl<W, S> CommonState<W, S> {
+        /// Create new [`CommonState`]
         pub fn new(
-            wsv: &'wrld mut WorldStateView,
             authority: AccountId,
             config: Configuration,
             log_span: Span,
+            wsv: W,
+            specific_state: S,
         ) -> Self {
             Self {
-                wsv,
                 authority,
                 store_limits: store_limits_from_config(&config),
                 log_span,
+                executed_queries: HashSet::new(),
+                wsv,
+                specific_state,
             }
         }
-    }
 
-    /// Trait to get span for logs.
-    ///
-    /// Used to implement [`log()`](Runtime::log) export.
-    pub trait LogSpan {
-        /// Get log span
-        fn log_span(&self) -> &Span;
-    }
-
-    /// Trait to get mutable reference to limits
-    ///
-    /// Used to implement [`Runtime::create_store()`].
-    pub trait LimitsMut {
-        /// Get mutable reference to store limits
-        fn limits_mut(&mut self) -> &mut StoreLimits;
-    }
-
-    /// Trait to get authority account id
-    pub trait Authority {
-        /// Get authority account id
-        fn authority(&self) -> &AccountId;
-    }
-
-    /// Trait to get an immutable reference to [`WorldStateView`]
-    pub trait Wsv {
-        /// Get immutable [`WorldStateView`]
-        fn wsv(&self) -> &WorldStateView;
-    }
-
-    /// Trait to get mutable reference to [`WorldStateView`]
-    pub trait WsvMut {
-        /// Get mutable [`WorldStateView`]
-        fn wsv_mut(&mut self) -> &mut WorldStateView;
-    }
-
-    /// Smart Contract execution state
-    pub struct SmartContract<'wrld> {
-        pub(super) common: Common<'wrld>,
-        /// Should be set for smart contract validation only.
-        pub(super) limits_validator: Option<LimitsValidator>,
-    }
-
-    impl LogSpan for SmartContract<'_> {
-        fn log_span(&self) -> &Span {
-            &self.common.log_span
-        }
-    }
-
-    impl LimitsMut for SmartContract<'_> {
-        fn limits_mut(&mut self) -> &mut StoreLimits {
-            &mut self.common.store_limits
+        /// Take executed queries leaving an empty set
+        pub fn take_executed_queries(&mut self) -> HashSet<QueryId> {
+            std::mem::take(&mut self.executed_queries)
         }
     }
 
-    impl Authority for SmartContract<'_> {
-        fn authority(&self) -> &AccountId {
-            &self.common.authority
-        }
-    }
-
-    impl Wsv for SmartContract<'_> {
-        fn wsv(&self) -> &WorldStateView {
-            self.common.wsv
-        }
+    /// Trait to validate queries and instructions before execution.
+    pub trait ValidateQueryOperation {
+        /// Validate `query`.
+        ///
+        /// # Errors
+        ///
+        /// Returns error if query validation failed.
+        fn validate_query(
+            &self,
+            authority: &AccountId,
+            query: QueryBox,
+        ) -> Result<(), ValidationFail>;
     }
 
-    impl WsvMut for SmartContract<'_> {
-        fn wsv_mut(&mut self) -> &mut WorldStateView {
-            self.common.wsv
-        }
-    }
+    pub mod wsv {
+        //! Strongly typed kinds of borrowed [`WorldStateView`]
 
-    /// Trigger execution state
-    pub struct Trigger<'wrld> {
-        pub(super) common: Common<'wrld>,
-        /// Event which activated this trigger
-        pub(super) triggering_event: Event,
-    }
+        use super::*;
 
-    impl LogSpan for Trigger<'_> {
-        fn log_span(&self) -> &Span {
-            &self.common.log_span
-        }
-    }
+        /// Const reference to [`WorldStateView`].
+        pub struct WithConst<'wrld>(pub(in super::super) &'wrld WorldStateView);
 
-    impl LimitsMut for Trigger<'_> {
-        fn limits_mut(&mut self) -> &mut StoreLimits {
-            &mut self.common.store_limits
-        }
-    }
+        /// Mutable reference to [`WorldStateView`].
+        pub struct WithMut<'wrld>(pub(in super::super) &'wrld mut WorldStateView);
 
-    impl Authority for Trigger<'_> {
-        fn authority(&self) -> &AccountId {
-            &self.common.authority
+        /// Trait to get immutable [`WorldStateView`]
+        ///
+        /// Exists to write generic code for [`WithConst`] and [`WithMut`].
+        pub trait Wsv {
+            /// Get immutable [`WorldStateView`]
+            fn wsv(&self) -> &WorldStateView;
        }
-    }
 
-    impl Wsv for Trigger<'_> {
-        fn wsv(&self) -> &WorldStateView {
-            self.common.wsv
+        impl Wsv for WithConst<'_> {
+            fn wsv(&self) -> &WorldStateView {
+                self.0
+            }
        }
-    }
 
-    impl WsvMut for Trigger<'_> {
-        fn wsv_mut(&mut self) -> &mut WorldStateView {
-            self.common.wsv
+        impl Wsv for WithMut<'_> {
+            fn wsv(&self) -> &WorldStateView {
+                self.0
+            }
        }
    }
 
-    pub mod validator {
-        //! States related to *Validator* execution.
+    pub mod specific {
+        //! States for concrete executable entrypoints.
 
        use super::*;
 
-        /// Struct to encapsulate common state for `validate_transaction()` and
-        /// `validate_instruction()` entrypoints.
-        ///
-        /// *Mut* means that [`WorldStateView`] will be mutated.
-        pub struct ValidateMut<'wrld, T> {
-            pub(in super::super) common: Common<'wrld>,
-            pub(in super::super) to_validate: T,
+        /// Smart Contract execution state
+        #[derive(Copy, Clone)]
+        pub struct SmartContract {
+            pub(in super::super) limits_executor: Option<LimitsExecutor>,
        }
 
-        impl<T> LogSpan for ValidateMut<'_, T> {
-            fn log_span(&self) -> &Span {
-                &self.common.log_span
+        impl SmartContract {
+            /// Create new [`SmartContract`]
+            pub(in super::super) fn new(limits_executor: Option<LimitsExecutor>) -> Self {
+                Self { limits_executor }
            }
        }
 
-        impl<T> LimitsMut for ValidateMut<'_, T> {
-            fn limits_mut(&mut self) -> &mut StoreLimits {
-                &mut self.common.store_limits
-            }
+        /// Trigger execution state
+        #[derive(Constructor)]
+        pub struct Trigger {
+            /// Event which activated this trigger
+            pub(in super::super) triggering_event: Event,
        }
 
-        impl<T> Authority for ValidateMut<'_, T> {
-            fn authority(&self) -> &AccountId {
-                &self.common.authority
-            }
-        }
+        pub mod executor {
+            //! States related to *Executor* execution.
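
// --- Editor's aside (illustrative sketch, not part of the patch) --------------
// The `state::wsv` module above replaces the old per-state `Wsv`/`WsvMut`
// traits with two wrapper kinds over a borrowed world state. A minimal
// standalone model of that pattern; `WorldStateView` is stubbed out here and
// its `height` field is hypothetical:

struct WorldStateView {
    height: u64,
}

struct WithConst<'wrld>(&'wrld WorldStateView);
struct WithMut<'wrld>(&'wrld mut WorldStateView);

trait Wsv {
    fn wsv(&self) -> &WorldStateView;
}

impl Wsv for WithConst<'_> {
    fn wsv(&self) -> &WorldStateView {
        self.0
    }
}

impl Wsv for WithMut<'_> {
    fn wsv(&self) -> &WorldStateView {
        self.0
    }
}

// Read-only logic (e.g. query validation) is written once for both borrow
// kinds, while instruction execution can still demand `WithMut` specifically:
fn block_height<W: Wsv>(state: &W) -> u64 {
    state.wsv().height
}
// -------------------------------------------------------------------------------
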
- impl Wsv for ValidateMut<'_, T> { - fn wsv(&self) -> &WorldStateView { - self.common.wsv - } - } + use super::*; - impl WsvMut for ValidateMut<'_, T> { - fn wsv_mut(&mut self) -> &mut WorldStateView { - self.common.wsv + /// Struct to encapsulate common state kinds for `validate_*` entrypoints + #[derive(Constructor)] + pub struct Validate { + pub(in super::super::super::super) to_validate: T, } - } - /// State for executing `validate_transaction()` entrypoint of validator - pub type ValidateTransaction<'wrld> = ValidateMut<'wrld, VersionedSignedTransaction>; + /// State kind for executing `validate_transaction()` entrypoint of executor + pub type ValidateTransaction = Validate; - /// State for executing `validate_instruction()` entrypoint of validator - pub type ValidateInstruction<'wrld> = ValidateMut<'wrld, InstructionBox>; + /// State kind for executing `validate_query()` entrypoint of executor + pub type ValidateQuery = Validate; - /// State for executing `validate_query()` entrypoint of validator - /// - /// Does not implement [`WsvMut`] because it contains immutable reference to - /// [`WorldStateView`] since it shouldn't be changed during *query* validation. - pub struct ValidateQuery<'wrld> { - pub(in super::super) authority: AccountId, - pub(in super::super) store_limits: StoreLimits, - pub(in super::super) wsv: &'wrld WorldStateView, - pub(in super::super) log_span: Span, - pub(in super::super) query: QueryBox, - } + /// State kind for executing `validate_instruction()` entrypoint of executor + pub type ValidateInstruction = Validate; - impl LogSpan for ValidateQuery<'_> { - fn log_span(&self) -> &Span { - &self.log_span - } + /// State kind for executing `migrate()` entrypoint of executor + #[derive(Copy, Clone)] + pub struct Migrate; } + } - impl LimitsMut for ValidateQuery<'_> { - fn limits_mut(&mut self) -> &mut StoreLimits { - &mut self.store_limits - } - } + /// State for smart contract execution + pub type SmartContract<'wrld> = CommonState, specific::SmartContract>; - impl Authority for ValidateQuery<'_> { - fn authority(&self) -> &AccountId { - &self.authority - } - } + /// State for trigger execution + pub type Trigger<'wrld> = CommonState, specific::Trigger>; - impl Wsv for ValidateQuery<'_> { - fn wsv(&self) -> &WorldStateView { - self.wsv - } + impl ValidateQueryOperation for SmartContract<'_> { + fn validate_query( + &self, + authority: &AccountId, + query: QueryBox, + ) -> Result<(), ValidationFail> { + let wsv: &WorldStateView = self.wsv.0; + wsv.executor().validate_query(wsv, authority, query) } + } - /// State for executing `migrate()` entrypoint of validator - pub struct Migrate<'wrld>(pub(in super::super) Common<'wrld>); - - impl LimitsMut for Migrate<'_> { - fn limits_mut(&mut self) -> &mut StoreLimits { - &mut self.0.store_limits - } + impl ValidateQueryOperation for Trigger<'_> { + fn validate_query( + &self, + authority: &AccountId, + query: QueryBox, + ) -> Result<(), ValidationFail> { + let wsv: &WorldStateView = self.wsv.0; + wsv.executor().validate_query(wsv, authority, query) } + } - impl LogSpan for Migrate<'_> { - fn log_span(&self) -> &Span { - &self.0.log_span - } - } + pub mod executor { + //! 
States for different executor entrypoints - impl Authority for Migrate<'_> { - fn authority(&self) -> &AccountId { - &self.0.authority - } - } + use super::*; - impl Wsv for Migrate<'_> { - fn wsv(&self) -> &WorldStateView { - self.0.wsv - } + /// State for executing `validate_transaction()` entrypoint + pub type ValidateTransaction<'wrld> = + CommonState, specific::executor::ValidateTransaction>; + + /// State for executing `validate_query()` entrypoint + pub type ValidateQuery<'wrld> = + CommonState, specific::executor::ValidateQuery>; + + /// State for executing `validate_instruction()` entrypoint + pub type ValidateInstruction<'wrld> = + CommonState, specific::executor::ValidateInstruction>; + + /// State for executing `migrate()` entrypoint + pub type Migrate<'wrld> = CommonState, specific::executor::Migrate>; + + macro_rules! impl_blank_validate_operations { + ($($t:ident),+ $(,)?) => { $( + impl ValidateQueryOperation for $t <'_> { + fn validate_query( + &self, + _authority: &AccountId, + _query: QueryBox, + ) -> Result<(), ValidationFail> { + Ok(()) + } + } + )+ }; } - impl WsvMut for Migrate<'_> { - fn wsv_mut(&mut self) -> &mut WorldStateView { - self.0.wsv - } - } + impl_blank_validate_operations!( + ValidateTransaction, + ValidateInstruction, + ValidateQuery, + Migrate, + ); } } @@ -554,23 +570,23 @@ pub struct Runtime { impl Runtime { fn get_memory(caller: &mut impl GetExport) -> Result { caller - .get_export(export::WASM_MEMORY) - .ok_or_else(|| ExportError::not_found(export::WASM_MEMORY))? + .get_export(WASM_MEMORY) + .ok_or_else(|| ExportError::not_found(WASM_MEMORY))? .into_memory() - .ok_or_else(|| ExportError::not_a_memory(export::WASM_MEMORY)) + .ok_or_else(|| ExportError::not_a_memory(WASM_MEMORY)) } fn get_alloc_fn( caller: &mut Caller, ) -> Result, ExportError> { caller - .get_export(export::fn_names::WASM_ALLOC) - .ok_or_else(|| ExportError::not_found(export::fn_names::WASM_ALLOC))? + .get_export(import::SMART_CONTRACT_ALLOC) + .ok_or_else(|| ExportError::not_found(import::SMART_CONTRACT_ALLOC))? .into_func() - .ok_or_else(|| ExportError::not_a_function(export::fn_names::WASM_ALLOC))? + .ok_or_else(|| ExportError::not_a_function(import::SMART_CONTRACT_ALLOC))? 
.typed::(caller) .map_err(|_error| { - ExportError::wrong_signature::(export::fn_names::WASM_ALLOC) + ExportError::wrong_signature::(import::SMART_CONTRACT_ALLOC) }) } @@ -618,12 +634,12 @@ impl Runtime { let _ = Self::get_typed_func::( instance, store, - export::fn_names::WASM_ALLOC, + import::SMART_CONTRACT_ALLOC, )?; let _ = Self::get_typed_func::<(WasmUsize, WasmUsize), ()>( instance, store, - export::fn_names::WASM_DEALLOC, + import::SMART_CONTRACT_DEALLOC, )?; Ok(()) @@ -641,7 +657,7 @@ impl Runtime { /// # Errors /// /// If string decoding fails - #[allow(clippy::print_stdout, clippy::needless_pass_by_value)] + #[allow(clippy::needless_pass_by_value)] #[codec::wrap(state = "S")] fn dbg(msg: String) { println!("{msg}"); @@ -655,17 +671,20 @@ struct LogError(u8); /// It's required by `#[codec::wrap]` to parse well type WasmtimeError = wasmtime::Error; -impl Runtime { +impl Runtime> { /// Log the given string at the given log level /// /// # Errors /// /// If log level or string decoding fails #[codec::wrap] - pub fn log((log_level, msg): (u8, String), state: &S) -> Result<(), WasmtimeError> { + pub fn log( + (log_level, msg): (u8, String), + state: &state::CommonState, + ) -> Result<(), WasmtimeError> { const TARGET: &str = "WASM"; - let _span = state.log_span().enter(); + let _span = state.log_span.enter(); match LogLevel::from_repr(log_level) .ok_or(LogError(log_level)) .map_err(wasmtime::Error::from)? @@ -688,26 +707,26 @@ impl Runtime { } Ok(()) } -} -impl Runtime { - fn create_store(&self, state: S) -> Store { + fn create_store(&self, state: state::CommonState) -> Store> { let mut store = Store::new(&self.engine, state); - store.limiter(|s| s.limits_mut()); + store.limiter(|s| &mut s.store_limits); store - .add_fuel(self.config.fuel_limit) + .set_fuel(self.config.fuel_limit) .expect("Wasm Runtime config is malformed, this is a bug"); store } +} - fn execute_validator_validate_internal( +impl Runtime> { + fn execute_executor_validate_internal( &self, module: &wasmtime::Module, - state: S, + state: state::CommonState, validate_fn_name: &'static str, - ) -> Result { + ) -> Result { let mut store = self.create_store(state); let instance = self.instantiate_module(module, &mut store)?; @@ -721,40 +740,71 @@ impl Runtime { let memory = Self::get_memory(&mut (&instance, &mut store)).expect("Checked at instantiation step"); let dealloc_fn = - Self::get_typed_func(&instance, &mut store, export::fn_names::WASM_DEALLOC) + Self::get_typed_func(&instance, &mut store, import::SMART_CONTRACT_DEALLOC) .expect("Checked at instantiation step"); - codec::decode_with_length_prefix_from_memory(&memory, &dealloc_fn, &mut store, offset) - .map_err(Error::Decode) + let validation_res = + codec::decode_with_length_prefix_from_memory(&memory, &dealloc_fn, &mut store, offset) + .map_err(Error::Decode)?; + + let mut state = store.into_data(); + let executed_queries = state.take_executed_queries(); + forget_all_executed_queries(state.wsv.wsv().query_handle(), executed_queries)?; + Ok(validation_res) } } -#[allow(clippy::needless_pass_by_value)] -impl Runtime { - fn default_execute_query(query: QueryBox, state: &S) -> Result { - iroha_logger::debug!(%query, "Executing"); - - let wsv = state.wsv(); - - // NOTE: Smart contract (not validator) is trying to execute the query, validate it first - // TODO: Validation should be skipped when executing smart contract. - // There should be two steps validation and execution. First smart contract - // is validated and then it's executed. 
Here it's validating in both steps. - // Add a flag indicating whether smart contract is being validated or executed - wsv.validator() - .validate_query(wsv, state.authority(), query.clone())?; - - query - .execute(wsv) - .map(|lazy_value| match lazy_value { - LazyValue::Value(value) => value, - LazyValue::Iter(iter) => Value::Vec(iter.collect()), - }) - .map_err(Into::into) +impl Runtime> +where + W: state::wsv::Wsv, + state::CommonState: state::ValidateQueryOperation, +{ + fn default_execute_query( + query_request: SmartContractQueryRequest, + state: &mut state::CommonState, + ) -> Result, ValidationFail> { + iroha_logger::debug!(%query_request, "Executing"); + + match query_request.0 { + QueryRequest::Query(QueryWithParameters { + query, + sorting, + pagination, + fetch_size, + }) => { + let batched = { + let wsv = &state.wsv.wsv(); + state.validate_query(&state.authority, query.clone())?; + let output = query.execute(wsv)?; + + wsv.query_handle() + .handle_query_output(output, &sorting, pagination, fetch_size) + }?; + match &batched { + BatchedResponse::V1(batched) => { + if let Some(query_id) = &batched.cursor.query_id { + state.executed_queries.insert(query_id.clone()); + } + } + } + Ok(batched) + } + QueryRequest::Cursor(cursor) => { + // In a normal situation we already have this `query_id` stored, + // so that's a protection from malicious smart contract + if let Some(query_id) = &cursor.query_id { + state.executed_queries.insert(query_id.clone()); + } + state.wsv.wsv().query_handle().handle_query_cursor(cursor) + } + } + .map_err(Into::into) } +} +impl<'wrld, S> Runtime, S>> { fn default_execute_instruction( - instruction: InstructionBox, - state: &mut S, + instruction: InstructionExpr, + state: &mut state::CommonState, S>, ) -> Result<(), ValidationFail> { debug!(%instruction, "Executing"); @@ -762,10 +812,10 @@ impl Runtime { // There should be two steps validation and execution. First smart contract // is validated and then it's executed. Here it's validating in both steps. 
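
// --- Editor's aside (illustrative sketch, not part of the patch) --------------
// `create_store` above switches from wasmtime's deprecated `add_fuel` to
// `set_fuel`, and the error mapping now folds `Trap::OutOfFuel` into
// `ExecutionLimitsExceeded`. A minimal standalone sketch of fuel-limited
// execution, assuming a wasmtime version that exposes `Store::set_fuel`
// (the fuel amount is arbitrary):

use wasmtime::{Config, Engine, Store};

fn fueled_store(fuel: u64) -> anyhow::Result<Store<()>> {
    let mut config = Config::new();
    config.consume_fuel(true); // opt in to fuel metering
    let engine = Engine::new(&config)?;
    let mut store = Store::new(&engine, ());
    // Hard budget: once spent, guest code traps with `Trap::OutOfFuel`.
    store.set_fuel(fuel)?;
    Ok(store)
}
// -------------------------------------------------------------------------------
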
// Add a flag indicating whether smart contract is being validated or executed - let authority = state.authority().clone(); - let wsv = state.wsv_mut(); - wsv.validator() - .clone() // Cloning validator is a cheap operation + let authority = state.authority.clone(); + let wsv: &mut WorldStateView = state.wsv.0; + wsv.executor() + .clone() // Cloning executor is a cheap operation .validate_instruction(wsv, &authority, instruction) } } @@ -785,10 +835,13 @@ impl<'wrld> Runtime> { bytes: impl AsRef<[u8]>, ) -> Result<()> { let span = wasm_log_span!("Smart contract execution", %authority); - let state = state::SmartContract { - common: state::Common::new(wsv, authority, self.config, span), - limits_validator: None, - }; + let state = state::SmartContract::new( + authority, + self.config, + span, + state::wsv::WithMut(wsv), + state::specific::SmartContract::new(None), + ); self.execute_smart_contract_with_state(bytes, state) } @@ -799,7 +852,7 @@ impl<'wrld> Runtime> { /// /// - if instructions failed to validate, but queries are permitted /// - if instruction limits are not obeyed - /// - if execution of the smartcontract fails (check ['execute']) + /// - if execution of the smartcontract fails (check [`Self::execute`]) pub fn validate( &mut self, wsv: &'wrld mut WorldStateView, @@ -808,10 +861,13 @@ impl<'wrld> Runtime> { max_instruction_count: u64, ) -> Result<()> { let span = wasm_log_span!("Smart contract validation", %authority); - let state = state::SmartContract { - common: state::Common::new(wsv, authority, self.config, span), - limits_validator: Some(LimitsValidator::new(max_instruction_count)), - }; + let state = state::SmartContract::new( + authority, + self.config, + span, + state::wsv::WithMut(wsv), + state::specific::SmartContract::new(Some(LimitsExecutor::new(max_instruction_count))), + ); self.execute_smart_contract_with_state(bytes, state) } @@ -824,45 +880,44 @@ impl<'wrld> Runtime> { let mut store = self.create_store(state); let smart_contract = self.create_smart_contract(&mut store, bytes)?; - let main_fn = Self::get_typed_func( - &smart_contract, - &mut store, - export::fn_names::SMART_CONTRACT_MAIN, - )?; + let main_fn = + Self::get_typed_func(&smart_contract, &mut store, import::SMART_CONTRACT_MAIN)?; // NOTE: This function takes ownership of the pointer main_fn - .call(store, ()) - .map_err(ExportFnCallError::from) - .map_err(Into::into) + .call(&mut store, ()) + .map_err(ExportFnCallError::from)?; + let mut state = store.into_data(); + let executed_queries = state.take_executed_queries(); + forget_all_executed_queries(state.wsv.0.query_handle(), executed_queries) } #[codec::wrap] fn get_smart_contract_payload(state: &state::SmartContract) -> payloads::SmartContract { payloads::SmartContract { - owner: state.authority().clone(), + owner: state.authority.clone(), } } } -impl<'wrld> import_traits::ExecuteOperations> +impl<'wrld> import::traits::ExecuteOperations> for Runtime> { #[codec::wrap] fn execute_query( - query: QueryBox, - state: &state::SmartContract<'wrld>, - ) -> Result { - Self::default_execute_query(query, state) + query_request: SmartContractQueryRequest, + state: &mut state::SmartContract<'wrld>, + ) -> Result, ValidationFail> { + Self::default_execute_query(query_request, state) } #[codec::wrap] fn execute_instruction( - instruction: InstructionBox, + instruction: InstructionExpr, state: &mut state::SmartContract<'wrld>, ) -> Result<(), ValidationFail> { - if let Some(limits_validator) = state.limits_validator.as_mut() { - 
limits_validator.check_instruction_limits()?; + if let Some(limits_executor) = state.specific_state.limits_executor.as_mut() { + limits_executor.check_instruction_limits()?; } Self::default_execute_instruction(instruction, state) @@ -885,46 +940,52 @@ impl<'wrld> Runtime> { event: Event, ) -> Result<()> { let span = wasm_log_span!("Trigger execution", %id, %authority); - let state = state::Trigger { - common: state::Common::new(wsv, authority, self.config, span), - triggering_event: event, - }; + let state = state::Trigger::new( + authority, + self.config, + span, + state::wsv::WithMut(wsv), + state::specific::Trigger::new(event), + ); let mut store = self.create_store(state); let instance = self.instantiate_module(module, &mut store)?; - let main_fn = Self::get_typed_func(&instance, &mut store, export::fn_names::TRIGGER_MAIN)?; + let main_fn = Self::get_typed_func(&instance, &mut store, import::TRIGGER_MAIN)?; // NOTE: This function takes ownership of the pointer main_fn - .call(store, ()) - .map_err(ExportFnCallError::from) - .map_err(Into::into) + .call(&mut store, ()) + .map_err(ExportFnCallError::from)?; + + let mut state = store.into_data(); + let executed_queries = state.take_executed_queries(); + forget_all_executed_queries(state.wsv.0.query_handle(), executed_queries) } #[codec::wrap] fn get_trigger_payload(state: &state::Trigger) -> payloads::Trigger { payloads::Trigger { - owner: state.authority().clone(), - event: state.triggering_event.clone(), + owner: state.authority.clone(), + event: state.specific_state.triggering_event.clone(), } } } -impl<'wrld> import_traits::ExecuteOperations> +impl<'wrld> import::traits::ExecuteOperations> for Runtime> { #[codec::wrap] fn execute_query( - query: QueryBox, - state: &state::Trigger<'wrld>, - ) -> Result { - Self::default_execute_query(query, state) + query_request: SmartContractQueryRequest, + state: &mut state::Trigger<'wrld>, + ) -> Result, ValidationFail> { + Self::default_execute_query(query_request, state) } #[codec::wrap] fn execute_instruction( - instruction: InstructionBox, + instruction: InstructionExpr, state: &mut state::Trigger<'wrld>, ) -> Result<(), ValidationFail> { Self::default_execute_instruction(instruction, state) @@ -932,318 +993,316 @@ impl<'wrld> import_traits::ExecuteOperations> } /// Marker trait to auto-implement [`import_traits::ExecuteOperations`] for a concrete -/// *Validator* [`Runtime`]. +/// *Executor* [`Runtime`]. /// /// *Mut* means that [`WorldStateView`] will be mutated. 
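
// --- Editor's aside (illustrative sketch, not part of the patch) --------------
// The marker-trait pattern used below lets one blanket impl supply the shared
// "execute as executor" behaviour, while each concrete runtime opts in with an
// empty impl. Standalone sketch; the trait and method names here are
// simplified stand-ins for the real ones:

trait ExecuteOperationsAsExecutorMut<S> {}

trait ExecuteOperations<S> {
    fn execute_instruction(state: &mut S);
}

// One blanket impl covers every runtime that carries the marker:
impl<R, S> ExecuteOperations<S> for R
where
    R: ExecuteOperationsAsExecutorMut<S>,
{
    fn execute_instruction(_state: &mut S) {
        // shared executor logic would live here
    }
}

// A concrete runtime then needs nothing but the marker:
struct MyRuntime;
impl ExecuteOperationsAsExecutorMut<u64> for MyRuntime {}
// -------------------------------------------------------------------------------
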
-trait ExecuteOperationsAsValidatorMut {} +trait ExecuteOperationsAsExecutorMut {} -impl import_traits::ExecuteOperations for R +impl<'wrld, R, S> + import::traits::ExecuteOperations, S>> for R where - R: ExecuteOperationsAsValidatorMut, - S: state::Wsv + state::WsvMut + state::Authority, + R: ExecuteOperationsAsExecutorMut, S>>, + state::CommonState, S>: state::ValidateQueryOperation, { #[codec::wrap] - fn execute_query(query: QueryBox, state: &S) -> Result { - debug!(%query, "Executing as validator"); - - query - .execute(state.wsv()) - .map(|lazy_value| match lazy_value { - LazyValue::Value(value) => value, - LazyValue::Iter(iter) => Value::Vec(iter.collect()), - }) - .map_err(Into::into) + fn execute_query( + query_request: SmartContractQueryRequest, + state: &mut state::CommonState, S>, + ) -> Result, ValidationFail> { + debug!(%query_request, "Executing as executor"); + + Runtime::default_execute_query(query_request, state) } #[codec::wrap] fn execute_instruction( - instruction: InstructionBox, - state: &mut S, + instruction: InstructionExpr, + state: &mut state::CommonState, S>, ) -> Result<(), ValidationFail> { - debug!(%instruction, "Executing as validator"); + debug!(%instruction, "Executing as executor"); instruction - .execute(&state.authority().clone(), state.wsv_mut()) + .execute(&state.authority.clone(), state.wsv.0) .map_err(Into::into) } } /// Marker trait to auto-implement [`import_traits::SetPermissionTokenSchema`] for a concrete [`Runtime`]. /// -/// Useful because in *Validator* exist more entrypoints than just `migrate()` which is the +/// Useful because *Executor* exposes more entrypoints than just `migrate()` which is the /// only entrypoint allowed to execute operations on permission tokens. trait FakeSetPermissionTokenSchema { /// Entrypoint function name for panic message const ENTRYPOINT_FN_NAME: &'static str; } -impl import_traits::SetPermissionTokenSchema for R +impl import::traits::SetPermissionTokenSchema for R where R: FakeSetPermissionTokenSchema, { #[codec::wrap] fn set_permission_token_schema(_schema: PermissionTokenSchema, _state: &mut S) { panic!( - "Validator `{}()` entrypoint should not set permission token schema", + "Executor `{}()` entrypoint should not set permission token schema", Self::ENTRYPOINT_FN_NAME ) } } -impl<'wrld> Runtime> { - /// Execute `validate_transaction()` entrypoint of the given module of runtime validator +impl<'wrld> Runtime> { + /// Execute `validate_transaction()` entrypoint of the given module of runtime executor /// /// # Errors /// /// - if failed to instantiate provided `module` /// - if unable to find expected function export /// - if the execution of the smartcontract fails - /// - if unable to decode [`validator::Result`] - pub fn execute_validator_validate_transaction( + /// - if unable to decode [`executor::Result`] + pub fn execute_executor_validate_transaction( &self, wsv: &'wrld mut WorldStateView, - authority: &::Id, + authority: &AccountId, module: &wasmtime::Module, - transaction: VersionedSignedTransaction, - ) -> Result { + transaction: SignedTransaction, + ) -> Result { let span = wasm_log_span!("Running `validate_transaction()`"); - self.execute_validator_validate_internal( + self.execute_executor_validate_internal( module, - state::validator::ValidateTransaction { - common: state::Common::new(wsv, authority.clone(), self.config, span), - to_validate: transaction, - }, - export::fn_names::VALIDATOR_VALIDATE_TRANSACTION, + state::executor::ValidateTransaction::new( + authority.clone(), + self.config, + 
span, + state::wsv::WithMut(wsv), + state::specific::executor::ValidateTransaction::new(transaction), + ), + import::EXECUTOR_VALIDATE_TRANSACTION, ) } } -impl<'wrld> ExecuteOperationsAsValidatorMut> - for Runtime> +impl<'wrld> ExecuteOperationsAsExecutorMut> + for Runtime> { } -impl<'wrld> import_traits::GetValidatorPayloads> - for Runtime> +impl<'wrld> import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] fn get_migrate_payload( - _state: &state::validator::ValidateTransaction<'wrld>, + _state: &state::executor::ValidateTransaction<'wrld>, ) -> payloads::Migrate { - panic!("Validator `validate_transaction()` entrypoint should not query payload for `migrate()` entrypoint") + panic!("Executor `validate_transaction()` entrypoint should not query payload for `migrate()` entrypoint") } #[codec::wrap] fn get_validate_transaction_payload( - state: &state::validator::ValidateTransaction<'wrld>, - ) -> payloads::ValidateTransaction { - payloads::ValidateTransaction { - authority: state.authority().clone(), - block_height: state.wsv().height(), - to_validate: state.to_validate.clone(), + state: &state::executor::ValidateTransaction<'wrld>, + ) -> Validate { + Validate { + authority: state.authority.clone(), + block_height: state.wsv.0.height(), + to_validate: state.specific_state.to_validate.clone(), } } #[codec::wrap] fn get_validate_instruction_payload( - _state: &state::validator::ValidateTransaction<'wrld>, - ) -> payloads::ValidateInstruction { - panic!("Validator `validate_transaction()` entrypoint should not query payload for `validate_instruction()` entrypoint") + _state: &state::executor::ValidateTransaction<'wrld>, + ) -> Validate { + panic!("Executor `validate_transaction()` entrypoint should not query payload for `validate_instruction()` entrypoint") } #[codec::wrap] fn get_validate_query_payload( - _state: &state::validator::ValidateTransaction<'wrld>, - ) -> payloads::ValidateQuery { - panic!("Validator `validate_transaction()` entrypoint should not query payload for `validate_query()` entrypoint") + _state: &state::executor::ValidateTransaction<'wrld>, + ) -> Validate { + panic!("Executor `validate_transaction()` entrypoint should not query payload for `validate_query()` entrypoint") } } -impl<'wrld> FakeSetPermissionTokenSchema> - for Runtime> +impl<'wrld> FakeSetPermissionTokenSchema> + for Runtime> { const ENTRYPOINT_FN_NAME: &'static str = "validate_transaction"; } -impl<'wrld> Runtime> { - /// Execute `validate_instruction()` entrypoint of the given module of runtime validator +impl<'wrld> Runtime> { + /// Execute `validate_instruction()` entrypoint of the given module of runtime executor /// /// # Errors /// /// - if failed to instantiate provided `module` /// - if unable to find expected function export /// - if the execution of the smartcontract fails - /// - if unable to decode [`validator::Result`] - pub fn execute_validator_validate_instruction( + /// - if unable to decode [`executor::Result`] + pub fn execute_executor_validate_instruction( &self, wsv: &'wrld mut WorldStateView, - authority: &::Id, + authority: &AccountId, module: &wasmtime::Module, - instruction: InstructionBox, - ) -> Result { + instruction: InstructionExpr, + ) -> Result { let span = wasm_log_span!("Running `validate_instruction()`"); - self.execute_validator_validate_internal( + self.execute_executor_validate_internal( module, - state::validator::ValidateInstruction { - common: state::Common::new(wsv, authority.clone(), self.config, span), - to_validate: instruction, - }, - 
export::fn_names::VALIDATOR_VALIDATE_INSTRUCTION, + state::executor::ValidateInstruction::new( + authority.clone(), + self.config, + span, + state::wsv::WithMut(wsv), + state::specific::executor::ValidateInstruction::new(instruction), + ), + import::EXECUTOR_VALIDATE_INSTRUCTION, ) } } -impl<'wrld> ExecuteOperationsAsValidatorMut> - for Runtime> +impl<'wrld> ExecuteOperationsAsExecutorMut> + for Runtime> { } -impl<'wrld> import_traits::GetValidatorPayloads> - for Runtime> +impl<'wrld> import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] fn get_migrate_payload( - _state: &state::validator::ValidateInstruction<'wrld>, + _state: &state::executor::ValidateInstruction<'wrld>, ) -> payloads::Migrate { - panic!("Validator `validate_instruction()` entrypoint should not query payload for `migrate()` entrypoint") + panic!("Executor `validate_instruction()` entrypoint should not query payload for `migrate()` entrypoint") } #[codec::wrap] fn get_validate_transaction_payload( - _state: &state::validator::ValidateInstruction<'wrld>, - ) -> payloads::ValidateTransaction { - panic!("Validator `validate_instruction()` entrypoint should not query payload for `validate_transaction()` entrypoint") + _state: &state::executor::ValidateInstruction<'wrld>, + ) -> Validate { + panic!("Executor `validate_instruction()` entrypoint should not query payload for `validate_transaction()` entrypoint") } #[codec::wrap] fn get_validate_instruction_payload( - state: &state::validator::ValidateInstruction<'wrld>, - ) -> payloads::ValidateInstruction { - payloads::ValidateInstruction { - authority: state.authority().clone(), - block_height: state.wsv().height(), - to_validate: state.to_validate.clone(), + state: &state::executor::ValidateInstruction<'wrld>, + ) -> Validate { + Validate { + authority: state.authority.clone(), + block_height: state.wsv.0.height(), + to_validate: state.specific_state.to_validate.clone(), } } #[codec::wrap] fn get_validate_query_payload( - _state: &state::validator::ValidateInstruction<'wrld>, - ) -> payloads::ValidateQuery { - panic!("Validator `validate_instruction()` entrypoint should not query payload for `validate_query()` entrypoint") + _state: &state::executor::ValidateInstruction<'wrld>, + ) -> Validate { + panic!("Executor `validate_instruction()` entrypoint should not query payload for `validate_query()` entrypoint") } } -impl<'wrld> FakeSetPermissionTokenSchema> - for Runtime> +impl<'wrld> FakeSetPermissionTokenSchema> + for Runtime> { const ENTRYPOINT_FN_NAME: &'static str = "validate_instruction"; } -impl<'wrld> Runtime> { - /// Execute `validate_query()` entrypoint of the given module of runtime validator +impl<'wrld> Runtime> { + /// Execute `validate_query()` entrypoint of the given module of runtime executor /// /// # Errors /// /// - if failed to instantiate provided `module` /// - if unable to find expected function export /// - if the execution of the smartcontract fails - /// - if unable to decode [`validator::Result`] - pub fn execute_validator_validate_query( + /// - if unable to decode [`executor::Result`] + pub fn execute_executor_validate_query( &self, wsv: &'wrld WorldStateView, - authority: &::Id, + authority: &AccountId, module: &wasmtime::Module, query: QueryBox, - ) -> Result { + ) -> Result { let span = wasm_log_span!("Running `validate_query()`"); - self.execute_validator_validate_internal( + self.execute_executor_validate_internal( module, - state::validator::ValidateQuery { - wsv, - authority: authority.clone(), - store_limits: 
state::store_limits_from_config(&self.config), - log_span: span, - query, - }, - export::fn_names::VALIDATOR_VALIDATE_QUERY, + state::executor::ValidateQuery::new( + authority.clone(), + self.config, + span, + state::wsv::WithConst(wsv), + state::specific::executor::ValidateQuery::new(query), + ), + import::EXECUTOR_VALIDATE_QUERY, ) } } -impl<'wrld> import_traits::ExecuteOperations> - for Runtime> +impl<'wrld> import::traits::ExecuteOperations> + for Runtime> { #[codec::wrap] fn execute_query( - query: QueryBox, - state: &state::validator::ValidateQuery<'wrld>, - ) -> Result { - debug!(%query, "Executing as validator"); - - query - .execute(state.wsv()) - .map(|lazy_value| match lazy_value { - LazyValue::Value(value) => value, - LazyValue::Iter(iter) => Value::Vec(iter.collect()), - }) - .map_err(Into::into) + query_request: SmartContractQueryRequest, + state: &mut state::executor::ValidateQuery<'wrld>, + ) -> Result, ValidationFail> { + debug!(%query_request, "Executing as executor"); + + Runtime::default_execute_query(query_request, state) } #[codec::wrap] fn execute_instruction( - _instruction: InstructionBox, - _state: &mut state::validator::ValidateQuery<'wrld>, + _instruction: InstructionExpr, + _state: &mut state::executor::ValidateQuery<'wrld>, ) -> Result<(), ValidationFail> { - panic!("Validator `validate_query()` entrypoint should not execute instructions") + panic!("Executor `validate_query()` entrypoint should not execute instructions") } } -impl<'wrld> import_traits::GetValidatorPayloads> - for Runtime> +impl<'wrld> import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] - fn get_migrate_payload(_state: &state::validator::ValidateQuery<'wrld>) -> payloads::Migrate { - panic!("Validator `validate_query()` entrypoint should not query payload for `migrate()` entrypoint") + fn get_migrate_payload(_state: &state::executor::ValidateQuery<'wrld>) -> payloads::Migrate { + panic!("Executor `validate_query()` entrypoint should not query payload for `migrate()` entrypoint") } #[codec::wrap] fn get_validate_transaction_payload( - _state: &state::validator::ValidateQuery<'wrld>, - ) -> payloads::ValidateTransaction { - panic!("Validator `validate_query()` entrypoint should not query payload for `validate_transaction()` entrypoint") + _state: &state::executor::ValidateQuery<'wrld>, + ) -> Validate { + panic!("Executor `validate_query()` entrypoint should not query payload for `validate_transaction()` entrypoint") } #[codec::wrap] fn get_validate_instruction_payload( - _state: &state::validator::ValidateQuery<'wrld>, - ) -> payloads::ValidateInstruction { - panic!("Validator `validate_query()` entrypoint should not query payload for `validate_instruction()` entrypoint") + _state: &state::executor::ValidateQuery<'wrld>, + ) -> Validate { + panic!("Executor `validate_query()` entrypoint should not query payload for `validate_instruction()` entrypoint") } #[codec::wrap] fn get_validate_query_payload( - state: &state::validator::ValidateQuery<'wrld>, - ) -> payloads::ValidateQuery { - payloads::ValidateQuery { - authority: state.authority().clone(), - block_height: state.wsv().height(), - to_validate: state.query.clone(), + state: &state::executor::ValidateQuery<'wrld>, + ) -> Validate { + Validate { + authority: state.authority.clone(), + block_height: state.wsv.0.height(), + to_validate: state.specific_state.to_validate.clone(), } } } -impl<'wrld> FakeSetPermissionTokenSchema> - for Runtime> +impl<'wrld> FakeSetPermissionTokenSchema> + for Runtime> { const ENTRYPOINT_FN_NAME: 
&'static str = "validate_query"; } -impl<'wrld> Runtime> { - /// Execute `migrate()` entrypoint of *Validator* +impl<'wrld> Runtime> { + /// Execute `migrate()` entrypoint of *Executor* /// /// # Errors /// @@ -1251,25 +1310,25 @@ impl<'wrld> Runtime> { /// - if failed to get export function for `migrate()` /// - if failed to call export function /// - if failed to decode [`MigrationResult`] - pub fn execute_validator_migration( + pub fn execute_executor_migration( &self, wsv: &'wrld mut WorldStateView, - authority: &::Id, + authority: &AccountId, module: &wasmtime::Module, ) -> Result { let span = wasm_log_span!("Running migration"); - let state = state::validator::Migrate(state::Common::new( - wsv, + let state = state::executor::Migrate::new( authority.clone(), self.config, span, - )); + state::wsv::WithMut(wsv), + state::specific::executor::Migrate, + ); let mut store = self.create_store(state); let instance = self.instantiate_module(module, &mut store)?; - let migrate_fn = - Self::get_typed_func(&instance, &mut store, export::fn_names::VALIDATOR_MIGRATE)?; + let migrate_fn = Self::get_typed_func(&instance, &mut store, import::EXECUTOR_MIGRATE)?; let offset = migrate_fn .call(&mut store, ()) @@ -1278,15 +1337,15 @@ impl<'wrld> Runtime> { let memory = Self::get_memory(&mut (&instance, &mut store)).expect("Checked at instantiation step"); let dealloc_fn = - Self::get_typed_func(&instance, &mut store, export::fn_names::WASM_DEALLOC) + Self::get_typed_func(&instance, &mut store, import::SMART_CONTRACT_DEALLOC) .expect("Checked at instantiation step"); codec::decode_with_length_prefix_from_memory(&memory, &dealloc_fn, &mut store, offset) .map_err(Error::Decode) } } -impl<'wrld> ExecuteOperationsAsValidatorMut> - for Runtime> +impl<'wrld> ExecuteOperationsAsExecutorMut> + for Runtime> { } @@ -1299,49 +1358,47 @@ impl<'wrld> ExecuteOperationsAsValidatorMut> /// /// Panics with error message if called, because it should never be called from /// `migrate()` entrypoint. 
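
// --- Editor's aside (illustrative sketch, not part of the patch) --------------
// Why the payload getters below panic: each executor entrypoint gets its own
// state type, so only the matching getter is meaningful; the rest exist solely
// to satisfy the WASM import table. Simplified sketch; `MigrateState` and the
// u64 payload are hypothetical stand-ins:

struct MigrateState {
    block_height: u64,
}

fn get_migrate_payload(state: &MigrateState) -> u64 {
    state.block_height // the only payload `migrate()` may request
}

fn get_validate_query_payload(_state: &MigrateState) -> ! {
    // Reachable only if a miscompiled executor binary calls the wrong import;
    // failing loudly beats returning garbage.
    panic!("Executor `migrate()` entrypoint should not query payload for `validate_query()` entrypoint")
}
// -------------------------------------------------------------------------------
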
-impl<'wrld> import_traits::GetValidatorPayloads> - for Runtime> +impl<'wrld> import::traits::GetExecutorPayloads> + for Runtime> { #[codec::wrap] - fn get_migrate_payload(state: &state::validator::Migrate<'wrld>) -> payloads::Migrate { + fn get_migrate_payload(state: &state::executor::Migrate<'wrld>) -> payloads::Migrate { payloads::Migrate { - block_height: state.wsv().height(), + block_height: state.wsv.0.height(), } } #[codec::wrap] fn get_validate_transaction_payload( - _state: &state::validator::Migrate<'wrld>, - ) -> payloads::ValidateTransaction { - panic!("Validator `migrate()` entrypoint should not query payload for `validate_transaction()` entrypoint") + _state: &state::executor::Migrate<'wrld>, + ) -> Validate { + panic!("Executor `migrate()` entrypoint should not query payload for `validate_transaction()` entrypoint") } #[codec::wrap] fn get_validate_instruction_payload( - _state: &state::validator::Migrate<'wrld>, - ) -> payloads::ValidateInstruction { - panic!("Validator `migrate()` entrypoint should not query payload for `validate_instruction()` entrypoint") + _state: &state::executor::Migrate<'wrld>, + ) -> Validate { + panic!("Executor `migrate()` entrypoint should not query payload for `validate_instruction()` entrypoint") } #[codec::wrap] - fn get_validate_query_payload( - _state: &state::validator::Migrate<'wrld>, - ) -> payloads::ValidateQuery { - panic!("Validator `migrate()` entrypoint should not query payload for `validate_query()` entrypoint") + fn get_validate_query_payload(_state: &state::executor::Migrate<'wrld>) -> Validate { + panic!("Executor `migrate()` entrypoint should not query payload for `validate_query()` entrypoint") } } -impl<'wrld> import_traits::SetPermissionTokenSchema> - for Runtime> +impl<'wrld> import::traits::SetPermissionTokenSchema> + for Runtime> { #[codec::wrap] fn set_permission_token_schema( schema: PermissionTokenSchema, - state: &mut state::validator::Migrate<'wrld>, + state: &mut state::executor::Migrate<'wrld>, ) { debug!(%schema, "Setting permission token schema"); - state.wsv_mut().set_permission_token_schema(schema) + state.wsv.0.set_permission_token_schema(schema) } } @@ -1403,24 +1460,24 @@ impl RuntimeBuilder { macro_rules! create_imports { ( $linker:ident, - $(import::fn_names:: $name:ident => $fn_path:path),* $(,)? + $(export::$name:ident => $fn_path:path),* $(,)? 
) => { $linker.func_wrap( - import::MODULE, - import::fn_names::LOG, + WASM_MODULE, + export::LOG, Runtime::log, ) .and_then(|l| { l.func_wrap( - import::MODULE, - import::fn_names::DBG, + WASM_MODULE, + export::DBG, Runtime::dbg, ) }) $(.and_then(|l| { l.func_wrap( - import::MODULE, - import::fn_names::$name, + WASM_MODULE, + export::$name, $fn_path, ) }))* @@ -1439,9 +1496,9 @@ impl<'wrld> RuntimeBuilder> { let mut linker = Linker::new(engine); create_imports!(linker, - import::fn_names::EXECUTE_ISI => Runtime::>::execute_instruction, - import::fn_names::EXECUTE_QUERY => Runtime::>::execute_query, - import::fn_names::GET_SMART_CONTRACT_PAYLOAD => Runtime::get_smart_contract_payload, + export::EXECUTE_ISI => Runtime::>::execute_instruction, + export::EXECUTE_QUERY => Runtime::>::execute_query, + export::GET_SMART_CONTRACT_PAYLOAD => Runtime::get_smart_contract_payload, )?; Ok(linker) }) @@ -1459,105 +1516,105 @@ impl<'wrld> RuntimeBuilder> { let mut linker = Linker::new(engine); create_imports!(linker, - import::fn_names::EXECUTE_ISI => Runtime::>::execute_instruction, - import::fn_names::EXECUTE_QUERY => Runtime::>::execute_query, - import::fn_names::GET_TRIGGER_PAYLOAD => Runtime::get_trigger_payload, + export::EXECUTE_ISI => Runtime::>::execute_instruction, + export::EXECUTE_QUERY => Runtime::>::execute_query, + export::GET_TRIGGER_PAYLOAD => Runtime::get_trigger_payload, )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { - /// Builds the [`Runtime`] for *Validator* `validate_transaction()` execution +impl<'wrld> RuntimeBuilder> { + /// Builds the [`Runtime`] for *Executor* `validate_transaction()` execution /// /// # Errors /// /// Fails if failed to create default linker. - pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); create_imports!(linker, - import::fn_names::EXECUTE_ISI => Runtime::>::execute_instruction, - import::fn_names::EXECUTE_QUERY => Runtime::>::execute_query, - import::fn_names::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - import::fn_names::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - import::fn_names::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - import::fn_names::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - import::fn_names::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + export::EXECUTE_ISI => Runtime::>::execute_instruction, + export::EXECUTE_QUERY => Runtime::>::execute_query, + export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, + export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, + export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, + export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { - /// Builds the [`Runtime`] for *Validator* `validate_instruction()` execution +impl<'wrld> RuntimeBuilder> { + /// Builds the [`Runtime`] for *Executor* `validate_instruction()` execution /// /// # Errors /// /// Fails if failed to create default linker. 
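
// --- Editor's aside (illustrative sketch, not part of the patch) --------------
// The `create_imports!` macro above expands to chained `Linker::func_wrap`
// calls, each registering a host function under the "iroha" module name that
// guest WASM imports. Minimal standalone sketch; the "dbg" host function body
// here is a stand-in:

use wasmtime::{Engine, Linker};

fn example_linker(engine: &Engine) -> anyhow::Result<Linker<()>> {
    let mut linker = Linker::new(engine);
    // Mirrors `linker.func_wrap(WASM_MODULE, export::DBG, Runtime::dbg)`:
    linker.func_wrap("iroha", "dbg", |value: i32| {
        println!("guest says: {value}");
    })?;
    Ok(linker)
}
// -------------------------------------------------------------------------------
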
- pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); create_imports!(linker, - import::fn_names::EXECUTE_ISI => Runtime::>::execute_instruction, - import::fn_names::EXECUTE_QUERY => Runtime::>::execute_query, - import::fn_names::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - import::fn_names::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - import::fn_names::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - import::fn_names::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - import::fn_names::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + export::EXECUTE_ISI => Runtime::>::execute_instruction, + export::EXECUTE_QUERY => Runtime::>::execute_query, + export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, + export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, + export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, + export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { - /// Builds the [`Runtime`] for *Validator* `validate_query()` execution +impl<'wrld> RuntimeBuilder> { + /// Builds the [`Runtime`] for *Executor* `validate_query()` execution /// /// # Errors /// /// Fails if failed to create default linker. - pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); create_imports!(linker, - import::fn_names::EXECUTE_ISI => Runtime::>::execute_instruction, - import::fn_names::EXECUTE_QUERY => Runtime::>::execute_query, - import::fn_names::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - import::fn_names::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - import::fn_names::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - import::fn_names::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - import::fn_names::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + export::EXECUTE_ISI => Runtime::>::execute_instruction, + export::EXECUTE_QUERY => Runtime::>::execute_query, + export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, + export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, + export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, + export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, )?; Ok(linker) }) } } -impl<'wrld> RuntimeBuilder> { - /// Builds the [`Runtime`] to execute `permission_tokens()` entrypoint of *Validator* +impl<'wrld> RuntimeBuilder> { + /// Builds the [`Runtime`] to execute `permission_tokens()` entrypoint of *Executor* /// /// # Errors /// /// Fails if failed to create default linker. 
- pub fn build(self) -> Result>> { + pub fn build(self) -> Result>> { self.finalize(|engine| { let mut linker = Linker::new(engine); create_imports!(linker, - import::fn_names::EXECUTE_ISI => Runtime::>::execute_instruction, - import::fn_names::EXECUTE_QUERY => Runtime::>::execute_query, - import::fn_names::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, - import::fn_names::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, - import::fn_names::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, - import::fn_names::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, - import::fn_names::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, + export::EXECUTE_ISI => Runtime::>::execute_instruction, + export::EXECUTE_QUERY => Runtime::>::execute_query, + export::GET_MIGRATE_PAYLOAD => Runtime::get_migrate_payload, + export::GET_VALIDATE_TRANSACTION_PAYLOAD => Runtime::get_validate_transaction_payload, + export::GET_VALIDATE_INSTRUCTION_PAYLOAD => Runtime::get_validate_instruction_payload, + export::GET_VALIDATE_QUERY_PAYLOAD => Runtime::get_validate_query_payload, + export::SET_PERMISSION_TOKEN_SCHEMA => Runtime::set_permission_token_schema, )?; Ok(linker) }) @@ -1583,15 +1640,18 @@ impl GetExport for (&wasmtime::Instance, C) { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use std::str::FromStr as _; use iroha_crypto::KeyPair; + use iroha_data_model::query::{sorting::Sorting, Pagination}; use parity_scale_codec::Encode; + use tokio::test; use super::*; - use crate::{kura::Kura, smartcontracts::isi::Registrable as _, PeersIds, World}; + use crate::{ + kura::Kura, query::store::LiveQueryStore, smartcontracts::isi::Registrable as _, PeersIds, + World, + }; fn world_with_test_account(authority: &AccountId) -> World { let domain_id = authority.domain_id.clone(); @@ -1624,9 +1684,9 @@ mod tests { (func (export "{dealloc_fn_name}") (param $size i32) (param $len i32) nop) "#, - memory_name = export::WASM_MEMORY, - alloc_fn_name = export::fn_names::WASM_ALLOC, - dealloc_fn_name = export::fn_names::WASM_DEALLOC, + memory_name = WASM_MEMORY, + alloc_fn_name = import::SMART_CONTRACT_ALLOC, + dealloc_fn_name = import::SMART_CONTRACT_DEALLOC, isi_len = isi_hex.len() / 3, isi_hex = isi_hex, ) @@ -1648,15 +1708,16 @@ mod tests { } #[test] - fn execute_instruction_exported() -> Result<(), Error> { + async fn execute_instruction_exported() -> Result<(), Error> { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); - let register_isi = RegisterBox::new(Account::new(new_authority, [])); - encode_hex(InstructionBox::from(register_isi)) + let register_isi = RegisterExpr::new(Account::new(new_authority, [])); + encode_hex(InstructionExpr::from(register_isi)) }; let wat = format!( @@ -1675,8 +1736,8 @@ mod tests { ;; No use of return values drop)) "#, - main_fn_name = export::fn_names::SMART_CONTRACT_MAIN, - execute_fn_name = import::fn_names::EXECUTE_ISI, + main_fn_name = import::SMART_CONTRACT_MAIN, + execute_fn_name = export::EXECUTE_ISI, memory_and_alloc = memory_and_alloc(&isi_hex), isi_len = isi_hex.len() / 3, ); @@ -1689,11 
+1750,17 @@ mod tests { } #[test] - fn execute_query_exported() -> Result<(), Error> { + async fn execute_query_exported() -> Result<(), Error> { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura); - let query_hex = encode_hex(QueryBox::from(FindAccountById::new(authority.clone()))); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); + let query_hex = encode_hex(SmartContractQueryRequest::query( + QueryBox::from(FindAccountById::new(authority.clone())), + Sorting::default(), + Pagination::default(), + FetchSize::default(), + )); let wat = format!( r#" @@ -1711,8 +1778,8 @@ mod tests { ;; No use of return values drop)) "#, - main_fn_name = export::fn_names::SMART_CONTRACT_MAIN, - execute_fn_name = import::fn_names::EXECUTE_QUERY, + main_fn_name = import::SMART_CONTRACT_MAIN, + execute_fn_name = export::EXECUTE_QUERY, memory_and_alloc = memory_and_alloc(&query_hex), isi_len = query_hex.len() / 3, ); @@ -1726,16 +1793,17 @@ mod tests { } #[test] - fn instruction_limit_reached() -> Result<(), Error> { + async fn instruction_limit_reached() -> Result<(), Error> { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); + let query_handle = LiveQueryStore::test().start(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura); + let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); - let register_isi = RegisterBox::new(Account::new(new_authority, [])); - encode_hex(InstructionBox::from(register_isi)) + let register_isi = RegisterExpr::new(Account::new(new_authority, [])); + encode_hex(InstructionExpr::from(register_isi)) }; let wat = format!( @@ -1752,8 +1820,8 @@ mod tests { (call $exec_fn (i32.const 0) (i32.const {isi1_end})) (call $exec_fn (i32.const {isi1_end}) (i32.const {isi2_end})))) "#, - main_fn_name = export::fn_names::SMART_CONTRACT_MAIN, - execute_fn_name = import::fn_names::EXECUTE_ISI, + main_fn_name = import::SMART_CONTRACT_MAIN, + execute_fn_name = export::EXECUTE_ISI, // Store two instructions into adjacent memory and execute them memory_and_alloc = memory_and_alloc(&isi_hex.repeat(2)), isi1_end = isi_hex.len() / 3, @@ -1775,15 +1843,16 @@ mod tests { } #[test] - fn instructions_not_allowed() -> Result<(), Error> { + async fn instructions_not_allowed() -> Result<(), Error> { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); let isi_hex = { let new_authority = AccountId::from_str("mad_hatter@wonderland").expect("Valid"); - let register_isi = RegisterBox::new(Account::new(new_authority, [])); - encode_hex(InstructionBox::from(register_isi)) + let register_isi = RegisterExpr::new(Account::new(new_authority, [])); + encode_hex(InstructionExpr::from(register_isi)) }; let wat = format!( @@ -1802,8 +1871,8 @@ mod tests { ) ) "#, - main_fn_name = export::fn_names::SMART_CONTRACT_MAIN, - execute_fn_name = import::fn_names::EXECUTE_ISI, + main_fn_name 
= import::SMART_CONTRACT_MAIN, + execute_fn_name = export::EXECUTE_ISI, memory_and_alloc = memory_and_alloc(&isi_hex), isi_len = isi_hex.len() / 3, ); @@ -1823,10 +1892,11 @@ mod tests { } #[test] - fn queries_not_allowed() -> Result<(), Error> { + async fn queries_not_allowed() -> Result<(), Error> { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); let query_hex = encode_hex(QueryBox::from(FindAccountById::new(authority.clone()))); let wat = format!( @@ -1845,8 +1915,8 @@ mod tests { ;; No use of return value drop)) "#, - main_fn_name = export::fn_names::SMART_CONTRACT_MAIN, - execute_fn_name = import::fn_names::EXECUTE_QUERY, + main_fn_name = import::SMART_CONTRACT_MAIN, + execute_fn_name = export::EXECUTE_QUERY, memory_and_alloc = memory_and_alloc(&query_hex), isi_len = query_hex.len() / 3, ); @@ -1864,10 +1934,11 @@ mod tests { } #[test] - fn trigger_related_func_is_not_linked_for_smart_contract() -> Result<(), Error> { + async fn trigger_related_func_is_not_linked_for_smart_contract() -> Result<(), Error> { let authority = AccountId::from_str("alice@wonderland").expect("Valid"); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world_with_test_account(&authority), kura, query_handle); let query_hex = encode_hex(QueryBox::from(FindAccountById::new(authority.clone()))); let wat = format!( @@ -1886,8 +1957,8 @@ mod tests { ;; No use of return values drop)) "#, - main_fn_name = export::fn_names::SMART_CONTRACT_MAIN, - get_trigger_payload_fn_name = import::fn_names::GET_TRIGGER_PAYLOAD, + main_fn_name = import::SMART_CONTRACT_MAIN, + get_trigger_payload_fn_name = export::GET_TRIGGER_PAYLOAD, memory_and_alloc = memory_and_alloc(&query_hex), ); diff --git a/core/src/snapshot.rs b/core/src/snapshot.rs index feb6830d441..8ccbd0b318e 100644 --- a/core/src/snapshot.rs +++ b/core/src/snapshot.rs @@ -1,11 +1,4 @@ //! This module contains [`WorldStateView`] snapshot actor service. 
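Taken together, the wasm test changes above and the snapshot changes below apply one pattern: a WorldStateView is no longer created or restored without a LiveQueryStoreHandle. A minimal sketch of the two call shapes, using only names that appear in this diff (LiveQueryStore::test, WorldStateView::new, try_read_snapshot, BlockCount); `world`, `snapshot_dir` and `block_count` are hypothetical placeholders, and the handle is assumed to be cheaply cloneable, as actor handles in this codebase are:

// Sketch only, not part of the diff: the query-store handle is threaded
// through every WSV construction site after this change.
let kura = Kura::blank_kura_for_testing();
let query_handle = LiveQueryStore::test().start();

// Fresh in-memory WSV: the handle becomes a third constructor argument.
let wsv = WorldStateView::new(world, Arc::clone(&kura), query_handle.clone());

// WSV restored from a snapshot: the same handle is injected through the
// deserialization seed (`KuraSeed`) so that live queries keep working.
let restored = try_read_snapshot(snapshot_dir, &kura, query_handle, BlockCount(block_count))?;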
- -#![allow( - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - use std::{ io::Read, path::{Path, PathBuf}, @@ -15,13 +8,14 @@ use std::{ use iroha_config::snapshot::Configuration; use iroha_crypto::HashOf; -use iroha_data_model::block::VersionedCommittedBlock; +use iroha_data_model::block::SignedBlock; use iroha_logger::prelude::*; use serde::{de::DeserializeSeed, Serialize}; use tokio::sync::mpsc; use crate::{ kura::{BlockCount, Kura}, + query::store::LiveQueryStoreHandle, sumeragi::SumeragiHandle, wsv::{KuraSeed, WorldStateView}, }; @@ -162,6 +156,7 @@ impl SnapshotMaker { pub fn try_read_snapshot( snapshot_dir: impl AsRef, kura: &Arc, + query_handle: LiveQueryStoreHandle, BlockCount(block_count): BlockCount, ) -> Result { let mut bytes = Vec::new(); @@ -175,6 +170,7 @@ pub fn try_read_snapshot( let mut deserializer = serde_json::Deserializer::from_slice(&bytes); let seed = KuraSeed { kura: Arc::clone(kura), + query_handle, }; let wsv = seed.deserialize(&mut deserializer)?; let snapshot_height = wsv.block_hashes.len(); @@ -219,8 +215,8 @@ pub enum Error { /// Height at which block hashes differs between snapshot and [`Kura`] height: usize, /// Hash of the block stored in snapshot - snapshot_block_hash: HashOf, + snapshot_block_hash: HashOf, /// Hash of the block stored in kura - kura_block_hash: HashOf, + kura_block_hash: HashOf, }, } diff --git a/core/src/sumeragi/main_loop.rs b/core/src/sumeragi/main_loop.rs index dbb0acb815b..29f815428d6 100644 --- a/core/src/sumeragi/main_loop.rs +++ b/core/src/sumeragi/main_loop.rs @@ -1,12 +1,15 @@ //! The main event loop that powers sumeragi. -#![allow(clippy::cognitive_complexity)] use std::sync::mpsc; -use iroha_data_model::{block::*, peer::PeerId, transaction::error::TransactionRejectionReason}; +use iroha_crypto::HashOf; +use iroha_data_model::{ + block::*, events::pipeline::PipelineEvent, peer::PeerId, + transaction::error::TransactionRejectionReason, +}; use iroha_p2p::UpdateTopology; use tracing::{span, Level}; -use super::*; +use super::{view_change::ProofBuilder, *}; use crate::{block::*, sumeragi::tracing::instrument}; /// `Sumeragi` is the implementation of the consensus. @@ -44,11 +47,11 @@ pub struct Sumeragi { pub debug_force_soft_fork: bool, /// The current network topology. pub current_topology: Topology, - /// The sumeragi internal `WorldStateView`. This will probably + /// The sumeragi internal [`WorldStateView`]. This will probably /// morph into a wsv + various patches as we attempt to /// multithread isi execution. In the future we might also once /// again merge the internal wsv with the public facing one. But - /// as of now we keep them seperate for greater flexibility when + /// as of now we keep them separate for greater flexibility when /// optimizing. pub wsv: WorldStateView, /// A copy of wsv that is kept one block behind at all times. Because @@ -78,13 +81,12 @@ impl Sumeragi { /// # Errors /// Fails if network sending fails #[instrument(skip(self, packet))] - #[allow(clippy::needless_pass_by_value)] // TODO: Fix. 
fn post_packet_to(&self, packet: MessagePacket, peer: &PeerId) { if peer == &self.peer_id { return; } let post = iroha_p2p::Post { - data: NetworkMessage::SumeragiPacket(Box::new(packet.into())), + data: NetworkMessage::SumeragiPacket(Box::new(packet)), peer_id: peer.clone(), }; self.network.post(post); @@ -101,17 +103,16 @@ impl Sumeragi { } } - #[allow(clippy::needless_pass_by_value)] fn broadcast_packet(&self, msg: MessagePacket) { let broadcast = iroha_p2p::Broadcast { - data: NetworkMessage::SumeragiPacket(Box::new(msg.into())), + data: NetworkMessage::SumeragiPacket(Box::new(msg)), }; self.network.broadcast(broadcast); } /// Connect or disconnect peers according to the current network topology. fn connect_peers(&self, topology: &Topology) { - let peers = topology.sorted_peers.clone().into_iter().collect(); + let peers = topology.ordered_peers.clone().into_iter().collect(); self.network.update_topology(UpdateTopology(peers)); } @@ -121,20 +122,19 @@ impl Sumeragi { self.block_time + self.commit_time } - fn send_events(&self, events: impl Into>) { + fn send_events(&self, events: impl IntoIterator>) { let addr = &self.peer_id.address; if self.events_sender.receiver_count() > 0 { - for event in events.into() { + for event in events { self.events_sender - .send(event) + .send(event.into()) .map_err(|err| warn!(%addr, ?err, "Event not sent")) .unwrap_or(0); } } } - #[allow(clippy::panic)] fn receive_network_packet( &self, view_change_proof_chain: &mut ProofChain, @@ -172,7 +172,7 @@ impl Sumeragi { .and_then(|packet : MessagePacket| { if let Err(error) = view_change_proof_chain.merge( packet.view_change_proofs, - &self.current_topology.sorted_peers, + &self.current_topology.ordered_peers, self.current_topology.max_faults(), self.wsv.latest_block_hash(), ) { @@ -182,7 +182,6 @@ impl Sumeragi { }) } - #[allow(clippy::panic, clippy::panic_in_result_fn)] fn init_listen_for_genesis( &mut self, shutdown_receiver: &mut tokio::sync::oneshot::Receiver<()>, @@ -195,67 +194,37 @@ impl Sumeragi { debug!(?e, "Early return."); e })?; - // we must connect to peers so that our block_sync can find us - // the genesis block. + match self.message_receiver.try_recv() { Ok(packet) => { if let Some(message) = packet.message { - let (block, new_wsv) = match message { - Message::BlockCreated(BlockCreated { block }) => { - let mut new_wsv = self.wsv.clone(); - // If we receive a committed genesis block that is - // valid, use it without question. During the - // genesis round we blindly take on the network - // topology described in the provided genesis - // block. - let block = { - let span = span!( - Level::TRACE, - "Genesis Round Peer is revalidating the block." 
- ); - let _enter = span.enter(); - match block.revalidate(&mut new_wsv) { - Ok(()) => block, - Err(error) => { - error!(?error); - continue; - } - } - }; - // Omit signature verification during genesis round - (block.commit_unchecked().into(), new_wsv) - } - Message::BlockSyncUpdate(BlockSyncUpdate { block }) => { - let mut new_wsv = self.wsv.clone(); - // Omit signature verification during genesis round - match block.revalidate(&mut new_wsv) { - Ok(()) => (block, new_wsv), - Err(error) => { - error!(?error); - continue; - } - } - } + let mut new_wsv = self.wsv.clone(); + + let block = match message { + Message::BlockCreated(BlockCreated { block }) + | Message::BlockSyncUpdate(BlockSyncUpdate { block }) => block, msg => { trace!(?msg, "Not handling the message, waiting for genesis..."); continue; } }; - if block.is_genesis() { - match &block { - VersionedCommittedBlock::V1(block) => { - assert!( - !block.transactions.iter().any(|tx| tx.error.is_some()), - "Genesis transaction set contains invalid transactions" - ); + let block = + match ValidBlock::validate(block, &self.current_topology, &mut new_wsv) + .and_then(|block| { + block + .commit(&self.current_topology) + .map_err(|(block, error)| (block.into(), error)) + }) { + Ok(block) => block, + Err((_, error)) => { + error!(?error, "Received invalid genesis block"); + continue; } - } + }; - self.commit_block(block, new_wsv); - return Err(EarlyReturn::GenesisBlockReceivedAndCommitted); - } - debug!("Received a block that was not genesis."); + self.commit_block(block, new_wsv); + return Err(EarlyReturn::GenesisBlockReceivedAndCommitted); } } Err(mpsc::TryRecvError::Disconnected) => return Err(EarlyReturn::Disconnected), @@ -267,8 +236,6 @@ impl Sumeragi { fn sumeragi_init_commit_genesis(&mut self, genesis_network: GenesisNetwork) { std::thread::sleep(Duration::from_millis(250)); - info!("Initializing iroha using the genesis block."); - assert_eq!(self.wsv.height(), 0); assert_eq!(self.wsv.latest_block_hash(), None); @@ -278,115 +245,85 @@ impl Sumeragi { .map(AcceptedTransaction::accept_genesis) .collect(); - // Don't start genesis round. Instead just commit the genesis block. 
- assert!( - !transactions.is_empty(), - "Genesis transaction set contains no valid transactions" + let mut new_wsv = self.wsv.clone(); + let genesis = BlockBuilder::new(transactions, self.current_topology.clone(), vec![]) + .chain(0, &mut new_wsv) + .sign(self.key_pair.clone()) + .expect("Genesis signing failed"); + + let genesis_msg = MessagePacket::new( + ProofChain::default(), + Some(BlockCreated::from(genesis.clone()).into()), ); - let mut new_wsv = self.wsv.clone(); - let block = BlockBuilder { - transactions, - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: self.current_topology.clone(), - key_pair: self.key_pair.clone(), - wsv: &mut new_wsv, - } - .build(); + let genesis = genesis + .commit(&self.current_topology) + .expect("Genesis invalid"); + assert!( - !block.transactions.iter().any(|tx| tx.error.is_some()), - "Genesis transaction set contains invalid transactions" + !genesis + .payload() + .transactions + .iter() + .any(|tx| tx.error.is_some()), + "Genesis contains invalid transactions" ); - { - info!(block_partial_hash = %block.partial_hash(), "Publishing genesis block."); - - info!( - role = ?self.current_topology.role(&self.peer_id), - block_partial_hash = %block.partial_hash(), - "Created a block to commit.", - ); + info!( + role = ?self.current_topology.role(&self.peer_id), + block_hash = %genesis.hash(), + "Genesis block created", + ); - self.send_events(&block); - let msg = MessagePacket::new( - ProofChain::default(), - Some(BlockCreated::from(block.clone()).into()), - ); - self.broadcast_packet(msg); - // Omit signature verification during genesis round - self.commit_block(block.commit_unchecked(), new_wsv); - } + self.commit_block(genesis, new_wsv); + self.broadcast_packet(genesis_msg); } - fn commit_block(&mut self, block: impl Into, new_wsv: WorldStateView) { + fn commit_block(&mut self, block: CommittedBlock, new_wsv: WorldStateView) { self.update_state::(block, new_wsv); } - fn replace_top_block( - &mut self, - block: impl Into, - new_wsv: WorldStateView, - ) { + fn replace_top_block(&mut self, block: CommittedBlock, new_wsv: WorldStateView) { self.update_state::(block, new_wsv); } - fn update_topology(&mut self, block_signees: &[PublicKey], peers: Vec) { - let mut topology = Topology::new(peers); - - topology.update_topology( - block_signees, - self.wsv.peers_ids().iter().cloned().collect(), - ); - - self.current_topology = topology; - self.connect_peers(&self.current_topology); - } - fn update_state( &mut self, - block: impl Into, + block: CommittedBlock, mut new_wsv: WorldStateView, ) { - let committed_block = block.into(); - info!( addr=%self.peer_id.address, role=%self.current_topology.role(&self.peer_id), block_height=%self.wsv.height(), - block_hash=%committed_block.hash(), + block_hash=%block.hash(), "{}", Strategy::LOG_MESSAGE, ); Strategy::before_update_hook(self); new_wsv - .apply_without_execution(&committed_block) + .apply_without_execution(&block) .expect("Failed to apply block on WSV. 
Bailing."); self.wsv = new_wsv; - let events_buffer = core::mem::take(&mut self.wsv.events_buffer); - self.send_events(events_buffer); + let wsv_events = core::mem::take(&mut self.wsv.events_buffer); + self.send_events(wsv_events); // Parameters are updated before updating public copy of sumeragi self.update_params(); - let events: Vec<_> = (&committed_block).into(); - let topology = committed_block - .as_v1() - .header() - .committed_with_topology - .clone(); - let block_signees = committed_block - .signatures() - .map(|s| s.public_key()) - .cloned() - .collect::>(); + let new_topology = Topology::recreate_topology( + block.as_ref(), + 0, + self.wsv.peers_ids().iter().cloned().collect(), + ); + let events = block.produce_events(); // https://github.com/hyperledger/iroha/issues/3396 // Kura should store the block only upon successful application to the internal WSV to avoid storing a corrupted block. // Public-facing WSV update should happen after that and be followed by `BlockCommited` event to prevent client access to uncommitted data. - Strategy::kura_store_block(&self.kura, committed_block); + Strategy::kura_store_block(&self.kura, block); // Update WSV copy that is public facing self.public_wsv_sender @@ -401,13 +338,13 @@ impl Sumeragi { } }); - // This sends "Block committed" event, so it should be done - // AFTER public facing WSV update + // NOTE: This sends "Block committed" event, + // so it should be done AFTER public facing WSV update self.send_events(events); + self.current_topology = new_topology; + self.connect_peers(&self.current_topology); - self.update_topology(&block_signees, topology); - - self.cache_transaction() + self.cache_transaction(); } fn update_params(&mut self) { @@ -435,21 +372,14 @@ fn suggest_view_change( view_change_proof_chain: &mut ProofChain, current_view_change_index: u64, ) { - let suspect_proof = { - let mut proof = Proof { - latest_block_hash: sumeragi.wsv.latest_block_hash(), - view_change_index: current_view_change_index, - signatures: Vec::new(), - }; - proof + let suspect_proof = + ProofBuilder::new(sumeragi.wsv.latest_block_hash(), current_view_change_index) .sign(sumeragi.key_pair.clone()) .expect("Proof signing failed"); - proof - }; view_change_proof_chain .insert_proof( - &sumeragi.current_topology.sorted_peers, + &sumeragi.current_topology.ordered_peers, sumeragi.current_topology.max_faults(), sumeragi.wsv.latest_block_hash(), suspect_proof, @@ -466,7 +396,7 @@ fn prune_view_change_proofs_and_calculate_current_index( ) -> u64 { view_change_proof_chain.prune(sumeragi.wsv.latest_block_hash()); view_change_proof_chain.verify_with_state( - &sumeragi.current_topology.sorted_peers, + &sumeragi.current_topology.ordered_peers, sumeragi.current_topology.max_faults(), sumeragi.wsv.latest_block_hash(), ) as u64 @@ -479,7 +409,7 @@ fn handle_message( voting_block: &mut Option, current_view_change_index: u64, view_change_proof_chain: &mut ProofChain, - voting_signatures: &mut Vec>, + voting_signatures: &mut Vec>, ) { let current_topology = &sumeragi.current_topology; let role = current_topology.role(&sumeragi.peer_id); @@ -501,21 +431,24 @@ fn handle_message( peer_latest_block_hash=?sumeragi.wsv.latest_block_hash(), peer_latest_block_view_change_index=?sumeragi.wsv.latest_block_view_change_index(), consensus_latest_block_hash=%block.hash(), - consensus_latest_block_view_change_index=%block.as_v1().header.view_change_index, + consensus_latest_block_view_change_index=%block.payload().header.view_change_index, "Soft fork occurred: peer in inconsistent 
state. Rolling back and replacing top block." ); sumeragi.replace_top_block(block, new_wsv) } - Err(BlockSyncError::BlockNotValid(error)) => { + Err((_, BlockSyncError::BlockNotValid(error))) => { error!(%addr, %role, %block_hash, ?error, "Block not valid.") } - Err(BlockSyncError::SoftForkBlockNotValid(error)) => { + Err((_, BlockSyncError::SoftForkBlockNotValid(error))) => { error!(%addr, %role, %block_hash, ?error, "Soft-fork block not valid.") } - Err(BlockSyncError::SoftForkBlockSmallViewChangeIndex { - peer_view_change_index, - block_view_change_index, - }) => { + Err(( + _, + BlockSyncError::SoftForkBlockSmallViewChangeIndex { + peer_view_change_index, + block_view_change_index, + }, + )) => { debug!( %addr, %role, peer_latest_block_hash=?sumeragi.wsv.latest_block_hash(), @@ -525,10 +458,13 @@ fn handle_message( "Soft fork didn't occur: block has the same or smaller view change index" ); } - Err(BlockSyncError::BlockNotProperHeight { - peer_height, - block_height, - }) => { + Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height, + block_height, + }, + )) => { warn!(%addr, %role, %block_hash, %block_height, %peer_height, "Other peer sent an irrelevant or outdated block to the peer (it's neither `peer_height` nor `peer_height + 1`).") } } @@ -542,21 +478,19 @@ fn handle_message( || role == Role::Leader && !is_consensus_required { error!(%addr, %role, "Received BlockCommitted message, but shouldn't"); - } else if let Some(mut voted_block) = voting_block.take() { - let voting_block_hash = voted_block.block.partial_hash(); - - if hash.internal == voting_block_hash.into() { - // The manipulation of the topology relies upon all peers seeing the same signature set. - // Therefore we must clear the signatures and accept what the proxy tail giveth.
- voted_block.block.signatures.clear(); - add_signatures::(&mut voted_block, signatures.transmute()); + } else if let Some(voted_block) = voting_block.take() { + let voting_block_hash = voted_block.block.payload().hash(); - match voted_block.block.commit(current_topology) { + if hash == voting_block_hash { + match voted_block + .block + .commit_with_signatures(current_topology, signatures) + { Ok(committed_block) => { sumeragi.commit_block(committed_block, voted_block.new_wsv) } - Err((_, err)) => { - error!(%addr, %role, %hash, ?err, "Block failed to be committed") + Err((_, error)) => { + error!(%addr, %role, %hash, ?error, "Block failed to be committed") } }; } else { @@ -576,12 +510,12 @@ fn handle_message( .is_consensus_required() .expect("Peer has `ValidatingPeer` role, which means that the current topology requires consensus"); - if let Some(v_block) = vote_for_block(sumeragi, block_created) { - let block_hash = v_block.block.partial_hash(); + if let Some(v_block) = vote_for_block(sumeragi, &current_topology, block_created) { + let block_hash = v_block.block.payload().hash(); let msg = MessagePacket::new( view_change_proof_chain.clone(), - Some(BlockSigned::from(&v_block.block).into()), + Some(BlockSigned::from(v_block.block.clone()).into()), ); sumeragi.broadcast_packet_to(msg, [current_topology.proxy_tail()]); @@ -595,13 +529,13 @@ fn handle_message( "Peer has `ObservingPeer` role, which means that the current topology requires consensus", ); - if let Some(v_block) = vote_for_block(sumeragi, block_created) { + if let Some(v_block) = vote_for_block(sumeragi, &current_topology, block_created) { if current_view_change_index >= 1 { - let block_hash = v_block.block.partial_hash(); + let block_hash = v_block.block.payload().hash(); let msg = MessagePacket::new( view_change_proof_chain.clone(), - Some(BlockSigned::from(&v_block.block).into()), + Some(BlockSigned::from(v_block.block.clone()).into()), ); sumeragi.broadcast_packet_to(msg, [current_topology.proxy_tail()]); @@ -613,9 +547,7 @@ fn handle_message( } } (Message::BlockCreated(block_created), Role::ProxyTail) => { - // NOTE: False positive from nursery - #[allow(clippy::iter_with_drain)] - if let Some(mut new_block) = vote_for_block(sumeragi, block_created) { + if let Some(mut new_block) = vote_for_block(sumeragi, current_topology, block_created) { // NOTE: Up until this point it was unknown which block is expected to be received, // therefore all the signatures (of any hash) were collected and will now be pruned add_signatures::(&mut new_block, voting_signatures.drain(..)); @@ -633,7 +565,7 @@ fn handle_message( let valid_signatures = current_topology.filter_signatures_by_roles(roles, &signatures); if let Some(voted_block) = voting_block.as_mut() { - let voting_block_hash = voted_block.block.partial_hash(); + let voting_block_hash = voted_block.block.payload().hash(); if hash == voting_block_hash { add_signatures::(voted_block, valid_signatures); @@ -674,24 +606,28 @@ fn process_message_independent( if cache_full || (deadline_reached && cache_non_empty) { let transactions = sumeragi.transaction_cache.clone(); - info!(txns=%transactions.len(), "Creating block..."); + info!(%addr, txns=%transactions.len(), "Creating block..."); // TODO: properly process triggers!
let mut new_wsv = sumeragi.wsv.clone(); let event_recommendations = Vec::new(); - let new_block = BlockBuilder { + let new_block = match BlockBuilder::new( transactions, + sumeragi.current_topology.clone(), event_recommendations, - view_change_index: current_view_change_index, - committed_with_topology: sumeragi.current_topology.clone(), - key_pair: sumeragi.key_pair.clone(), - wsv: &mut new_wsv, - } - .build(); + ) + .chain(current_view_change_index, &mut new_wsv) + .sign(sumeragi.key_pair.clone()) + { + Ok(block) => block, + Err(error) => { + error!(?error, "Failed to sign block"); + return; + } + }; - sumeragi.send_events(&new_block); if let Some(current_topology) = current_topology.is_consensus_required() { - info!(%addr, partial_hash=%new_block.partial_hash(), "Block created"); + info!(%addr, block_payload_hash=%new_block.payload().hash(), "Block created"); *voting_block = Some(VotingBlock::new(new_block.clone(), new_wsv)); let msg = MessagePacket::new( @@ -708,20 +644,13 @@ fn process_message_independent( Ok(committed_block) => { let msg = MessagePacket::new( view_change_proof_chain.clone(), - Some( - BlockCommitted::from( - Into::::into( - committed_block.clone(), - ), - ) - .into(), - ), + Some(BlockCommitted::from(committed_block.clone()).into()), ); sumeragi.broadcast_packet(msg); sumeragi.commit_block(committed_block, new_wsv); } - Err(err) => error!(%addr, role=%Role::Leader, ?err), + Err((_, error)) => error!(%addr, role=%Role::Leader, ?error), } } } @@ -738,12 +667,7 @@ fn process_message_independent( let msg = MessagePacket::new( view_change_proof_chain.clone(), - Some( - BlockCommitted::from(Into::::into( - committed_block.clone(), - )) - .into(), - ), + Some(BlockCommitted::from(committed_block.clone()).into()), ); let current_topology = current_topology @@ -767,7 +691,7 @@ fn process_message_independent( sumeragi.broadcast_packet_to( msg, current_topology - .sorted_peers + .ordered_peers .iter() .take(current_topology.min_votes_for_commit()), ); @@ -775,10 +699,10 @@ fn process_message_independent( } sumeragi.commit_block(committed_block, new_wsv); } - Err((block, err)) => { + Err((block, error)) => { // Restore the current voting block and continue the round *voting_block = Some(VotingBlock::voted_at(block, new_wsv, voted_at)); - trace!(?err, "Not enough signatures, waiting for more..."); + trace!(?error, "Not enough signatures, waiting for more..."); } } } @@ -787,25 +711,25 @@ fn process_message_independent( } } -// NOTE: False positive useless_let_if_seq from nursery -#[allow(clippy::too_many_arguments, clippy::useless_let_if_seq)] +#[allow(clippy::too_many_arguments)] fn reset_state( peer_id: &PeerId, pipeline_time: Duration, current_view_change_index: u64, old_view_change_index: &mut u64, - current_latest_block_height: u64, - old_latest_block_height: &mut u64, + old_latest_block_hash: &mut HashOf, + latest_block: &SignedBlock, // below is the state that gets reset. 
current_topology: &mut Topology, voting_block: &mut Option, - voting_signatures: &mut Vec>, + voting_signatures: &mut Vec>, round_start_time: &mut Instant, last_view_change_time: &mut Instant, view_change_time: &mut Duration, ) { let mut was_commit_or_view_change = false; - if current_latest_block_height != *old_latest_block_height { + let current_latest_block_hash = latest_block.hash(); + if current_latest_block_hash != *old_latest_block_hash { // Round is only restarted on a block commit, so that in the case of // a view change a new block is immediately created by the leader *round_start_time = Instant::now(); @@ -813,16 +737,21 @@ fn reset_state( *old_view_change_index = 0; } - while *old_view_change_index < current_view_change_index { - *old_view_change_index += 1; + if *old_view_change_index < current_view_change_index { error!(addr=%peer_id.address, "Rotating the entire topology."); - current_topology.rotate_all(); + *old_view_change_index = current_view_change_index; was_commit_or_view_change = true; } // Reset state for the next round. if was_commit_or_view_change { - *old_latest_block_height = current_latest_block_height; + *old_latest_block_hash = current_latest_block_hash; + + *current_topology = Topology::recreate_topology( + latest_block, + current_view_change_index, + current_topology.ordered_peers.iter().cloned().collect(), + ); *voting_block = None; voting_signatures.clear(); @@ -884,7 +813,11 @@ pub(crate) fn run( let mut should_sleep = false; let mut view_change_proof_chain = ProofChain::default(); let mut old_view_change_index = 0; - let mut old_latest_block_height = 0; + let mut old_latest_block_hash = sumeragi + .wsv + .latest_block_ref() + .expect("WSV must have blocks") + .hash(); // Duration after which a view change is suggested let mut view_change_time = sumeragi.pipeline_time(); // Instant when the current round started @@ -908,13 +841,7 @@ pub(crate) fn run( sumeragi .transaction_cache // Checking if transactions are in the blockchain is costly - .retain(|tx| { - let expired = sumeragi.queue.is_expired(tx); - if expired { - debug!(?tx, "Transaction expired") - } - expired - }); + .retain(|tx| !sumeragi.queue.is_expired(tx)); let mut expired_transactions = Vec::new(); sumeragi.queue.get_transactions_for_block( @@ -923,12 +850,7 @@ pub(crate) fn run( &mut sumeragi.transaction_cache, &mut expired_transactions, ); - sumeragi.send_events( - expired_transactions - .iter() - .map(expired_event) - .collect::>(), - ); + sumeragi.send_events(expired_transactions.iter().map(expired_event)); let current_view_change_index = prune_view_change_proofs_and_calculate_current_index( &sumeragi, @@ -940,8 +862,11 @@ pub(crate) fn run( sumeragi.pipeline_time(), current_view_change_index, &mut old_view_change_index, - sumeragi.wsv.height(), - &mut old_latest_block_height, + &mut old_latest_block_hash, + &sumeragi + .wsv + .latest_block_ref() + .expect("WSV must have blocks"), &mut sumeragi.current_topology, &mut voting_block, &mut voting_signatures, @@ -956,7 +881,7 @@ pub(crate) fn run( if let Some(VotingBlock { block, .. 
}) = voting_block.as_ref() { // NOTE: Suspecting the tail node because it hasn't yet committed a block produced by leader - warn!(peer_public_key=%sumeragi.peer_id.public_key, %role, block=%block.partial_hash(), "Block not committed in due time, requesting view change..."); + warn!(peer_public_key=%sumeragi.peer_id.public_key, %role, block=%block.payload().hash(), "Block not committed in due time, requesting view change..."); } else { // NOTE: Suspecting the leader node because it hasn't produced a block // If the current node has a transaction, the leader should have as well @@ -995,6 +920,30 @@ pub(crate) fn run( }, ); + // State could be changed after handling message so it is necessary to reset state before handling message independent step + let current_view_change_index = prune_view_change_proofs_and_calculate_current_index( + &sumeragi, + &mut view_change_proof_chain, + ); + + reset_state( + &sumeragi.peer_id, + sumeragi.pipeline_time(), + current_view_change_index, + &mut old_view_change_index, + &mut old_latest_block_hash, + &sumeragi + .wsv + .latest_block_ref() + .expect("WSV must have blocks"), + &mut sumeragi.current_topology, + &mut voting_block, + &mut voting_signatures, + &mut round_start_time, + &mut last_view_change_time, + &mut view_change_time, + ); + process_message_independent( &mut sumeragi, &mut voting_block, @@ -1008,16 +957,16 @@ pub(crate) fn run( fn add_signatures( block: &mut VotingBlock, - signatures: impl IntoIterator>, + signatures: impl IntoIterator>, ) { for signature in signatures { - if let Err(err) = block.block.add_signature(signature) { + if let Err(error) = block.block.add_signature(signature) { let err_msg = "Signature not valid"; if EXPECT_VALID { - error!(?err, err_msg); + error!(?error, err_msg); } else { - debug!(?err, err_msg); + debug!(?error, err_msg); } } } @@ -1037,54 +986,27 @@ fn expired_event(txn: &AcceptedTransaction) -> Event { fn vote_for_block( sumeragi: &Sumeragi, + topology: &Topology, BlockCreated { block }: BlockCreated, ) -> Option { - let block_hash = block.partial_hash(); + let block_hash = block.payload().hash(); let addr = &sumeragi.peer_id.address; let role = sumeragi.current_topology.role(&sumeragi.peer_id); trace!(%addr, %role, block_hash=%block_hash, "Block received, voting..."); let mut new_wsv = sumeragi.wsv.clone(); - let mut block = { - let span = span!(Level::TRACE, "block revalidation"); - let _enter = span.enter(); - - match block.revalidate(&mut new_wsv) { - Ok(()) => block, - Err(err) => { - warn!(%addr, %role, ?err); - return None; - } + let block = match ValidBlock::validate(block, topology, &mut new_wsv) { + Ok(block) => block, + Err((_, error)) => { + warn!(%addr, %role, ?error, "Block validation failed"); + return None; } }; - if sumeragi - .current_topology - .filter_signatures_by_roles(&[Role::Leader], block.retain_verified_signatures()) - .is_empty() - { - error!( - %addr, %role, leader=?sumeragi.current_topology.is_non_empty().map(|topology| &topology.leader().address), hash=%block.partial_hash(), - "The block is rejected as it is not signed by the leader." - ); - - return None; - } - - if block.header.committed_with_topology != sumeragi.current_topology.sorted_peers { - error!( - %addr, %role, block_topology=?block.header.committed_with_topology, my_topology=?sumeragi.current_topology, hash=%block.partial_hash(), - "The block is rejected as because the topology field is incorrect." 
- ); - - return None; - } - let signed_block = block .sign(sumeragi.key_pair.clone()) .expect("Block signing failed"); - sumeragi.send_events(&signed_block); Some(VotingBlock::new(signed_block, new_wsv)) } @@ -1125,7 +1047,7 @@ trait ApplyBlockStrategy { fn before_update_hook(sumeragi: &mut Sumeragi); /// Operation to invoke in kura to store block. - fn kura_store_block(kura: &Kura, block: VersionedCommittedBlock); + fn kura_store_block(kura: &Kura, block: CommittedBlock); } /// Commit new block strategy. Used during normal consensus rounds. @@ -1142,7 +1064,7 @@ impl ApplyBlockStrategy for NewBlockStrategy { } #[inline] - fn kura_store_block(kura: &Kura, block: VersionedCommittedBlock) { + fn kura_store_block(kura: &Kura, block: CommittedBlock) { kura.store_block(block) } } @@ -1159,20 +1081,20 @@ impl ApplyBlockStrategy for ReplaceTopBlockStrategy { } #[inline] - fn kura_store_block(kura: &Kura, block: VersionedCommittedBlock) { + fn kura_store_block(kura: &Kura, block: CommittedBlock) { kura.replace_top_block(block) } } enum BlockSyncOk { - CommitBlock(VersionedCommittedBlock, WorldStateView), - ReplaceTopBlock(VersionedCommittedBlock, WorldStateView), + CommitBlock(CommittedBlock, WorldStateView), + ReplaceTopBlock(CommittedBlock, WorldStateView), } #[derive(Debug)] enum BlockSyncError { - BlockNotValid(BlockRevalidationError), - SoftForkBlockNotValid(BlockRevalidationError), + BlockNotValid(BlockValidationError), + SoftForkBlockNotValid(BlockValidationError), SoftForkBlockSmallViewChangeIndex { peer_view_change_index: u64, block_view_change_index: u64, @@ -1184,249 +1106,321 @@ enum BlockSyncError { } fn handle_block_sync( - block: VersionedCommittedBlock, + block: SignedBlock, wsv: &WorldStateView, finalized_wsv: &WorldStateView, -) -> Result { - let block_height = block.as_v1().header.height; +) -> Result { + let block_height = block.payload().header.height; let wsv_height = wsv.height(); if wsv_height + 1 == block_height { // Normal branch for adding new block on top of current let mut new_wsv = wsv.clone(); - block - .revalidate(&mut new_wsv) - .map(|_| BlockSyncOk::CommitBlock(block, new_wsv)) - .map_err(BlockSyncError::BlockNotValid) + let topology = { + let last_committed_block = new_wsv + .latest_block_ref() + .expect("Not in genesis round so must have at least genesis block"); + let new_peers = new_wsv.peers_ids().clone(); + let view_change_index = block.payload().header().view_change_index; + Topology::recreate_topology(&last_committed_block, view_change_index, new_peers) + }; + ValidBlock::validate(block, &topology, &mut new_wsv) + .and_then(|block| { + block + .commit(&topology) + .map_err(|(block, err)| (block.into(), err)) + }) + .map(|block| BlockSyncOk::CommitBlock(block, new_wsv)) + .map_err(|(block, error)| (block, BlockSyncError::BlockNotValid(error))) } else if wsv_height == block_height && block_height > 1 { // Soft-fork on genesis block isn't possible // Soft fork branch for replacing current block with valid one let mut new_wsv = finalized_wsv.clone(); - block - .revalidate(&mut new_wsv) - .map_err(BlockSyncError::SoftForkBlockNotValid) - .and_then(|_| { + let topology = { + let last_committed_block = new_wsv + .latest_block_ref() + .expect("Not in genesis round so must have at least genesis block"); + let new_peers = new_wsv.peers_ids().clone(); + let view_change_index = block.payload().header().view_change_index; + Topology::recreate_topology(&last_committed_block, view_change_index, new_peers) + }; + ValidBlock::validate(block, &topology, &mut new_wsv) + 
.and_then(|block| { + block + .commit(&topology) + .map_err(|(block, err)| (block.into(), err)) + }) + .map_err(|(block, error)| (block, BlockSyncError::SoftForkBlockNotValid(error))) + .and_then(|block| { let peer_view_change_index = wsv.latest_block_view_change_index(); - let block_view_change_index = block.as_v1().header.view_change_index; + let block_view_change_index = block.payload().header.view_change_index; if peer_view_change_index < block_view_change_index { Ok(BlockSyncOk::ReplaceTopBlock(block, new_wsv)) } else { - Err(BlockSyncError::SoftForkBlockSmallViewChangeIndex { - peer_view_change_index, - block_view_change_index, - }) + Err(( + block.into(), + BlockSyncError::SoftForkBlockSmallViewChangeIndex { + peer_view_change_index, + block_view_change_index, + }, + )) } }) } else { // Error branch: other peer sent an irrelevant block - Err(BlockSyncError::BlockNotProperHeight { - peer_height: wsv_height, - block_height, - }) + Err(( + block, + BlockSyncError::BlockNotProperHeight { + peer_height: wsv_height, + block_height, + }, + )) } } #[cfg(test)] mod tests { - use std::str::FromStr; + use iroha_primitives::{unique_vec, unique_vec::UniqueVec}; + use tokio::test; use super::*; - use crate::smartcontracts::Registrable; + use crate::{query::store::LiveQueryStore, smartcontracts::Registrable}; - fn create_data_for_test() -> (WorldStateView, Arc, VersionedCommittedBlock) { + fn create_data_for_test( + topology: &Topology, + leader_key_pair: KeyPair, + ) -> (WorldStateView, Arc, SignedBlock) { // Predefined world state - let alice_id = AccountId::from_str("alice@wonderland").expect("Valid"); + let alice_id: AccountId = "alice@wonderland".parse().expect("Valid"); let alice_keys = KeyPair::generate().expect("Valid"); let account = Account::new(alice_id.clone(), [alice_keys.public_key().clone()]).build(&alice_id); - let domain_id = DomainId::from_str("wonderland").expect("Valid"); + let domain_id = "wonderland".parse().expect("Valid"); let mut domain = Domain::new(domain_id).build(&alice_id); assert!(domain.add_account(account).is_none()); - let world = World::with([domain], Vec::new()); + let world = World::with([domain], topology.ordered_peers.clone()); let kura = Kura::blank_kura_for_testing(); - let mut wsv = WorldStateView::new(world, Arc::clone(&kura)); + let query_handle = LiveQueryStore::test().start(); + let mut wsv = WorldStateView::new(world, Arc::clone(&kura), query_handle); // Create "genesis" block // Creating an instruction - let fail_box: InstructionBox = FailBox::new("Dummy isi").into(); + let fail_box: InstructionExpr = Fail::new("Dummy isi").into(); // Making two transactions that have the same instruction let tx = TransactionBuilder::new(alice_id.clone()) .with_instructions([fail_box]) .sign(alice_keys.clone()) .expect("Valid"); - let tx: AcceptedTransaction = - AcceptedTransaction::accept(tx, &wsv.transaction_validator().transaction_limits) - .map(Into::into) - .expect("Valid"); + let tx = AcceptedTransaction::accept(tx, &wsv.transaction_executor().transaction_limits) .expect("Valid"); // Creating a block of two identical transactions and validating it - let transactions = vec![tx.clone(), tx]; - let block = BlockBuilder { - transactions, - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair: alice_keys.clone(), - wsv: &mut wsv.clone(), - } - .build(); - - let genesis_block = VersionedCommittedBlock::from(block.commit_unchecked()); + let block = BlockBuilder::new(vec![tx.clone(), tx],
topology.clone(), Vec::new()) + .chain(0, &mut wsv) + .sign(leader_key_pair.clone()) + .expect("Block is valid"); - kura.store_block(genesis_block.clone()); - wsv.apply(&genesis_block).expect("Failed to apply block"); - - // Create block for testing purposes - // Creating an instruction - let asset_definition_id = AssetDefinitionId::from_str("xor#wonderland").expect("Valid"); - let create_asset_definition: InstructionBox = - RegisterBox::new(AssetDefinition::quantity(asset_definition_id)).into(); + let genesis = block.commit(topology).expect("Block is valid"); + wsv.apply(&genesis).expect("Failed to apply block"); + kura.store_block(genesis); // Making two transactions with different instructions - let tx = TransactionBuilder::new(alice_id) - .with_instructions([create_asset_definition]) + let create_asset_definition1 = RegisterExpr::new(AssetDefinition::quantity( + "xor1#wonderland".parse().expect("Valid"), + )); + let create_asset_definition2 = RegisterExpr::new(AssetDefinition::quantity( + "xor2#wonderland".parse().expect("Valid"), + )); + + let tx1 = TransactionBuilder::new(alice_id.clone()) + .with_instructions([create_asset_definition1]) .sign(alice_keys.clone()) .expect("Valid"); - let tx: AcceptedTransaction = - AcceptedTransaction::accept(tx, &wsv.transaction_validator().transaction_limits) - .map(Into::into) - .expect("Valid"); + let tx1 = AcceptedTransaction::accept(tx1, &wsv.transaction_executor().transaction_limits) + .map(Into::into) + .expect("Valid"); + let tx2 = TransactionBuilder::new(alice_id) + .with_instructions([create_asset_definition2]) + .sign(alice_keys) + .expect("Valid"); + let tx2 = AcceptedTransaction::accept(tx2, &wsv.transaction_executor().transaction_limits) + .map(Into::into) + .expect("Valid"); // Creating a block of two different transactions and validating it - let transactions = vec![tx.clone(), tx]; - let block = BlockBuilder { - transactions, - event_recommendations: Vec::new(), - view_change_index: 0, - committed_with_topology: Topology::new(Vec::new()), - key_pair: alice_keys, - wsv: &mut wsv.clone(), - } - .build(); - - let block = VersionedCommittedBlock::from(block.commit_unchecked()); + let block = BlockBuilder::new(vec![tx1, tx2], topology.clone(), Vec::new()) + .chain(0, &mut wsv.clone()) + .sign(leader_key_pair) + .expect("Block is valid"); - (wsv, kura, block) + (wsv, kura, block.into()) } #[test] #[allow(clippy::redundant_clone)] - fn block_sync_invalid_block() { - let (finalized_wsv, _, mut block) = create_data_for_test(); + async fn block_sync_invalid_block() { + let leader_key_pair = KeyPair::generate().unwrap(); + let topology = Topology::new(unique_vec![PeerId::new( + &"127.0.0.1:8080".parse().unwrap(), + leader_key_pair.public_key(), + )]); + let (finalized_wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); let wsv = finalized_wsv.clone(); // Malform block to make it invalid - block.as_mut_v1().transactions.clear(); + block.payload_mut().commit_topology.clear(); let result = handle_block_sync(block, &wsv, &finalized_wsv); - assert!(matches!(result, Err(BlockSyncError::BlockNotValid(_)))) + assert!(matches!(result, Err((_, BlockSyncError::BlockNotValid(_))))) } #[test] - fn block_sync_invalid_soft_fork_block() { - let (finalized_wsv, kura, mut block) = create_data_for_test(); + async fn block_sync_invalid_soft_fork_block() { + let leader_key_pair = KeyPair::generate().unwrap(); + let topology = Topology::new(unique_vec![PeerId::new( + &"127.0.0.1:8080".parse().unwrap(), + leader_key_pair.public_key(), + )]);
let (finalized_wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); let mut wsv = finalized_wsv.clone(); - kura.store_block(block.clone()); - wsv.apply(&block).expect("Failed to apply block"); + let validated_block = ValidBlock::validate(block.clone(), &topology, &mut wsv).unwrap(); + let committed_block = validated_block.commit(&topology).expect("Block is valid"); + wsv.apply_without_execution(&committed_block) + .expect("Failed to apply block"); + kura.store_block(committed_block); // Malform block to make it invalid - block.as_mut_v1().transactions.clear(); + block.payload_mut().commit_topology.clear(); let result = handle_block_sync(block, &wsv, &finalized_wsv); assert!(matches!( result, - Err(BlockSyncError::SoftForkBlockNotValid(_)) + Err((_, BlockSyncError::SoftForkBlockNotValid(_))) )) } #[test] #[allow(clippy::redundant_clone)] - fn block_sync_not_proper_height() { - let (finalized_wsv, _, mut block) = create_data_for_test(); + async fn block_sync_not_proper_height() { + let topology = Topology::new(UniqueVec::new()); + let leader_key_pair = KeyPair::generate().unwrap(); + let (finalized_wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); let wsv = finalized_wsv.clone(); // Change block height - block.as_mut_v1().header.height = 42; + block.payload_mut().header.height = 42; let result = handle_block_sync(block, &wsv, &finalized_wsv); assert!(matches!( result, - Err(BlockSyncError::BlockNotProperHeight { - peer_height: 1, - block_height: 42 - }) + Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height: 1, + block_height: 42 + } + )) )) } #[test] #[allow(clippy::redundant_clone)] - fn block_sync_commit_block() { - let (finalized_wsv, _, block) = create_data_for_test(); + async fn block_sync_commit_block() { + let leader_key_pair = KeyPair::generate().unwrap(); + let topology = Topology::new(unique_vec![PeerId::new( + &"127.0.0.1:8080".parse().unwrap(), + leader_key_pair.public_key(), + )]); + let (finalized_wsv, _, block) = create_data_for_test(&topology, leader_key_pair); let wsv = finalized_wsv.clone(); let result = handle_block_sync(block, &wsv, &finalized_wsv); assert!(matches!(result, Ok(BlockSyncOk::CommitBlock(_, _)))) } #[test] - fn block_sync_replace_top_block() { - let (finalized_wsv, kura, mut block) = create_data_for_test(); + async fn block_sync_replace_top_block() { + let leader_key_pair = KeyPair::generate().unwrap(); + let topology = Topology::new(unique_vec![PeerId::new( + &"127.0.0.1:8080".parse().unwrap(), + leader_key_pair.public_key(), + )]); + let (finalized_wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); let mut wsv = finalized_wsv.clone(); - kura.store_block(block.clone()); - wsv.apply(&block).expect("Failed to apply block to wsv"); + let validated_block = ValidBlock::validate(block.clone(), &topology, &mut wsv).unwrap(); + let committed_block = validated_block.commit(&topology).expect("Block is valid"); + wsv.apply_without_execution(&committed_block) + .expect("Failed to apply block"); + kura.store_block(committed_block); assert_eq!(wsv.latest_block_view_change_index(), 0); // Increase block view change index - block.as_mut_v1().header.view_change_index = 42; + block.payload_mut().header.view_change_index = 42; let result = handle_block_sync(block, &wsv, &finalized_wsv); assert!(matches!(result, Ok(BlockSyncOk::ReplaceTopBlock(_, _)))) } #[test] - fn block_sync_small_view_change_index() { - let (finalized_wsv, kura, mut block) = create_data_for_test(); + async fn 
block_sync_small_view_change_index() { + let leader_key_pair = KeyPair::generate().unwrap(); + let topology = Topology::new(unique_vec![PeerId::new( + &"127.0.0.1:8080".parse().unwrap(), + leader_key_pair.public_key(), + )]); + let (finalized_wsv, kura, mut block) = create_data_for_test(&topology, leader_key_pair); let mut wsv = finalized_wsv.clone(); // Increase block view change index - block.as_mut_v1().header.view_change_index = 42; + block.payload_mut().header.view_change_index = 42; - kura.store_block(block.clone()); - wsv.apply(&block).expect("Failed to apply block to wsv"); + let validated_block = ValidBlock::validate(block.clone(), &topology, &mut wsv).unwrap(); + let committed_block = validated_block.commit(&topology).expect("Block is valid"); + wsv.apply_without_execution(&committed_block) + .expect("Failed to apply block"); + kura.store_block(committed_block); assert_eq!(wsv.latest_block_view_change_index(), 42); // Decrease block view change index back - block.as_mut_v1().header.view_change_index = 0; + block.payload_mut().header.view_change_index = 0; let result = handle_block_sync(block, &wsv, &finalized_wsv); assert!(matches!( result, - Err(BlockSyncError::SoftForkBlockSmallViewChangeIndex { - peer_view_change_index: 42, - block_view_change_index: 0 - }) + Err(( + _, + BlockSyncError::SoftForkBlockSmallViewChangeIndex { + peer_view_change_index: 42, + block_view_change_index: 0 + } + )) )) } #[test] #[allow(clippy::redundant_clone)] - fn block_sync_genesis_block_do_not_replace() { - let (finalized_wsv, _, mut block) = create_data_for_test(); + async fn block_sync_genesis_block_do_not_replace() { + let topology = Topology::new(UniqueVec::new()); + let leader_key_pair = KeyPair::generate().unwrap(); + let (finalized_wsv, _, mut block) = create_data_for_test(&topology, leader_key_pair); let wsv = finalized_wsv.clone(); // Change block height and view change index // Soft-fork on genesis block is not possible - block.as_mut_v1().header.height = 1; - block.as_mut_v1().header.view_change_index = 42; + block.payload_mut().header.view_change_index = 42; + block.payload_mut().header.height = 1; let result = handle_block_sync(block, &wsv, &finalized_wsv); assert!(matches!( result, - Err(BlockSyncError::BlockNotProperHeight { - peer_height: 1, - block_height: 1, - }) + Err(( + _, + BlockSyncError::BlockNotProperHeight { + peer_height: 1, + block_height: 1, + } + )) )) } } diff --git a/core/src/sumeragi/message.rs b/core/src/sumeragi/message.rs index abe5e0619b4..5aa47890cdd 100644 --- a/core/src/sumeragi/message.rs +++ b/core/src/sumeragi/message.rs @@ -1,47 +1,13 @@ //! Contains message structures for p2p communication during consensus. 
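The main loop rewritten above and the message types below both lean on a single block lifecycle: `BlockBuilder::new(...).chain(...).sign(...)` yields a `ValidBlock`, and `commit`/`commit_with_signatures` upgrade it to a `CommittedBlock`. A condensed sketch of the leader-side happy path under that API, assuming only the constructors and methods visible in this diff, with error handling collapsed into `Option` and all inputs supplied by the caller:

// Sketch only, not part of the diff.
fn one_round(
    transactions: Vec<AcceptedTransaction>,
    topology: Topology,
    key_pair: KeyPair,
    view_change_index: u64,
    wsv: &WorldStateView,
) -> Option<(CommittedBlock, WorldStateView)> {
    // Transactions execute against a WSV clone; the canonical WSV is only
    // swapped in `update_state` once the block actually commits.
    let mut new_wsv = wsv.clone();
    let valid = BlockBuilder::new(transactions, topology.clone(), Vec::new())
        .chain(view_change_index, &mut new_wsv) // execute and chain onto the clone
        .sign(key_pair) // ValidBlock
        .ok()?;
    // On a single-peer topology the leader's signature alone suffices;
    // otherwise `commit` fails until the proxy tail collects enough votes,
    // which is the "Not enough signatures, waiting for more..." branch above.
    valid.commit(&topology).ok().map(|committed| (committed, new_wsv))
}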
-#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::module_name_repetitions -)] - -use iroha_crypto::{HashOf, SignatureOf, SignaturesOf}; -use iroha_data_model::block::VersionedCommittedBlock; +use iroha_crypto::{HashOf, SignaturesOf}; +use iroha_data_model::block::{BlockPayload, SignedBlock}; use iroha_macro::*; -use iroha_version::prelude::*; use parity_scale_codec::{Decode, Encode}; use super::view_change; -use crate::block::PendingBlock; - -declare_versioned_with_scale!(VersionedPacket 1..2, Debug, Clone, iroha_macro::FromVariant); - -impl VersionedPacket { - /// Convert `&`[`Self`] to V1 reference - pub const fn as_v1(&self) -> &MessagePacket { - match self { - Self::V1(v1) => v1, - } - } - - /// Convert `&mut` [`Self`] to V1 mutable reference - pub fn as_mut_v1(&mut self) -> &mut MessagePacket { - match self { - Self::V1(v1) => v1, - } - } - - /// Perform the conversion from [`Self`] to V1 - pub fn into_v1(self) -> MessagePacket { - match self { - Self::V1(v1) => v1, - } - } -} +use crate::block::{CommittedBlock, ValidBlock}; /// Helper structure, wrapping messages and view change proofs. -#[version_with_scale(version = 1, versioned_alias = "VersionedPacket")] #[derive(Debug, Clone, Decode, Encode)] pub struct MessagePacket { /// Proof of view change. As part of this message handling, all @@ -96,19 +62,14 @@ impl From for MessagePacket { #[non_exhaustive] pub struct BlockCreated { /// The corresponding block. - pub block: PendingBlock, + pub block: SignedBlock, } -impl From for BlockCreated { - fn from(block: PendingBlock) -> Self { - Self { block } - } -} - -impl BlockCreated { - /// Get hash of block. - pub fn hash(&self) -> HashOf { - self.block.partial_hash() +impl From for BlockCreated { + fn from(block: ValidBlock) -> Self { + Self { + block: block.into(), + } } } @@ -117,16 +78,19 @@ impl BlockCreated { #[non_exhaustive] pub struct BlockSigned { /// Hash of the block being signed. - pub hash: HashOf, + pub hash: HashOf, /// Set of signatures. - pub signatures: SignaturesOf, + pub signatures: SignaturesOf, } -impl From<&PendingBlock> for BlockSigned { - fn from(block: &PendingBlock) -> Self { +impl From for BlockSigned { + fn from(block: ValidBlock) -> Self { + let block_hash = block.payload().hash(); + let SignedBlock::V1(block) = block.into(); + Self { - hash: block.partial_hash(), - signatures: block.signatures.clone(), + hash: block_hash, + signatures: block.signatures, } } } @@ -136,21 +100,19 @@ impl From<&PendingBlock> for BlockSigned { #[non_exhaustive] pub struct BlockCommitted { /// Hash of the block being signed. - pub hash: iroha_data_model::block::PartialBlockHash, + pub hash: HashOf, /// Set of signatures. - pub signatures: SignaturesOf, + pub signatures: SignaturesOf, } -impl From for BlockCommitted { - fn from(block: VersionedCommittedBlock) -> Self { +impl From for BlockCommitted { + fn from(block: CommittedBlock) -> Self { + let block_hash = block.payload().hash(); + let SignedBlock::V1(block) = block.into(); + Self { - hash: block.partial_hash(), - signatures: block - .signatures() - .cloned() - .collect::>>() - .try_into() - .expect("Can't send a committed block message without signatures."), + hash: block_hash, + signatures: block.signatures, } } } @@ -160,18 +122,11 @@ impl From for BlockCommitted { #[non_exhaustive] pub struct BlockSyncUpdate { /// The corresponding block. 
- pub block: VersionedCommittedBlock, + pub block: SignedBlock, } -impl From for BlockSyncUpdate { - fn from(block: VersionedCommittedBlock) -> Self { +impl From for BlockSyncUpdate { + fn from(block: SignedBlock) -> Self { Self { block } } } - -impl BlockSyncUpdate { - /// Get hash of block. - pub fn hash(&self) -> HashOf { - self.block.hash() - } -} diff --git a/core/src/sumeragi/mod.rs b/core/src/sumeragi/mod.rs index 9e786c0ef1f..52951d205f8 100644 --- a/core/src/sumeragi/mod.rs +++ b/core/src/sumeragi/mod.rs @@ -1,11 +1,6 @@ //! Translates to Emperor. Consensus-related logic of Iroha. //! //! `Consensus` trait is now implemented only by `Sumeragi` for now. -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::{ fmt::{self, Debug, Formatter}, sync::{mpsc, Arc}, @@ -22,7 +17,7 @@ use iroha_telemetry::metrics::Metrics; use network_topology::{Role, Topology}; use tokio::sync::watch; -use crate::{handler::ThreadHandler, kura::BlockCount}; +use crate::{block::ValidBlock, handler::ThreadHandler, kura::BlockCount}; pub mod main_loop; pub mod message; @@ -33,11 +28,9 @@ use parking_lot::Mutex; use self::{ message::{Message, *}, - view_change::{Proof, ProofChain}, -}; -use crate::{ - block::*, kura::Kura, prelude::*, queue::Queue, EventsSender, IrohaNetwork, NetworkMessage, + view_change::ProofChain, }; +use crate::{kura::Kura, prelude::*, queue::Queue, EventsSender, IrohaNetwork, NetworkMessage}; /* The values in the following struct are not atomics because the code that @@ -135,7 +128,7 @@ impl SumeragiHandle { block_index += 1; let mut block_txs_accepted = 0; let mut block_txs_rejected = 0; - for tx in &block.as_v1().transactions { + for tx in &block.payload().transactions { if tx.error.is_none() { block_txs_accepted += 1; } else { @@ -173,9 +166,12 @@ impl SumeragiHandle { #[allow(clippy::cast_possible_truncation)] if let Some(timestamp) = wsv.genesis_timestamp() { // this will overflow in 584942417years. - self.metrics - .uptime_since_genesis_ms - .set((current_time().as_millis() - timestamp) as u64) + self.metrics.uptime_since_genesis_ms.set( + (current_time() - timestamp) + .as_millis() + .try_into() + .expect("Timestamp should fit into u64"), + ) }; self.metrics.connected_peers.set(online_peers_count); @@ -211,11 +207,19 @@ impl SumeragiHandle { view_change_proofs: msg.view_change_proofs, }) { self.metrics.dropped_messages.inc(); - error!(?error, "This peer is faulty. Incoming control messages have to be dropped due to low processing speed."); + error!( + ?error, + "This peer is faulty. \ + Incoming control messages have to be dropped due to low processing speed." + ); } } else if let Err(error) = self.message_sender.try_send(msg) { self.metrics.dropped_messages.inc(); - error!(?error, "This peer is faulty. Incoming messages have to be dropped due to low processing speed."); + error!( + ?error, + "This peer is faulty. \ + Incoming messages have to be dropped due to low processing speed." + ); } } @@ -223,6 +227,7 @@ impl SumeragiHandle { /// /// # Panics /// May panic if something is of during initialization which is bug. + #[allow(clippy::too_many_lines)] pub fn start( SumeragiStartArgs { configuration, @@ -240,48 +245,72 @@ impl SumeragiHandle { let skip_block_count = wsv.block_hashes.len(); let mut blocks_iter = (skip_block_count + 1..=block_count).map(|block_height| { - kura.get_block_by_height(block_height as u64) - .expect("Sumeragi should be able to load the block that was reported as presented. 
If not, the block storage was probably disconnected.") + kura.get_block_by_height(block_height as u64).expect( + "Sumeragi should be able to load the block that was reported as presented. \ + If not, the block storage was probably disconnected.", + ) }); + let mut current_topology = { + assert!(!configuration.trusted_peers.peers.is_empty()); + Topology::new(configuration.trusted_peers.peers.clone()) + }; + let block_iter_except_last = (&mut blocks_iter).take(block_count.saturating_sub(skip_block_count + 1)); for block in block_iter_except_last { - block.revalidate(&mut wsv).expect( - "The block should be valid in init. Blocks loaded from kura assumed to be valid", + if wsv.height() == 0 { + current_topology = Topology::new(block.payload().commit_topology.clone()); + wsv.world_mut().trusted_peers_ids = current_topology.ordered_peers.clone(); + } + + let new_topology = + Topology::recreate_topology(&block, 0, wsv.peers_ids().iter().cloned().collect()); + + current_topology.rotate_all_n(block.payload().header().view_change_index); + + let block = ValidBlock::validate(Clone::clone(&block), &current_topology, &mut wsv) + .expect("Kura blocks should be valid") + .commit(&current_topology) + .expect("Kura blocks should be valid"); + wsv.apply_without_execution(&block).expect( + "Block application in init should not fail. \ + Blocks loaded from kura assumed to be valid", ); - wsv.apply_without_execution(block.as_ref()) - .expect("Block application in init should not fail. Blocks loaded from kura assumed to be valid"); + current_topology = new_topology; } // finalized_wsv is one block behind let finalized_wsv = wsv.clone(); if let Some(latest_block) = blocks_iter.next() { - latest_block.revalidate(&mut wsv).expect( - "The block should be valid in init. Blocks loaded from kura assumed to be valid", + if wsv.height() == 0 { + current_topology = Topology::new(latest_block.payload().commit_topology.clone()); + wsv.world_mut().trusted_peers_ids = current_topology.ordered_peers.clone(); + } + + let new_topology = Topology::recreate_topology( + &latest_block, + 0, + wsv.peers_ids().iter().cloned().collect(), ); - wsv.apply_without_execution(latest_block.as_ref()) - .expect("Block application in init should not fail. Blocks loaded from kura assumed to be valid"); + + current_topology.rotate_all_n(latest_block.payload().header().view_change_index); + + let latest_block = + ValidBlock::validate(Clone::clone(&latest_block), &current_topology, &mut wsv) + .expect("Kura blocks should be valid") + .commit(&current_topology) + .expect("Kura blocks should be valid"); + wsv.apply_without_execution(&latest_block).expect( + "Block application in init should not fail. \ + Blocks loaded from kura assumed to be valid", + ); + current_topology = new_topology; } info!("Sumeragi has finished loading blocks and setting up the WSV"); - let current_topology = match wsv.height() { - 0 => { - assert!(!configuration.trusted_peers.peers.is_empty()); - Topology::new(configuration.trusted_peers.peers.clone()) - } - height => { - let block_ref = kura.get_block_by_height(height).expect("Sumeragi could not load block that was reported as present. 
Please check that the block storage was not disconnected."); - let mut topology = Topology { - sorted_peers: block_ref.as_v1().header.committed_with_topology.clone(), - }; - topology.rotate_set_a(); - topology - } - }; - let (public_wsv_sender, public_wsv_receiver) = watch::channel(wsv.clone()); let (public_finalized_wsv_sender, public_finalized_wsv_receiver) = watch::channel(finalized_wsv.clone()); @@ -361,14 +390,14 @@ pub struct VotingBlock { /// At what time has this peer voted for this block pub voted_at: Instant, /// Valid Block - pub block: PendingBlock, + pub block: ValidBlock, /// WSV after applying transactions to it pub new_wsv: WorldStateView, } impl VotingBlock { /// Construct new `VotingBlock` with current time. - pub fn new(block: PendingBlock, new_wsv: WorldStateView) -> VotingBlock { + pub fn new(block: ValidBlock, new_wsv: WorldStateView) -> VotingBlock { VotingBlock { block, voted_at: Instant::now(), @@ -377,13 +406,13 @@ impl VotingBlock { } /// Construct new `VotingBlock` with the given time. pub(crate) fn voted_at( - block: PendingBlock, + block: ValidBlock, new_wsv: WorldStateView, voted_at: Instant, ) -> VotingBlock { VotingBlock { - block, voted_at, + block, new_wsv, } } diff --git a/core/src/sumeragi/network_topology.rs b/core/src/sumeragi/network_topology.rs index cca11ce4dc6..05e92157d3e 100644 --- a/core/src/sumeragi/network_topology.rs +++ b/core/src/sumeragi/network_topology.rs @@ -1,17 +1,11 @@ //! Structures formalising the peer topology (e.g. which peers have which predefined roles). -#![allow( - clippy::new_without_default, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - use std::collections::HashSet; use derive_more::Display; -use iroha_crypto::{HashOf, PublicKey, SignatureOf, SignaturesOf}; -use iroha_data_model::{block::VersionedCommittedBlock, prelude::PeerId}; +use iroha_crypto::{PublicKey, SignatureOf}; +use iroha_data_model::{block::SignedBlock, prelude::PeerId}; use iroha_logger::trace; +use iroha_primitives::unique_vec::UniqueVec; /// The ordering of the peers which defines their roles in the current round of consensus. /// @@ -27,7 +21,7 @@ use iroha_logger::trace; #[derive(Debug, Clone, PartialEq, Eq)] pub struct Topology { /// Current order of peers. The roles of peers are defined based on this order. - pub(crate) sorted_peers: Vec<PeerId>, + pub(crate) ordered_peers: UniqueVec<PeerId>, } /// Topology with at least one peer @@ -44,30 +38,30 @@ pub struct ConsensusTopology<'topology> { impl Topology { /// Create a new topology. - pub fn new(peers: impl IntoIterator<Item = PeerId>) -> Self { + pub fn new(peers: UniqueVec<PeerId>) -> Self { Topology { - sorted_peers: peers.into_iter().collect(), + ordered_peers: peers, } } /// True, if the topology contains at least one peer and thus requires consensus pub fn is_non_empty(&self) -> Option<NonEmptyTopology> { - (!self.sorted_peers.is_empty()).then_some(NonEmptyTopology { topology: self }) + (!self.ordered_peers.is_empty()).then_some(NonEmptyTopology { topology: self }) } /// Is consensus required, aka are there more than 1 peer. pub fn is_consensus_required(&self) -> Option<ConsensusTopology> { - (self.sorted_peers.len() > 1).then_some(ConsensusTopology { topology: self }) + (self.ordered_peers.len() > 1).then_some(ConsensusTopology { topology: self }) } /// How many faulty peers can this topology tolerate. pub fn max_faults(&self) -> usize { - (self.sorted_peers.len().saturating_sub(1)) / 3 + (self.ordered_peers.len().saturating_sub(1)) / 3 } /// The required amount of votes to commit a block with this topology. 
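The fault-tolerance arithmetic here follows the usual BFT bound: `max_faults` above tolerates `f = (n - 1) / 3` faulty peers, and `min_votes_for_commit` just below requires `2f + 1` votes once `n > 3`. A standalone sketch of the same arithmetic; the small-topology fallback branch is truncated in this hunk, so requiring every peer there is an assumption:

```rust
fn max_faults(n: usize) -> usize {
    n.saturating_sub(1) / 3
}

fn min_votes_for_commit(n: usize) -> usize {
    if n > 3 {
        max_faults(n) * 2 + 1
    } else {
        // Assumed fallback: tiny topologies need every peer's vote.
        n
    }
}

fn main() {
    // 7 peers tolerate 2 faults and commit with 5 votes; 4 peers: 1 fault, 3 votes.
    assert_eq!((max_faults(7), min_votes_for_commit(7)), (2, 5));
    assert_eq!((max_faults(4), min_votes_for_commit(4)), (1, 3));
    assert_eq!(min_votes_for_commit(2), 2);
}
```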
pub fn min_votes_for_commit(&self) -> usize { - let len = self.sorted_peers.len(); + let len = self.ordered_peers.len(); if len > 3 { self.max_faults() * 2 + 1 } else { @@ -75,13 +69,13 @@ } } - /// Index of leader among `sorted_peers` + /// Index of leader among `ordered_peers` #[allow(clippy::unused_self)] // In order to be consistent with `proxy_tail_index` method fn leader_index(&self) -> usize { 0 } - /// Index of leader among `sorted_peers` + /// Index of proxy tail among `ordered_peers` fn proxy_tail_index(&self) -> usize { // NOTE: proxy tail is the last element from the set A so that's why it's `min_votes_for_commit - 1` self.min_votes_for_commit() - 1 @@ -94,7 +88,7 @@ roles: &[Role], signatures: I, ) -> Vec> { - let mut public_keys: HashSet<&PublicKey> = HashSet::with_capacity(self.sorted_peers.len()); + let mut public_keys: HashSet<&PublicKey> = HashSet::with_capacity(self.ordered_peers.len()); for role in roles { match (role, self.is_non_empty(), self.is_consensus_required()) { (Role::Leader, Some(topology), _) => { @@ -125,7 +119,7 @@ /// What role does this peer have in the topology. pub fn role(&self, peer_id: &PeerId) -> Role { - match self.sorted_peers.iter().position(|p| p == peer_id) { + match self.ordered_peers.iter().position(|p| p == peer_id) { Some(index) if index == self.leader_index() => Role::Leader, Some(index) if index < self.proxy_tail_index() => Role::ValidatingPeer, Some(index) if index == self.proxy_tail_index() => Role::ProxyTail, @@ -138,21 +132,24 @@ } /// Add or remove peers from the topology. - pub fn update_peer_list(&mut self, mut new_peers: HashSet<PeerId>) { - self.sorted_peers.retain(|peer| new_peers.remove(peer)); - self.sorted_peers.extend(new_peers); - } - - /// Rotate peers after each failed attempt to create a block. - pub fn rotate_all(&mut self) { - self.rotate_all_n(1); + pub fn update_peer_list(&mut self, new_peers: UniqueVec<PeerId>) { + self.modify_peers_directly(|peers| peers.retain(|peer| new_peers.contains(peer))); + self.ordered_peers.extend(new_peers); } /// Rotate peers n times where n is the number of failed attempts to create a block. - pub fn rotate_all_n(&mut self, n: usize) { - let len = self.sorted_peers.len(); + pub fn rotate_all_n(&mut self, n: u64) { + let len = self + .ordered_peers + .len() + .try_into() + .expect("`usize` should fit into `u64`"); if let Some(rem) = n.checked_rem(len) { - self.sorted_peers.rotate_left(rem); + let rem = rem.try_into().expect( + "`rem` is smaller than `usize::MAX`, because remainder is always smaller than divisor", + ); + + self.modify_peers_directly(|peers| peers.rotate_left(rem)); } } @@ -160,18 +157,19 @@ pub fn rotate_set_a(&mut self) { let rotate_at = self.min_votes_for_commit(); if rotate_at > 0 { - self.sorted_peers[..rotate_at].rotate_left(1); + self.modify_peers_directly(|peers| peers[..rotate_at].rotate_left(1)); } } /// Pull peers up in the topology to the top of set A while preserving local order. pub fn lift_up_peers(&mut self, to_lift_up: &[PublicKey]) { - self.sorted_peers - .sort_by_cached_key(|peer| !to_lift_up.contains(&peer.public_key)); + self.modify_peers_directly(|peers| { + peers.sort_by_cached_key(|peer| !to_lift_up.contains(&peer.public_key)); + }); } /// Perform sequence of actions after a block is committed. 
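Since `modify_peers_directly` above round-trips the `UniqueVec` through a plain `Vec`, rotation and lift-up reduce to slice manipulation, and `update_topology` just below composes them. A self-contained sketch using bare port numbers in place of `PeerId`s; the expected orderings mirror the tests further down in this file:

```rust
fn rotate_all_n(peers: &mut Vec<u16>, n: u64) {
    let len = peers.len() as u64;
    // `checked_rem` sidesteps a division by zero on an empty topology.
    if let Some(rem) = n.checked_rem(len) {
        peers.rotate_left(rem as usize);
    }
}

fn lift_up_peers(peers: &mut Vec<u16>, to_lift_up: &[u16]) {
    // The sort is stable, so lifted peers keep their relative order.
    peers.sort_by_cached_key(|peer| !to_lift_up.contains(peer));
}

fn main() {
    let mut peers: Vec<u16> = (0..7).collect();
    lift_up_peers(&mut peers, &[1, 2, 4, 6]);
    assert_eq!(peers, [1, 2, 4, 6, 0, 3, 5]);

    let mut peers: Vec<u16> = (0..7).collect();
    rotate_all_n(&mut peers, 9); // 9 % 7 == 2
    assert_eq!(peers, [2, 3, 4, 5, 6, 0, 1]);
}
```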
- pub fn update_topology(&mut self, block_signees: &[PublicKey], new_peers: HashSet) { + pub fn update_topology(&mut self, block_signees: &[PublicKey], new_peers: UniqueVec) { self.lift_up_peers(block_signees); self.rotate_set_a(); self.update_peer_list(new_peers); @@ -179,13 +177,14 @@ impl Topology { /// Recreate topology for given block and view change index pub fn recreate_topology( - block: &VersionedCommittedBlock, - view_change_index: usize, - new_peers: HashSet, + block: &SignedBlock, + view_change_index: u64, + new_peers: UniqueVec, ) -> Self { - let mut topology = Topology::new(block.as_v1().header().committed_with_topology.clone()); + let mut topology = Topology::new(block.payload().commit_topology.clone()); let block_signees = block .signatures() + .into_iter() .map(|s| s.public_key()) .cloned() .collect::>(); @@ -198,94 +197,48 @@ impl Topology { topology } - /// Check if block's signatures meet requirements for given topology. - /// - /// In order for block to be considered valid there should be at least $2f + 1$ signatures (including proxy tail and leader signature) where f is maximum number of faulty nodes. - /// For further information please refer to the [whitepaper](docs/source/iroha_2_whitepaper.md) section 2.8 consensus. - /// - /// # Errors - /// - Not enough signatures - /// - Missing proxy tail signature - /// - Missing leader signature - pub fn verify_signatures( - &self, - signatures: &mut SignaturesOf, - hash: HashOf, - ) -> Result<(), SignatureVerificationError> { - if self.is_consensus_required().is_none() { - return Ok(()); - } - - let _ = signatures.retain_verified_by_hash(hash); - - let votes_count = self - .filter_signatures_by_roles( - &[ - Role::ValidatingPeer, - Role::Leader, - Role::ProxyTail, - Role::ObservingPeer, - ], - signatures.iter(), - ) - .len(); - let min_votes_for_commit = self.min_votes_for_commit(); - if votes_count < min_votes_for_commit { - return Err(SignatureVerificationError::NotEnoughSignatures { - votes_count, - min_votes_for_commit, - }); - } + /// Modify [`ordered_peers`](Self::ordered_peers) directly as [`Vec`]. + fn modify_peers_directly(&mut self, f: impl FnOnce(&mut Vec)) { + let unique_peers = std::mem::take(&mut self.ordered_peers); - if self - .filter_signatures_by_roles(&[Role::Leader], signatures.iter()) - .is_empty() - { - return Err(SignatureVerificationError::LeaderMissing); - } + let mut peers_vec = Vec::from(unique_peers); + f(&mut peers_vec); - if self - .filter_signatures_by_roles(&[Role::ProxyTail], signatures.iter()) - .is_empty() - { - return Err(SignatureVerificationError::ProxyTailMissing); - } - - Ok(()) + self.ordered_peers = UniqueVec::from_iter(peers_vec); } } impl<'topology> NonEmptyTopology<'topology> { /// Get leader's [`PeerId`]. pub fn leader(&self) -> &'topology PeerId { - &self.topology.sorted_peers[self.topology.leader_index()] + &self.topology.ordered_peers[self.topology.leader_index()] } } impl<'topology> ConsensusTopology<'topology> { /// Get proxy tail's peer id. pub fn proxy_tail(&self) -> &'topology PeerId { - &self.topology.sorted_peers[self.topology.proxy_tail_index()] + &self.topology.ordered_peers[self.topology.proxy_tail_index()] } /// Get leader's [`PeerId`] pub fn leader(&self) -> &'topology PeerId { - &self.topology.sorted_peers[self.topology.leader_index()] + &self.topology.ordered_peers[self.topology.leader_index()] } /// Get validating [`PeerId`]s. 
pub fn validating_peers(&self) -> &'topology [PeerId] { - &self.sorted_peers[self.leader_index() + 1..self.proxy_tail_index()] + &self.ordered_peers[self.leader_index() + 1..self.proxy_tail_index()] } /// Get observing [`PeerId`]s. pub fn observing_peers(&self) -> &'topology [PeerId] { - &self.sorted_peers[self.proxy_tail_index() + 1..] + &self.ordered_peers[self.proxy_tail_index() + 1..] } /// Get voting [`PeerId`]s. pub fn voting_peers(&self) -> &'topology [PeerId] { - &self.sorted_peers[self.leader_index()..=self.proxy_tail_index()] + &self.ordered_peers[self.leader_index()..=self.proxy_tail_index()] } } @@ -304,60 +257,42 @@ pub enum Role { Undefined, } -/// Error during signature verification -#[derive(thiserror::Error, displaydoc::Display, Debug, Clone, Copy, PartialEq, Eq)] -pub enum SignatureVerificationError { - /// The block doesn't have enough valid signatures to be committed ({votes_count} out of {min_votes_for_commit}) - NotEnoughSignatures { - /// Current number of signatures - votes_count: usize, - /// Minimal required number of signatures - min_votes_for_commit: usize, - }, - /// The block doesn't have proxy tail signature - ProxyTailMissing, - /// The block doesn't have leader signature - LeaderMissing, +#[cfg(test)] +macro_rules! test_peers { + ($($id:literal),+$(,)?) => {{ + let mut iter = ::core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")); + test_peers![$($id),*: iter] + }}; + ($($id:literal),+$(,)?: $key_pair_iter:expr) => { + ::iroha_primitives::unique_vec![ + $(PeerId::new(&(([0, 0, 0, 0], $id).into()), $key_pair_iter.next().expect("Not enough key pairs").public_key())),+ + ] + }; } +#[cfg(test)] +pub(crate) use test_peers; + #[cfg(test)] mod tests { - use iroha_crypto::{KeyPair, SignaturesOf}; + use iroha_crypto::KeyPair; + use iroha_primitives::unique_vec; use super::*; - macro_rules! peers { - ($($id:literal),+$(,)?) 
=> {{ - let mut iter = core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")); - peers![$($id),*: iter] - }}; - ($($id:literal),+$(,)?: $key_pair_iter:expr) => { - vec![ - $(PeerId::new(&(([0, 0, 0, 0], $id).into()), $key_pair_iter.next().expect("Not enough key pairs").public_key())),+ - ] - }; - } - fn topology() -> Topology { - let peers = peers![0, 1, 2, 3, 4, 5, 6]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6]; Topology::new(peers) } fn extract_ports(topology: &Topology) -> Vec { topology - .sorted_peers + .ordered_peers .iter() .map(|peer| peer.address.port()) .collect() } - #[test] - fn rotate_all() { - let mut topology = topology(); - topology.rotate_all(); - assert_eq!(extract_ports(&topology), vec![1, 2, 3, 4, 5, 6, 0]) - } - #[test] fn rotate_set_a() { let mut topology = topology(); @@ -370,10 +305,10 @@ mod tests { let mut topology = topology(); // Will lift up 1, 2, 4, 6 let to_lift_up = &[ - topology.sorted_peers[1].public_key().clone(), - topology.sorted_peers[2].public_key().clone(), - topology.sorted_peers[4].public_key().clone(), - topology.sorted_peers[6].public_key().clone(), + topology.ordered_peers[1].public_key().clone(), + topology.ordered_peers[2].public_key().clone(), + topology.ordered_peers[4].public_key().clone(), + topology.ordered_peers[6].public_key().clone(), ]; topology.lift_up_peers(to_lift_up); assert_eq!(extract_ports(&topology), vec![1, 2, 4, 6, 0, 3, 5]) @@ -384,12 +319,12 @@ mod tests { let mut topology = topology(); // New peers will be 0, 2, 5, 7 let new_peers = { - let mut peers = HashSet::from([ - topology.sorted_peers[0].clone(), - topology.sorted_peers[5].clone(), - topology.sorted_peers[2].clone(), - ]); - peers.extend(peers![7]); + let mut peers = unique_vec![ + topology.ordered_peers[5].clone(), + topology.ordered_peers[0].clone(), + topology.ordered_peers[2].clone(), + ]; + peers.extend(test_peers![7]); peers }; topology.update_peer_list(new_peers); @@ -403,7 +338,7 @@ mod tests { .take(7) .collect::>(); let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; let topology = Topology::new(peers.clone()); let dummy = "value to sign"; @@ -445,7 +380,7 @@ mod tests { core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")) .take(7) .collect::>(); - let peers = Vec::new(); + let peers = UniqueVec::new(); let topology = Topology::new(peers); let dummy = "value to sign"; @@ -478,7 +413,7 @@ mod tests { .take(7) .collect::>(); let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0: key_pairs_iter]; + let peers = test_peers![0: key_pairs_iter]; let topology = Topology::new(peers.clone()); let dummy = "value to sign"; @@ -512,7 +447,7 @@ mod tests { .take(7) .collect::>(); let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1: key_pairs_iter]; + let peers = test_peers![0, 1: key_pairs_iter]; let topology = Topology::new(peers.clone()); let dummy = "value to sign"; @@ -547,7 +482,7 @@ mod tests { .take(7) .collect::>(); let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1, 2: key_pairs_iter]; + let peers = test_peers![0, 1, 2: key_pairs_iter]; let topology = Topology::new(peers.clone()); let dummy = "value to sign"; @@ -581,8 +516,8 @@ mod tests { #[test] fn roles() { - let peers = peers![0, 1, 2, 3, 4, 5, 6]; - let not_in_topology_peers = peers![7, 8, 9]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6]; + let not_in_topology_peers = 
test_peers![7, 8, 9]; let topology = Topology::new(peers.clone()); let expected_roles = [ Role::Leader, @@ -611,7 +546,7 @@ mod tests { #[test] fn proxy_tail() { - let peers = peers![0, 1, 2, 3, 4, 5, 6]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -625,7 +560,7 @@ mod tests { #[test] fn proxy_tail_empty() { - let peers = Vec::new(); + let peers = UniqueVec::new(); let topology = Topology::new(peers); assert_eq!( @@ -639,7 +574,7 @@ mod tests { #[test] fn proxy_tail_1() { - let peers = peers![0]; + let peers = test_peers![0]; let topology = Topology::new(peers); assert_eq!( @@ -653,7 +588,7 @@ mod tests { #[test] fn proxy_tail_2() { - let peers = peers![0, 1]; + let peers = test_peers![0, 1]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -667,7 +602,7 @@ mod tests { #[test] fn proxy_tail_3() { - let peers = peers![0, 1, 2]; + let peers = test_peers![0, 1, 2]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -681,7 +616,7 @@ mod tests { #[test] fn leader() { - let peers = peers![0, 1, 2, 3, 4, 5, 6]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -695,7 +630,7 @@ mod tests { #[test] fn leader_empty() { - let peers = Vec::new(); + let peers = UniqueVec::new(); let topology = Topology::new(peers); assert_eq!( @@ -709,7 +644,7 @@ mod tests { #[test] fn leader_1() { - let peers = peers![0]; + let peers = test_peers![0]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -723,7 +658,7 @@ mod tests { #[test] fn leader_2() { - let peers = peers![0, 1]; + let peers = test_peers![0, 1]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -737,7 +672,7 @@ mod tests { #[test] fn leader_3() { - let peers = peers![0, 1, 3]; + let peers = test_peers![0, 1, 3]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -751,7 +686,7 @@ mod tests { #[test] fn validating_peers() { - let peers = peers![0, 1, 2, 3, 4, 5, 6]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -765,7 +700,7 @@ mod tests { #[test] fn validating_peers_empty() { - let peers = Vec::new(); + let peers = UniqueVec::new(); let topology = Topology::new(peers); assert_eq!( @@ -779,7 +714,7 @@ mod tests { #[test] fn validating_peers_1() { - let peers = peers![0]; + let peers = test_peers![0]; let topology = Topology::new(peers); assert_eq!( @@ -793,7 +728,7 @@ mod tests { #[test] fn validating_peers_2() { - let peers = peers![0, 1]; + let peers = test_peers![0, 1]; let topology = Topology::new(peers); let empty_peer_slice: &[PeerId] = &[]; @@ -808,7 +743,7 @@ mod tests { #[test] fn validating_peers_3() { - let peers = peers![0, 1, 2]; + let peers = test_peers![0, 1, 2]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -822,7 +757,7 @@ mod tests { #[test] fn observing_peers() { - let peers = peers![0, 1, 2, 3, 4, 5, 6]; + let peers = test_peers![0, 1, 2, 3, 4, 5, 6]; let topology = Topology::new(peers.clone()); assert_eq!( @@ -836,7 +771,7 @@ mod tests { #[test] fn observing_peers_empty() { - let peers = Vec::new(); + let peers = UniqueVec::new(); let topology = Topology::new(peers); assert_eq!( @@ -850,7 +785,7 @@ mod tests { #[test] fn observing_peers_1() { - let peers = peers![0]; + let peers = test_peers![0]; let topology = Topology::new(peers); assert_eq!( @@ -864,7 +799,7 @@ mod tests { #[test] fn observing_peers_2() { - let peers = peers![0, 1]; + let peers = test_peers![0, 1]; let topology = 
Topology::new(peers); let empty_peer_slice: &[PeerId] = &[]; @@ -879,7 +814,7 @@ mod tests { #[test] fn observing_peers_3() { - let peers = peers![0, 1, 2]; + let peers = test_peers![0, 1, 2]; let topology = Topology::new(peers); let empty_peer_slice: &[PeerId] = &[]; @@ -891,125 +826,4 @@ mod tests { Some(empty_peer_slice) ); } - - #[test] - fn signature_verification_ok() { - let key_pairs = - core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")) - .take(7) - .collect::>(); - let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; - let topology = Topology::new(peers); - - let dummy = "value to sign"; - let mut signatures = key_pairs - .iter() - .map(|key_pair| SignatureOf::new(key_pair.clone(), &dummy).expect("Failed to sign")) - .collect::, _>>() - .expect("Failed to create `SignaturesOf`"); - - assert_eq!( - topology.verify_signatures(&mut signatures, HashOf::new(&dummy)), - Ok(()) - ); - } - - #[test] - fn signature_verification_consensus_not_required_ok() { - let key_pairs = - core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")) - .take(1) - .collect::>(); - let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0,: key_pairs_iter]; - let topology = Topology::new(peers); - - let dummy = "value to sign"; - let mut signatures = key_pairs - .iter() - .enumerate() - .map(|(_, key_pair)| { - SignatureOf::new(key_pair.clone(), &dummy).expect("Failed to sign") - }) - .collect::, _>>() - .expect("Failed to create `SignaturesOf`"); - - let result = topology.verify_signatures(&mut signatures, HashOf::new(&dummy)); - assert_eq!(result, Ok(())) - } - - /// Check requirement of having at least $2f + 1$ signatures in $3f + 1$ network - #[test] - fn signature_verification_not_enough_signatures() { - let key_pairs = - core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")) - .take(7) - .collect::>(); - let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; - let topology = Topology::new(peers); - - let dummy = "value to sign"; - let mut signatures = SignatureOf::new(key_pairs[0].clone(), &dummy) - .expect("Failed to sign") - .into(); - - let result = topology.verify_signatures(&mut signatures, HashOf::new(&dummy)); - assert_eq!( - result, - Err(SignatureVerificationError::NotEnoughSignatures { - votes_count: 1, - min_votes_for_commit: topology.min_votes_for_commit(), - }) - ) - } - - /// Check requirement of having leader signature - #[test] - fn signature_verification_miss_leader_signature() { - let key_pairs = - core::iter::repeat_with(|| KeyPair::generate().expect("Failed to generate key pair")) - .take(7) - .collect::>(); - let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; - let topology = Topology::new(peers); - - let dummy = "value to sign"; - let mut signatures = key_pairs - .iter() - .enumerate() - .filter(|(i, _)| *i != 0) // Skip leader - .map(|(_, key_pair)| SignatureOf::new(key_pair.clone(), &dummy).expect("Failed to sign")) - .collect::, _>>() - .expect("Failed to create `SignaturesOf`"); - - let result = topology.verify_signatures(&mut signatures, HashOf::new(&dummy)); - assert_eq!(result, Err(SignatureVerificationError::LeaderMissing)) - } - - /// Check requirement of having leader signature - #[test] - fn signature_verification_miss_proxy_tail_signature() { - let key_pairs = - core::iter::repeat_with(|| KeyPair::generate().expect("Failed 
to generate key pair")) - .take(7) - .collect::>(); - let mut key_pairs_iter = key_pairs.iter(); - let peers = peers![0, 1, 2, 3, 4, 5, 6: key_pairs_iter]; - let topology = Topology::new(peers); - - let dummy = "value to sign"; - let mut signatures = key_pairs - .iter() - .enumerate() - .filter(|(i, _)| *i != 4) // Skip proxy tail - .map(|(_, key_pair)| SignatureOf::new(key_pair.clone(), &dummy).expect("Failed to sign")) - .collect::, _>>() - .expect("Failed to create `SignaturesOf`"); - - let result = topology.verify_signatures(&mut signatures, HashOf::new(&dummy)); - assert_eq!(result, Err(SignatureVerificationError::ProxyTailMissing)) - } } diff --git a/core/src/sumeragi/view_change.rs b/core/src/sumeragi/view_change.rs index 0f593aba4b8..11f24b90cfd 100644 --- a/core/src/sumeragi/view_change.rs +++ b/core/src/sumeragi/view_change.rs @@ -1,17 +1,11 @@ //! Structures related to proofs and reasons of view changes. //! Where view change is a process of changing topology due to some faulty network behavior. -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - single_use_lifetimes -)] use std::collections::HashSet; use derive_more::{Deref, DerefMut}; use eyre::Result; -use iroha_crypto::{Hash, HashOf, KeyPair, PublicKey, Signature}; -use iroha_data_model::{block::VersionedCommittedBlock, prelude::PeerId}; +use iroha_crypto::{HashOf, KeyPair, PublicKey, SignatureOf, SignaturesOf}; +use iroha_data_model::{block::SignedBlock, prelude::PeerId}; use parity_scale_codec::{Decode, Encode}; use thiserror::Error; @@ -25,64 +19,71 @@ pub enum Error { ViewChangeNotFound, } -/// The proof of a view change. It needs to be signed by f+1 peers for proof to be valid and view change to happen. #[derive(Debug, Clone, Decode, Encode)] -pub struct Proof { +struct ProofPayload { /// Hash of the latest committed block. - pub latest_block_hash: Option>, + latest_block_hash: Option>, /// Within a round, what is the index of the view change this proof is trying to prove. - pub view_change_index: u64, + view_change_index: u64, +} + +/// The proof of a view change. It needs to be signed by f+1 peers for proof to be valid and view change to happen. +#[derive(Debug, Clone, Decode, Encode)] +pub struct SignedProof { + signatures: SignaturesOf, /// Collection of signatures from the different peers. - pub signatures: Vec, + payload: ProofPayload, } -impl Proof { - /// Produce a signature payload in the form of a [`Hash`] - pub fn signature_payload(&self) -> Hash { - let mut buf = [0_u8; Hash::LENGTH + std::mem::size_of::()]; - if let Some(hash) = self.latest_block_hash { - buf[..Hash::LENGTH].copy_from_slice(hash.as_ref()); - } - buf[Hash::LENGTH..].copy_from_slice(&self.view_change_index.to_le_bytes()); - // Now we hash the buffer to produce a payload that is completely - // different between view change proofs in the same sumeragi round. - Hash::new(buf) +/// Builder for proofs +#[repr(transparent)] +pub struct ProofBuilder(SignedProof); + +impl ProofBuilder { + /// Constructor from index. + pub fn new(latest_block_hash: Option>, view_change_index: u64) -> Self { + let proof = SignedProof { + payload: ProofPayload { + latest_block_hash, + view_change_index, + }, + signatures: [].into_iter().collect(), + }; + + Self(proof) } /// Sign this message with the peer's public and private key. - /// This way peers vote for changing the view (changing the roles of peers). 
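Per the `SignedProof` doc comment above, a view change proof becomes valid once f + 1 topology peers have signed it, i.e. strictly more than `max_faults` valid signatures. A minimal sketch of that counting rule; the exact comparison is cut off in this hunk, so the strict threshold here is an assumption, and the types are illustrative stand-ins:

```rust
use std::collections::HashSet;

type PublicKey = u64;

fn proof_is_valid(signers: &[PublicKey], topology: &HashSet<PublicKey>, max_faults: usize) -> bool {
    // Deduplicate and keep only signatures from peers actually in the topology.
    let valid: HashSet<u64> = signers.iter().copied().filter(|key| topology.contains(key)).collect();
    valid.len() > max_faults
}

fn main() {
    let topology: HashSet<PublicKey> = (0..7).collect(); // f = 2
    assert!(!proof_is_valid(&[0, 1], &topology, 2)); // two votes are not enough
    assert!(proof_is_valid(&[0, 1, 2], &topology, 2)); // f + 1 = 3 votes suffice
    assert!(!proof_is_valid(&[7, 8, 9], &topology, 2)); // outsiders never count
}
```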
/// /// # Errors /// Can fail during creation of signature - pub fn sign(&mut self, key_pair: KeyPair) -> Result<()> { - let signature = Signature::new(key_pair, self.signature_payload().as_ref())?; - self.signatures.push(signature); - Ok(()) + pub fn sign(mut self, key_pair: KeyPair) -> Result { + let signature = SignatureOf::new(key_pair, &self.0.payload)?; + self.0.signatures.insert(signature); + Ok(self.0) } +} +impl SignedProof { /// Verify the signatures of `other` and add them to this proof. - pub fn merge_signatures(&mut self, other: Vec) { - let signature_payload = self.signature_payload(); + fn merge_signatures(&mut self, other: SignaturesOf) { for signature in other { - if signature.verify(signature_payload.as_ref()).is_ok() - && !self.signatures.contains(&signature) - { - self.signatures.push(signature); + if signature.verify(&self.payload).is_ok() { + self.signatures.insert(signature); } } } /// Verify if the proof is valid, given the peers in `topology`. - pub fn verify(&self, peers: &[PeerId], max_faults: usize) -> bool { + fn verify(&self, peers: &[PeerId], max_faults: usize) -> bool { let peer_public_keys: HashSet<&PublicKey> = peers.iter().map(|peer_id| &peer_id.public_key).collect(); - let signature_payload = self.signature_payload(); let valid_count = self .signatures .iter() .filter(|signature| { - signature.verify(signature_payload.as_ref()).is_ok() + signature.verify(&self.payload).is_ok() && peer_public_keys.contains(signature.public_key()) }) .count(); @@ -97,7 +98,7 @@ impl Proof { /// Structure representing sequence of view change proofs. #[derive(Debug, Clone, Encode, Decode, Deref, DerefMut, Default)] -pub struct ProofChain(Vec); +pub struct ProofChain(Vec); impl ProofChain { /// Verify the view change proof chain. @@ -105,25 +106,26 @@ impl ProofChain { &self, peers: &[PeerId], max_faults: usize, - latest_block: Option>, + latest_block_hash: Option>, ) -> usize { self.iter() .enumerate() .take_while(|(i, proof)| { - proof.latest_block_hash == latest_block - && proof.view_change_index == (*i as u64) + proof.payload.latest_block_hash == latest_block_hash + && proof.payload.view_change_index == (*i as u64) && proof.verify(peers, max_faults) }) .count() } /// Remove invalid proofs from the chain. 
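`verify_with_state` above and `prune` below share one shape: walk the chain and keep only the prefix whose proofs match the local latest block hash and are numbered consecutively from zero. A toy version, with `(hash, index)` tuples standing in for `SignedProof`:

```rust
type Proof = (u64, u64); // (latest_block_hash, view_change_index), illustrative only

fn verified_prefix(chain: &[Proof], latest_block_hash: u64) -> usize {
    chain
        .iter()
        .enumerate()
        .take_while(|(i, (hash, index))| *hash == latest_block_hash && *index == *i as u64)
        .count()
}

fn main() {
    let chain = [(42, 0), (42, 1), (42, 5)];
    // The index gap at position 2 ends the valid prefix.
    assert_eq!(verified_prefix(&chain, 42), 2);
    // A mismatched block hash invalidates the whole chain.
    assert_eq!(verified_prefix(&chain, 7), 0);
}
```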
- pub fn prune(&mut self, latest_block: Option>) { + pub fn prune(&mut self, latest_block_hash: Option>) { let valid_count = self .iter() .enumerate() .take_while(|(i, proof)| { - proof.latest_block_hash == latest_block && proof.view_change_index == (*i as u64) + proof.payload.latest_block_hash == latest_block_hash + && proof.payload.view_change_index == (*i as u64) }) .count(); self.truncate(valid_count); @@ -134,19 +136,19 @@ impl ProofChain { /// # Errors /// - If proof latest block hash doesn't match peer latest block hash /// - If proof view change number differs from view change number - #[allow(clippy::expect_used, clippy::unwrap_in_result)] pub fn insert_proof( &mut self, peers: &[PeerId], max_faults: usize, - latest_block: Option>, - new_proof: Proof, + latest_block_hash: Option>, + new_proof: SignedProof, ) -> Result<(), Error> { - if new_proof.latest_block_hash != latest_block { + if new_proof.payload.latest_block_hash != latest_block_hash { return Err(Error::BlockHashMismatch); } - let next_unfinished_view_change = self.verify_with_state(peers, max_faults, latest_block); - if new_proof.view_change_index != (next_unfinished_view_change as u64) { + let next_unfinished_view_change = + self.verify_with_state(peers, max_faults, latest_block_hash); + if new_proof.payload.view_change_index != (next_unfinished_view_change as u64) { return Err(Error::ViewChangeNotFound); // We only care about the current view change that may or may not happen. } @@ -169,7 +171,7 @@ impl ProofChain { mut other: Self, peers: &[PeerId], max_faults: usize, - latest_block_hash: Option>, + latest_block_hash: Option>, ) -> Result<(), Error> { // Prune to exclude invalid proofs other.prune(latest_block_hash); diff --git a/core/src/tx.rs b/core/src/tx.rs index 2607c715e31..790d8942326 100644 --- a/core/src/tx.rs +++ b/core/src/tx.rs @@ -7,14 +7,6 @@ //! //! This is also where the actual execution of instructions, as well //! as various forms of validation are performed. -// TODO: Add full lifecycle docs. -#![allow( - clippy::new_without_default, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - use eyre::Result; use iroha_crypto::{HashOf, SignatureVerificationFail, SignaturesOf}; pub use iroha_data_model::prelude::*; @@ -31,19 +23,165 @@ use crate::{prelude::*, smartcontracts::wasm}; /// `AcceptedTransaction` — a transaction accepted by iroha peer. 
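The `len` module introduced below sizes an expression tree by counting nodes: leaves contribute one, composites add one to the sum of their children, and `If` pessimistically counts both branches (the TODO in that impl flags this). A toy version of the same recursion over a made-up three-variant tree:

```rust
enum Expr {
    Raw(u32),
    Add(Box<Expr>, Box<Expr>),
    If(Box<Expr>, Box<Expr>, Box<Expr>),
}

fn len(expr: &Expr) -> usize {
    match expr {
        Expr::Raw(_) => 1,
        Expr::Add(left, right) => len(left) + len(right) + 1,
        // Both branches are counted even though only one will run,
        // the same pessimistic choice the TODO points out.
        Expr::If(cond, then, otherwise) => len(cond) + len(then) + len(otherwise) + 1,
    }
}

fn main() {
    let sum = Expr::Add(Box::new(Expr::Raw(1)), Box::new(Expr::Raw(2)));
    assert_eq!(len(&sum), 3);
    let branch = Expr::If(Box::new(Expr::Raw(0)), Box::new(sum), Box::new(Expr::Raw(9)));
    assert_eq!(len(&branch), 6);
}
```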
#[derive(Debug, Clone, PartialEq, Eq)] // TODO: Inner field should be private to maintain invariants -pub struct AcceptedTransaction(pub(crate) VersionedSignedTransaction); +pub struct AcceptedTransaction(pub(crate) SignedTransaction); -/// Error type for transaction from [`VersionedSignedTransaction`] to [`AcceptedTransaction`] +/// Error type for transaction from [`SignedTransaction`] to [`AcceptedTransaction`] #[derive(Debug, FromVariant, thiserror::Error, displaydoc::Display)] pub enum AcceptTransactionFail { /// Failure during limits check TransactionLimit(#[source] TransactionLimitError), /// Failure during signature verification SignatureVerification(#[source] SignatureVerificationFail), + /// The genesis account can only sign transactions in the genesis block + UnexpectedGenesisAccountSignature, +} + +mod len { + use iroha_data_model::{expression::*, query::QueryBox, Value}; + + pub trait ExprLen { + fn len(&self) -> usize; + } + + impl> ExprLen for EvaluatesTo { + fn len(&self) -> usize { + self.expression.len() + } + } + + impl ExprLen for Expression { + fn len(&self) -> usize { + use Expression::*; + + match self { + Add(add) => add.len(), + Subtract(subtract) => subtract.len(), + Greater(greater) => greater.len(), + Less(less) => less.len(), + Equal(equal) => equal.len(), + Not(not) => not.len(), + And(and) => and.len(), + Or(or) => or.len(), + If(if_expression) => if_expression.len(), + Raw(raw) => raw.len(), + Query(query) => query.len(), + Contains(contains) => contains.len(), + ContainsAll(contains_all) => contains_all.len(), + ContainsAny(contains_any) => contains_any.len(), + Where(where_expression) => where_expression.len(), + ContextValue(context_value) => context_value.len(), + Multiply(multiply) => multiply.len(), + Divide(divide) => divide.len(), + Mod(modulus) => modulus.len(), + RaiseTo(raise_to) => raise_to.len(), + } + } + } + impl ExprLen for ContextValue { + fn len(&self) -> usize { + 1 + } + } + impl ExprLen for QueryBox { + fn len(&self) -> usize { + 1 + } + } + + impl ExprLen for Add { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Subtract { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Multiply { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for RaiseTo { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Divide { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Mod { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Greater { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Less { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Equal { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for And { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + impl ExprLen for Or { + fn len(&self) -> usize { + self.left.len() + self.right.len() + 1 + } + } + + impl ExprLen for Not { + fn len(&self) -> usize { + self.expression.len() + 1 + } + } + + impl ExprLen for Contains { + fn len(&self) -> usize { + self.collection.len() + self.element.len() + 1 + } + } + impl ExprLen for ContainsAll { + fn len(&self) -> usize { + self.collection.len() + self.elements.len() + 1 + } + } + impl ExprLen for ContainsAny { + fn len(&self) -> usize { + 
self.collection.len() + self.elements.len() + 1 + } + } + + impl ExprLen for If { + fn len(&self) -> usize { + // TODO: This is wrong because we don't evaluate both branches + self.condition.len() + self.then.len() + self.otherwise.len() + 1 + } + } + impl ExprLen for Where { + fn len(&self) -> usize { + self.expression.len() + self.values.values().map(EvaluatesTo::len).sum::() + 1 + } + } } -fn instruction_size(isi: &InstructionBox) -> usize { - use InstructionBox::*; +fn instruction_size(isi: &InstructionExpr) -> usize { + use len::ExprLen as _; + use InstructionExpr::*; match isi { Register(isi) => isi.object.len() + 1, @@ -77,15 +215,19 @@ impl AcceptedTransaction { Self(tx.0) } - /// Accept transaction. Transition from [`VersionedSignedTransaction`] to [`AcceptedTransaction`]. + /// Accept transaction. Transition from [`SignedTransaction`] to [`AcceptedTransaction`]. /// /// # Errors /// /// - if it does not adhere to limits pub fn accept( - transaction: VersionedSignedTransaction, + transaction: SignedTransaction, limits: &TransactionLimits, ) -> Result { + if *iroha_genesis::GENESIS_ACCOUNT_ID == transaction.payload().authority { + return Err(AcceptTransactionFail::UnexpectedGenesisAccountSignature); + } + match &transaction.payload().instructions { Executable::Instructions(instructions) => { let instruction_count: u64 = instructions @@ -132,7 +274,7 @@ impl AcceptedTransaction { } /// Transaction hash - pub fn hash(&self) -> HashOf { + pub fn hash(&self) -> HashOf { self.0.hash() } @@ -150,7 +292,7 @@ impl AcceptedTransaction { } } -impl From for VersionedSignedTransaction { +impl From for SignedTransaction { fn from(source: AcceptedTransaction) -> Self { source.0 } @@ -166,13 +308,13 @@ impl From for (AccountId, Executable) { /// /// Validation is skipped for genesis. #[derive(Clone, Copy)] -pub struct TransactionValidator { +pub struct TransactionExecutor { /// [`TransactionLimits`] field pub transaction_limits: TransactionLimits, } -impl TransactionValidator { - /// Construct [`TransactionValidator`] +impl TransactionExecutor { + /// Construct [`TransactionExecutor`] pub fn new(transaction_limits: TransactionLimits) -> Self { Self { transaction_limits } } @@ -188,8 +330,7 @@ impl TransactionValidator { &self, tx: AcceptedTransaction, wsv: &mut WorldStateView, - ) -> Result - { + ) -> Result { if let Err(rejection_reason) = self.validate_internal(tx.clone(), wsv) { return Err((tx.0, rejection_reason)); } @@ -204,10 +345,6 @@ impl TransactionValidator { ) -> Result<(), TransactionRejectionReason> { let authority = &tx.payload().authority; - if wsv.height() > 0 && *iroha_genesis::GENESIS_ACCOUNT_ID == *authority { - return Err(TransactionRejectionReason::UnexpectedGenesisAccountSignature); - } - if !wsv .domain(&authority.domain_id) .map_err(|_e| { @@ -227,7 +364,7 @@ impl TransactionValidator { let mut wsv_for_validation = wsv.clone(); debug!("Validating transaction: {:?}", tx); - Self::validate_with_runtime_validator(tx.clone(), &mut wsv_for_validation)?; + Self::validate_with_runtime_executor(tx.clone(), &mut wsv_for_validation)?; if let (authority, Executable::Wasm(bytes)) = tx.into() { self.validate_wasm(authority, &mut wsv_for_validation, bytes)? @@ -264,25 +401,25 @@ impl TransactionValidator { .map_err(TransactionRejectionReason::WasmExecution) } - /// Validate transaction with runtime validators. + /// Validate transaction with runtime executors. /// /// Note: transaction instructions will be executed on the given `wsv`. 
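Validation above runs against `wsv_for_validation`, a clone of the world state, so a rejected transaction cannot leave partial writes in the canonical copy. A minimal sketch of that copy-validate-swap pattern; the `State` type and closures are stand-ins, not the crate's API:

```rust
#[derive(Clone, Default, Debug, PartialEq)]
struct State {
    counter: u64,
}

/// Run `tx` against a scratch copy first; commit the copy only on success.
fn validate_then_apply(
    state: &mut State,
    tx: impl Fn(&mut State) -> Result<(), String>,
) -> Result<(), String> {
    let mut scratch = state.clone();
    tx(&mut scratch)?;
    *state = scratch;
    Ok(())
}

fn main() {
    let mut state = State::default();
    let ok = |s: &mut State| { s.counter += 1; Ok(()) };
    let fails = |s: &mut State| { s.counter += 100; Err("rejected".to_owned()) };

    validate_then_apply(&mut state, ok).unwrap();
    assert_eq!(state.counter, 1);
    // The failed transaction's writes are discarded with the scratch copy.
    assert!(validate_then_apply(&mut state, fails).is_err());
    assert_eq!(state.counter, 1);
}
```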
- fn validate_with_runtime_validator( + fn validate_with_runtime_executor( tx: AcceptedTransaction, wsv: &mut WorldStateView, ) -> Result<(), TransactionRejectionReason> { - let tx: VersionedSignedTransaction = tx.into(); + let tx: SignedTransaction = tx.into(); let authority = tx.payload().authority.clone(); - wsv.validator() - .clone() // Cloning validator is a cheap operation + wsv.executor() + .clone() // Cloning executor is a cheap operation .validate_transaction(wsv, &authority, tx) .map_err(|error| { if let ValidationFail::InternalError(msg) = &error { error!( error = msg, "Internal error occurred during transaction validation, \ - is Runtime Validator correct?" + is Runtime Executor correct?" ) } error.into() @@ -298,12 +435,12 @@ mod tests { fn if_instruction( c: impl Into, - then: InstructionBox, - otherwise: Option, - ) -> InstructionBox { + then: InstructionExpr, + otherwise: Option, + ) -> InstructionExpr { let condition: Expression = c.into(); let condition = EvaluatesTo::new_unchecked(condition); - Conditional { + ConditionalExpr { condition, then, otherwise, @@ -311,8 +448,8 @@ mod tests { .into() } - fn fail() -> InstructionBox { - FailBox { + fn fail() -> InstructionExpr { + Fail { message: String::default(), } .into() @@ -320,7 +457,7 @@ mod tests { #[test] fn len_empty_sequence() { - assert_eq!(instruction_size(&SequenceBox::new(vec![]).into()), 1); + assert_eq!(instruction_size(&SequenceExpr::new(vec![]).into()), 1); } #[test] @@ -333,7 +470,7 @@ mod tests { None, )]; - assert_eq!(instruction_size(&SequenceBox::new(instructions).into()), 4); + assert_eq!(instruction_size(&SequenceExpr::new(instructions).into()), 4); } #[test] @@ -350,6 +487,6 @@ mod tests { fail(), ]; - assert_eq!(instruction_size(&SequenceBox::new(instructions).into()), 7); + assert_eq!(instruction_size(&SequenceExpr::new(instructions).into()), 7); } } diff --git a/core/src/wsv.rs b/core/src/wsv.rs index 2a8b458361f..eaac7559502 100644 --- a/core/src/wsv.rs +++ b/core/src/wsv.rs @@ -1,12 +1,5 @@ //! This module provides the [`WorldStateView`] — an in-memory representation of the current blockchain //! state. -#![allow( - clippy::new_without_default, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - use std::{ borrow::Borrow, collections::{BTreeSet, HashMap}, @@ -24,34 +17,36 @@ use iroha_config::{ use iroha_crypto::HashOf; use iroha_data_model::{ account::AccountId, - block::{CommittedBlock, VersionedCommittedBlock}, + block::SignedBlock, events::notification::{TriggerCompletedEvent, TriggerCompletedOutcome}, isi::error::{InstructionExecutionError as Error, MathError}, parameter::Parameter, permission::PermissionTokenSchema, prelude::*, query::error::{FindError, QueryExecutionFail}, - trigger::action::ActionTrait, }; use iroha_logger::prelude::*; use iroha_primitives::small::SmallVec; use parking_lot::Mutex; +use range_bounds::RoleIdByAccountBounds; use serde::{ de::{DeserializeSeed, MapAccess, Visitor}, Deserializer, Serialize, }; use crate::{ + block::CommittedBlock, + executor::Executor, kura::Kura, + query::store::LiveQueryStoreHandle, smartcontracts::{ triggers::{ self, - set::{LoadedExecutable, LoadedWasm, Set as TriggerSet}, + set::{LoadedActionTrait, LoadedWasm, Set as TriggerSet}, }, wasm, Execute, }, - tx::TransactionValidator, - validator::Validator, + tx::TransactionExecutor, DomainsMap, Parameters, PeersIds, }; @@ -69,12 +64,14 @@ pub struct World { pub(crate) roles: crate::RolesMap, /// Permission tokens of an account. 
pub(crate) account_permission_tokens: crate::PermissionTokensMap, + /// Roles of an account. + pub(crate) account_roles: crate::AccountRolesSet, /// Registered permission token ids. pub(crate) permission_token_schema: PermissionTokenSchema, /// Triggers pub(crate) triggers: TriggerSet, - /// Runtime Validator - pub(crate) validator: Validator, + /// Runtime Executor + pub(crate) executor: Executor, } // Loader for [`Set`] @@ -168,9 +165,10 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, World> { let mut domains = None; let mut roles = None; let mut account_permission_tokens = None; + let mut account_roles = None; let mut permission_token_schema = None; let mut triggers = None; - let mut validator = None; + let mut executor = None; while let Some(key) = map.next_key::()? { match key.as_str() { @@ -189,14 +187,17 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, World> { "account_permission_tokens" => { account_permission_tokens = Some(map.next_value()?); } + "account_roles" => { + account_roles = Some(map.next_value()?); + } "permission_token_schema" => { permission_token_schema = Some(map.next_value()?); } "triggers" => { triggers = Some(map.next_value_seed(self.loader.cast::())?); } - "validator" => { - validator = Some(map.next_value_seed(self.loader.cast::())?); + "executor" => { + executor = Some(map.next_value_seed(self.loader.cast::())?); } _ => { /* Skip unknown fields */ } } @@ -212,13 +213,15 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, World> { account_permission_tokens: account_permission_tokens.ok_or_else(|| { serde::de::Error::missing_field("account_permission_tokens") })?, + account_roles: account_roles + .ok_or_else(|| serde::de::Error::missing_field("account_roles"))?, permission_token_schema: permission_token_schema.ok_or_else(|| { serde::de::Error::missing_field("permission_token_schema") })?, triggers: triggers .ok_or_else(|| serde::de::Error::missing_field("triggers"))?, - validator: validator - .ok_or_else(|| serde::de::Error::missing_field("validator"))?, + executor: executor + .ok_or_else(|| serde::de::Error::missing_field("executor"))?, }) } } @@ -231,9 +234,10 @@ impl<'de> DeserializeSeed<'de> for WasmSeed<'_, World> { "domains", "roles", "account_permission_tokens", + "account_roles", "permission_token_schema", "triggers", - "validator", + "executor", ], WorldVisitor { loader: &self }, ) @@ -247,19 +251,17 @@ impl World { } /// Creates a [`World`] with these [`Domain`]s and trusted [`PeerId`]s. - pub fn with(domains: D, trusted_peers_ids: P) -> Self + pub fn with(domains: D, trusted_peers_ids: PeersIds) -> Self where D: IntoIterator, - P: IntoIterator, { let domains = domains .into_iter() .map(|domain| (domain.id().clone(), domain)) .collect(); - let trusted_peers_ids = trusted_peers_ids.into_iter().collect(); World { - domains, trusted_peers_ids, + domains, ..World::new() } } @@ -273,9 +275,9 @@ pub struct WorldStateView { /// Configuration of World State View. pub config: Configuration, /// Blockchain. - pub block_hashes: Vec>, + pub block_hashes: Vec>, /// Hashes of transactions mapped onto block height where they stored - pub transactions: HashMap, u64>, + pub transactions: HashMap, u64>, /// Buffer containing events generated during `WorldStateView::apply`. Renewed on every block commit. #[serde(skip)] pub events_buffer: Vec, @@ -286,6 +288,9 @@ pub struct WorldStateView { /// Reference to Kura subsystem. #[serde(skip)] kura: Arc, + /// Handle to the [`LiveQueryStore`]. 
+ #[serde(skip)] + query_handle: LiveQueryStoreHandle, /// Temporary metrics buffer of amounts of any asset that has been transacted. #[serde(skip)] pub new_tx_amounts: Arc>>, @@ -295,6 +300,8 @@ pub struct WorldStateView { pub struct KuraSeed { /// Kura subsystem reference pub kura: Arc, + /// Handle to the [`LiveQueryStore`](crate::query::store::LiveQueryStore). + pub query_handle: LiveQueryStoreHandle, } impl<'de> DeserializeSeed<'de> for KuraSeed { @@ -357,6 +364,7 @@ impl<'de> DeserializeSeed<'de> for KuraSeed { transactions: transactions .ok_or_else(|| serde::de::Error::missing_field("transactions"))?, kura: self.loader.kura, + query_handle: self.loader.query_handle, engine, events_buffer: Vec::new(), new_tx_amounts: Arc::new(Mutex::new(Vec::new())), @@ -383,6 +391,7 @@ impl Clone for WorldStateView { new_tx_amounts: Arc::clone(&self.new_tx_amounts), engine: self.engine.clone(), kura: Arc::clone(&self.kura), + query_handle: self.query_handle.clone(), } } } @@ -392,12 +401,12 @@ impl WorldStateView { /// Construct [`WorldStateView`] with given [`World`]. #[must_use] #[inline] - pub fn new(world: World, kura: Arc) -> Self { + pub fn new(world: World, kura: Arc, query_handle: LiveQueryStoreHandle) -> Self { // Added to remain backward compatible with other code primary in tests let config = ConfigurationProxy::default() .build() .expect("Wsv proxy always builds"); - Self::from_configuration(config, world, kura) + Self::from_configuration(config, world, kura, query_handle) } /// Get `Account`'s `Asset`s @@ -411,6 +420,14 @@ impl WorldStateView { self.map_account(id, |account| account.assets.values()) } + /// Get [`Account`]'s [`RoleId`]s + pub fn account_roles(&self, id: &AccountId) -> impl Iterator { + self.world + .account_roles + .range(RoleIdByAccountBounds::new(id)) + .map(|role| &role.role_id) + } + /// Return a set of all permission tokens granted to this account. /// /// # Errors @@ -420,13 +437,13 @@ impl WorldStateView { &self, account_id: &AccountId, ) -> Result, FindError> { - let account = self.account(account_id)?; + self.account(account_id)?; let mut tokens = self .account_inherent_permission_tokens(account_id) .collect::>(); - for role_id in &account.roles { + for role_id in self.account_roles(account_id) { if let Some(role) = self.world.roles.get(role_id) { tokens.extend(role.permissions.iter()); } @@ -501,7 +518,7 @@ impl WorldStateView { fn process_trigger( &mut self, id: &TriggerId, - action: &dyn ActionTrait, + action: &dyn LoadedActionTrait, event: Event, ) -> Result<()> { use triggers::set::LoadedExecutable::*; @@ -530,6 +547,8 @@ impl WorldStateView { let mut succeed = Vec::::with_capacity(matched_ids.len()); let mut errors = Vec::new(); for (event, id) in matched_ids { + // Eliding the closure triggers a lifetime mismatch + #[allow(clippy::redundant_closure_for_method_calls)] let action = self .world .triggers @@ -583,7 +602,7 @@ impl WorldStateView { fn process_instructions( &mut self, - instructions: impl IntoIterator, + instructions: impl IntoIterator, authority: &AccountId, ) -> Result<()> { instructions.into_iter().try_for_each(|instruction| { @@ -612,8 +631,8 @@ impl WorldStateView { deprecated(note = "This function is to be used in testing only. 
") )] #[iroha_logger::log(skip_all, fields(block_height))] - pub fn apply(&mut self, block: &VersionedCommittedBlock) -> Result<()> { - self.execute_transactions(block.as_v1())?; + pub fn apply(&mut self, block: &CommittedBlock) -> Result<()> { + self.execute_transactions(block)?; debug!("All block transactions successfully executed"); self.apply_without_execution(block)?; @@ -623,21 +642,21 @@ impl WorldStateView { /// Apply transactions without actually executing them. /// It's assumed that block's transaction was already executed (as part of validation for example). - #[iroha_logger::log(skip_all, fields(block_height))] - pub fn apply_without_execution(&mut self, block: &VersionedCommittedBlock) -> Result<()> { - let hash = block.hash(); - let block = block.as_v1(); - iroha_logger::prelude::Span::current().record("block_height", block.header.height); - trace!("Applying block"); - let time_event = self.create_time_event(block)?; + #[iroha_logger::log(skip_all, fields(block_height = block.payload().header.height))] + pub fn apply_without_execution(&mut self, block: &CommittedBlock) -> Result<()> { + let block_hash = block.hash(); + trace!(%block_hash, "Applying block"); + + let time_event = self.create_time_event(block); self.events_buffer.push(Event::Time(time_event)); - let block_height = block.header().height; + let block_height = block.payload().header.height; block + .payload() .transactions .iter() .map(|tx| &tx.value) - .map(VersionedSignedTransaction::hash) + .map(SignedTransaction::hash) .for_each(|tx_hash| { self.transactions.insert(tx_hash, block_height); }); @@ -653,7 +672,7 @@ impl WorldStateView { ); } - self.block_hashes.push(hash); + self.block_hashes.push(block_hash); self.apply_parameters(); @@ -684,40 +703,38 @@ impl WorldStateView { } } - /// Get transaction validator - pub fn transaction_validator(&self) -> TransactionValidator { - TransactionValidator::new(self.config.transaction_limits) + /// Get transaction executor + pub fn transaction_executor(&self) -> TransactionExecutor { + TransactionExecutor::new(self.config.transaction_limits) } /// Get a reference to the latest block. Returns none if genesis is not committed. 
#[inline] - pub fn latest_block_ref(&self) -> Option> { + pub fn latest_block_ref(&self) -> Option> { self.kura .get_block_by_height(self.block_hashes.len() as u64) } /// Create time event using previous and current blocks - fn create_time_event(&self, block: &CommittedBlock) -> Result { - let prev_interval = self - .latest_block_ref() - .map(|latest_block| { - let header = &latest_block.as_v1().header; - header.timestamp.try_into().map(|since| TimeInterval { - since: Duration::from_millis(since), - length: Duration::from_millis(header.consensus_estimation), - }) - }) - .transpose()?; + fn create_time_event(&self, block: &CommittedBlock) -> TimeEvent { + let prev_interval = self.latest_block_ref().map(|latest_block| { + let header = &latest_block.payload().header; + + TimeInterval { + since: header.timestamp(), + length: header.consensus_estimation(), + } + }); let interval = TimeInterval { - since: Duration::from_millis(block.header.timestamp.try_into()?), - length: Duration::from_millis(block.header.consensus_estimation), + since: block.payload().header.timestamp(), + length: block.payload().header.consensus_estimation(), }; - Ok(TimeEvent { + TimeEvent { prev_interval, interval, - }) + } } /// Execute `block` transactions and store their hashes as well as @@ -727,7 +744,7 @@ impl WorldStateView { /// Fails if transaction instruction execution fails fn execute_transactions(&mut self, block: &CommittedBlock) -> Result<()> { // TODO: Should this block panic instead? - for tx in &block.transactions { + for tx in &block.payload().transactions { if tx.error.is_none() { self.process_executable( tx.payload().instructions(), @@ -765,33 +782,48 @@ impl WorldStateView { #[allow(clippy::missing_panics_doc)] pub fn asset_or_insert( &mut self, - id: &AssetId, + asset_id: AssetId, default_asset_value: impl Into, - ) -> Result { - if let Ok(asset) = self.asset(id) { - return Ok(asset); + ) -> Result<&mut Asset, Error> { + // Check that asset definition exists + { + let asset_definition_id = &asset_id.definition_id; + let asset_definition_domain_id = &asset_id.definition_id.domain_id; + let asset_definition_domain = self + .world + .domains + .get(asset_definition_domain_id) + .ok_or(FindError::Domain(asset_definition_domain_id.clone()))?; + asset_definition_domain + .asset_definitions + .get(asset_definition_id) + .ok_or(FindError::AssetDefinition(asset_definition_id.clone()))?; } - // This function is strictly infallible. 
- let asset = self - .account_mut(&id.account_id) - .map(|account| { - let asset = Asset::new(id.clone(), default_asset_value.into()); - assert!(account.add_asset(asset.clone()).is_none()); - asset - }) - .map_err(|err| { - iroha_logger::warn!(?err); - err - })?; - - self.emit_events(Some(AccountEvent::Asset(AssetEvent::Created(asset)))); - - self.asset(id).map_err(Into::into) + let account_id = &asset_id.account_id; + let account_domain = self + .world + .domains + .get_mut(&asset_id.account_id.domain_id) + .ok_or(FindError::Domain(asset_id.account_id.domain_id.clone()))?; + let account = account_domain + .accounts + .get_mut(account_id) + .ok_or(FindError::Account(account_id.clone()))?; + + Ok(account.assets.entry(asset_id.clone()).or_insert_with(|| { + let asset = Asset::new(asset_id, default_asset_value.into()); + Self::emit_events_impl( + &mut self.world.triggers, + &mut self.events_buffer, + Some(AccountEvent::Asset(AssetEvent::Created(asset.clone()))), + ); + asset + })) } /// Load all blocks in the block chain from disc - pub fn all_blocks(&self) -> impl DoubleEndedIterator> + '_ { + pub fn all_blocks(&self) -> impl DoubleEndedIterator> + '_ { let block_count = self.block_hashes.len() as u64; (1..=block_count).map(|height| { self.kura @@ -803,8 +835,8 @@ impl WorldStateView { /// Return a vector of blockchain blocks after the block with the given `hash` pub fn block_hashes_after_hash( &self, - hash: Option>, - ) -> Vec> { + hash: Option>, + ) -> Vec> { hash.map_or_else( || self.block_hashes.clone(), |block_hash| { @@ -830,7 +862,7 @@ impl WorldStateView { } /// Return an iterator over blockchain block hashes starting with the block of the given `height` - pub fn block_hashes_from_height(&self, height: usize) -> Vec> { + pub fn block_hashes_from_height(&self, height: usize) -> Vec> { self.block_hashes .iter() .skip(height.saturating_sub(1)) @@ -874,7 +906,6 @@ impl WorldStateView { /// /// # Errors /// Fails if there is no domain - #[allow(clippy::panic_in_result_fn)] pub fn map_domain<'wsv, T>( &'wsv self, id: &DomainId, @@ -899,7 +930,12 @@ impl WorldStateView { /// Construct [`WorldStateView`] with specific [`Configuration`]. #[inline] - pub fn from_configuration(config: Configuration, world: World, kura: Arc) -> Self { + pub fn from_configuration( + config: Configuration, + world: World, + kura: Arc, + query_handle: LiveQueryStoreHandle, + ) -> Self { Self { world, config, @@ -909,20 +945,21 @@ impl WorldStateView { new_tx_amounts: Arc::new(Mutex::new(Vec::new())), engine: wasm::create_engine(), kura, + query_handle, } } /// Returns [`Some`] milliseconds since the genesis block was /// committed, or [`None`] if it wasn't. #[inline] - pub fn genesis_timestamp(&self) -> Option { + pub fn genesis_timestamp(&self) -> Option { if self.block_hashes.is_empty() { None } else { let opt = self .kura .get_block_by_height(1) - .map(|genesis_block| genesis_block.as_v1().header.timestamp); + .map(|genesis_block| genesis_block.payload().header.timestamp()); if opt.is_none() { error!("Failed to get genesis block from Kura."); @@ -931,9 +968,9 @@ impl WorldStateView { } } - /// Check if this [`VersionedSignedTransaction`] is already committed or rejected. + /// Check if this [`SignedTransaction`] is already committed or rejected. 
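The rewritten `asset_or_insert` above leans on the map entry API: `entry().or_insert_with()` yields `&mut Asset` whether or not the asset already existed, and the closure is the single place where the creation event fires. A reduced sketch of that shape; string ids and a `Vec<String>` event buffer are stand-ins for the real types:

```rust
use std::collections::HashMap;

type AssetId = &'static str;

#[derive(Debug, PartialEq)]
struct Asset {
    value: u32,
}

fn asset_or_insert<'a>(
    assets: &'a mut HashMap<AssetId, Asset>,
    events: &mut Vec<String>,
    id: AssetId,
    default_value: u32,
) -> &'a mut Asset {
    assets.entry(id).or_insert_with(|| {
        // The "created" event is emitted only on first insertion.
        events.push(format!("AssetCreated({id})"));
        Asset { value: default_value }
    })
}

fn main() {
    let mut assets = HashMap::new();
    let mut events = Vec::new();
    asset_or_insert(&mut assets, &mut events, "rose#wonderland", 0).value += 5;
    // The second call finds the existing asset: no new event, value preserved.
    assert_eq!(asset_or_insert(&mut assets, &mut events, "rose#wonderland", 0).value, 5);
    assert_eq!(events.len(), 1);
}
```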
     #[inline]
-    pub fn has_transaction(&self, hash: HashOf<VersionedSignedTransaction>) -> bool {
+    pub fn has_transaction(&self, hash: HashOf<SignedTransaction>) -> bool {
         self.transactions.contains_key(&hash)
     }

@@ -944,7 +981,7 @@ impl WorldStateView {
     }

     /// Return the hash of the latest block
-    pub fn latest_block_hash(&self) -> Option<HashOf<VersionedCommittedBlock>> {
+    pub fn latest_block_hash(&self) -> Option<HashOf<SignedBlock>> {
         self.block_hashes.iter().nth_back(0).copied()
     }

@@ -952,11 +989,11 @@ impl WorldStateView {
     pub fn latest_block_view_change_index(&self) -> u64 {
         self.kura
             .get_block_by_height(self.height())
-            .map_or(0, |block| block.as_v1().header.view_change_index)
+            .map_or(0, |block| block.payload().header.view_change_index)
     }

     /// Return the hash of the block one before the latest block
-    pub fn previous_block_hash(&self) -> Option<HashOf<VersionedCommittedBlock>> {
+    pub fn previous_block_hash(&self) -> Option<HashOf<SignedBlock>> {
         self.block_hashes.iter().nth_back(1).copied()
     }

@@ -973,11 +1010,15 @@ impl WorldStateView {
         let account = domain
             .accounts
             .get(id)
-            .ok_or(QueryExecutionFail::Unauthorized)?;
+            .ok_or(FindError::Account(id.clone()))?;
         Ok(f(account))
     }

-    fn account(&self, id: &AccountId) -> Result<&Account, FindError> {
+    /// Get `Account` and return reference to it.
+    ///
+    /// # Errors
+    /// Fails if there is no domain or account
+    pub fn account(&self, id: &AccountId) -> Result<&Account, FindError> {
         self.domain(&id.domain_id).and_then(|domain| {
             domain
                 .accounts
@@ -1163,11 +1204,8 @@ impl WorldStateView {
         Ok(())
     }

-    /// Find a [`VersionedSignedTransaction`] by hash.
-    pub fn block_with_tx(
-        &self,
-        hash: &HashOf<VersionedSignedTransaction>,
-    ) -> Option<Arc<VersionedCommittedBlock>> {
+    /// Find a [`SignedBlock`] by hash.
+    pub fn block_with_tx(&self, hash: &HashOf<SignedTransaction>) -> Option<Arc<SignedBlock>> {
         let height = *self.transactions.get(hash)?;
         self.kura.get_block_by_height(height)
     }

@@ -1212,15 +1250,30 @@ impl WorldStateView {
         self.events_buffer.push(event.into());
     }

-    /// Get [`Validator`].
-    pub fn validator(&self) -> &Validator {
-        &self.world.validator
+    /// Get [`Executor`].
+    pub fn executor(&self) -> &Executor {
+        &self.world.executor
     }

     /// The function puts events produced by iterator into `events_buffer`.
     /// Events should be produced in the order of expanding scope: from specific to general.
     /// Example: account events before domain events.
     pub fn emit_events<I: IntoIterator<Item = T>, T: Into<WorldEvent>>(&mut self, world_events: I) {
+        Self::emit_events_impl(
+            &mut self.world.triggers,
+            &mut self.events_buffer,
+            world_events,
+        )
+    }
+
+    /// Implementation of [`Self::emit_events()`].
+    ///
+    /// Usable when you can't call [`Self::emit_events()`] due to mutable reference to self.
+    fn emit_events_impl<I: IntoIterator<Item = T>, T: Into<WorldEvent>>(
+        triggers: &mut TriggerSet,
+        events_buffer: &mut Vec<Event>,
+        world_events: I,
+    ) {
         let data_events: SmallVec<[DataEvent; 3]> = world_events
             .into_iter()
             .map(Into::into)
@@ -1228,10 +1281,9 @@ impl WorldStateView {
             .collect();

         for event in data_events.iter() {
-            self.world.triggers.handle_data_event(event.clone());
+            triggers.handle_data_event(event.clone());
         }
-        self.events_buffer
-            .extend(data_events.into_iter().map(Into::into));
+        events_buffer.extend(data_events.into_iter().map(Into::into));
     }

     /// Set new permission token schema.
@@ -1246,28 +1298,104 @@ impl WorldStateView {
             },
         )))
     }
+
+    /// Get reference to the [`LiveQueryStoreHandle`].
+    pub fn query_handle(&self) -> &LiveQueryStoreHandle {
+        &self.query_handle
+    }
+}
+
+/// Bounds for `range` queries
+mod range_bounds {
+    use core::ops::{Bound, RangeBounds};
+
+    use iroha_primitives::{cmpext::MinMaxExt, impl_as_dyn_key};
+
+    use super::*;
+    use crate::role::RoleIdWithOwner;
+
+    /// Key for range queries over account for roles
+    #[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]
+    pub struct RoleIdByAccount<'role> {
+        account_id: &'role AccountId,
+        role_id: MinMaxExt<&'role RoleId>,
+    }
+
+    /// Bounds for range queries over account for roles
+    pub struct RoleIdByAccountBounds<'role> {
+        start: RoleIdByAccount<'role>,
+        end: RoleIdByAccount<'role>,
+    }
+
+    impl<'role> RoleIdByAccountBounds<'role> {
+        /// Create range bounds for range queries of roles over account
+        pub fn new(account_id: &'role AccountId) -> Self {
+            Self {
+                start: RoleIdByAccount {
+                    account_id,
+                    role_id: MinMaxExt::Min,
+                },
+                end: RoleIdByAccount {
+                    account_id,
+                    role_id: MinMaxExt::Max,
+                },
+            }
+        }
+    }
+
+    impl<'role> RangeBounds<dyn AsRoleIdByAccount + 'role> for RoleIdByAccountBounds<'role> {
+        fn start_bound(&self) -> Bound<&(dyn AsRoleIdByAccount + 'role)> {
+            Bound::Excluded(&self.start)
+        }
+
+        fn end_bound(&self) -> Bound<&(dyn AsRoleIdByAccount + 'role)> {
+            Bound::Excluded(&self.end)
+        }
+    }
+
+    impl AsRoleIdByAccount for RoleIdWithOwner {
+        fn as_key(&self) -> RoleIdByAccount<'_> {
+            RoleIdByAccount {
+                account_id: &self.account_id,
+                role_id: (&self.role_id).into(),
+            }
+        }
+    }
+
+    impl_as_dyn_key! {
+        target: RoleIdWithOwner,
+        key: RoleIdByAccount<'_>,
+        trait: AsRoleIdByAccount
+    }
 }

 #[cfg(test)]
 mod tests {
-    #![allow(clippy::restriction)]
+    use iroha_primitives::unique_vec::UniqueVec;

     use super::*;
-    use crate::block::PendingBlock;
+    use crate::{
+        block::ValidBlock, query::store::LiveQueryStore, role::RoleIdWithOwner,
+        sumeragi::network_topology::Topology,
+    };

-    #[test]
-    fn get_block_hashes_after_hash() {
+    #[tokio::test]
+    async fn get_block_hashes_after_hash() {
         const BLOCK_CNT: usize = 10;

-        let mut block = PendingBlock::new_dummy().commit_unchecked();
+        let topology = Topology::new(UniqueVec::new());
+        let block = ValidBlock::new_dummy().commit(&topology).unwrap();
         let kura = Kura::blank_kura_for_testing();
-        let mut wsv = WorldStateView::new(World::default(), kura);
+        let query_handle = LiveQueryStore::test().start();
+        let mut wsv = WorldStateView::new(World::default(), kura, query_handle);

         let mut block_hashes = vec![];
         for i in 1..=BLOCK_CNT {
-            block.header.height = i as u64;
-            block.header.previous_block_hash = block_hashes.last().copied();
-            let block: VersionedCommittedBlock = block.clone().into();
+            let mut block = block.clone();
+
+            block.0.payload_mut().header.height = i as u64;
+            block.0.payload_mut().header.previous_block_hash = block_hashes.last().copied();
+
             block_hashes.push(block.hash());
             wsv.apply(&block).unwrap();
         }

@@ -1278,17 +1406,22 @@ mod tests {
             .eq(block_hashes.into_iter().skip(7)));
     }

-    #[test]
-    fn get_blocks_from_height() {
+    #[tokio::test]
+    async fn get_blocks_from_height() {
         const BLOCK_CNT: usize = 10;

-        let mut block = PendingBlock::new_dummy().commit_unchecked();
+        let topology = Topology::new(UniqueVec::new());
+        let block = ValidBlock::new_dummy().commit(&topology).unwrap();
         let kura = Kura::blank_kura_for_testing();
-        let mut wsv = WorldStateView::new(World::default(), kura.clone());
+        let query_handle = LiveQueryStore::test().start();
+        let mut wsv = WorldStateView::new(World::default(), kura.clone(), query_handle);

         for i in 1..=BLOCK_CNT {
-            block.header.height = i as u64;
-            let block: VersionedCommittedBlock = block.clone().into();
+            let mut block = block.clone();
+
+            let SignedBlock::V1(v1_block) = &mut block.0;
+            v1_block.payload.header.height = i as u64;
+
             wsv.apply(&block).unwrap();
             kura.store_block(block);
         }

@@ -1296,9 +1429,31 @@ mod tests {
         assert_eq!(
             &wsv.all_blocks()
                 .skip(7)
-                .map(|block| block.as_v1().header.height)
+                .map(|block| block.payload().header.height)
                 .collect::<Vec<_>>(),
             &[8, 9, 10]
         );
     }
+
+    #[test]
+    fn role_account_range() {
+        let account_id: AccountId = "alice@wonderland".parse().unwrap();
+        let roles = [
+            RoleIdWithOwner::new(account_id.clone(), "1".parse().unwrap()),
+            RoleIdWithOwner::new(account_id.clone(), "2".parse().unwrap()),
+            RoleIdWithOwner::new("bob@wonderland".parse().unwrap(), "3".parse().unwrap()),
+            RoleIdWithOwner::new("a@wonderland".parse().unwrap(), "4".parse().unwrap()),
+            RoleIdWithOwner::new("0@0".parse().unwrap(), "5".parse().unwrap()),
+            RoleIdWithOwner::new("1@1".parse().unwrap(), "6".parse().unwrap()),
+        ];
+        let map = BTreeSet::from(roles);
+
+        let range = map
+            .range(RoleIdByAccountBounds::new(&account_id))
+            .collect::<Vec<_>>();
+        assert_eq!(range.len(), 2);
+        for role in range {
+            assert_eq!(&role.account_id, &account_id);
+        }
+    }
 }
diff --git a/core/test_network/Cargo.toml b/core/test_network/Cargo.toml
index 295e225700a..22cbae6888a 100644
--- a/core/test_network/Cargo.toml
+++ b/core/test_network/Cargo.toml
@@ -25,5 +25,5 @@ rand = { workspace = true }
 tempfile = { workspace = true }
 tokio = { workspace = true, features = ["rt", "rt-multi-thread", "macros"] }
 unique_port = "0.2.1"
-parity-scale-codec = { version = "3.1.5", default-features = false }
+parity-scale-codec = { version = "3.6.5", default-features = false }
 serde_json = { workspace = true }
diff --git a/core/test_network/src/lib.rs b/core/test_network/src/lib.rs
index 83f27f29f8c..25ba5941fa8 100644
--- a/core/test_network/src/lib.rs
+++ b/core/test_network/src/lib.rs
@@ -1,15 +1,8 @@
 //! Module for starting peers and networks. Used only for tests
-#![allow(clippy::restriction, clippy::future_not_send)]
-
 use core::{fmt::Debug, str::FromStr as _, time::Duration};
 #[cfg(debug_assertions)]
 use std::sync::atomic::AtomicBool;
-use std::{
-    collections::{HashMap, HashSet},
-    path::Path,
-    sync::Arc,
-    thread,
-};
+use std::{collections::BTreeMap, path::Path, sync::Arc, thread};

 use eyre::Result;
 use futures::{prelude::*, stream::FuturesUnordered};
@@ -28,7 +21,11 @@ use iroha_data_model::{
 };
 use iroha_genesis::{GenesisNetwork, RawGenesisBlock};
 use iroha_logger::{Configuration as LoggerConfiguration, InstrumentFutures};
-use iroha_primitives::addr::{socket_addr, SocketAddr};
+use iroha_primitives::{
+    addr::{socket_addr, SocketAddr},
+    unique_vec,
+    unique_vec::UniqueVec,
+};
 use rand::seq::IteratorRandom;
 use serde_json::json;
 use tempfile::TempDir;
@@ -47,7 +44,9 @@ pub struct Network {
     /// Genesis peer which sends genesis block to everyone
     pub genesis: Peer,
     /// Peers excluding the `genesis` peer. Use [`Network::peers`] function to get all instead.
-    pub peers: HashMap<PeerId, Peer>,
+    ///
+    /// [`BTreeMap`] is used in order to have deterministic order of peers.
+    pub peers: BTreeMap<PeerId, Peer>,
 }

 /// Get a standardised key-pair from the hard-coded literals.
@@ -62,7 +61,7 @@ pub fn get_key_pair() -> KeyPair {
             .expect("Public key not in mulithash format"),
         PrivateKey::from_hex(
             Algorithm::Ed25519,
-            "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0".as_ref()
+            "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0"
         ).expect("Private key not hex encoded")
     ).expect("Key pair mismatch")
 }
@@ -84,10 +83,9 @@ impl TestGenesis for GenesisNetwork {
         RawGenesisBlock::from_path(manifest_dir.join("../../configs/peer/genesis.json"))
             .expect("Failed to deserialize genesis block from file");

-        let rose_definition_id = <AssetDefinition as Identifiable>::Id::from_str("rose#wonderland")
-            .expect("valid names");
-        let alice_id =
-            <Account as Identifiable>::Id::from_str("alice@wonderland").expect("valid names");
+        let rose_definition_id =
+            AssetDefinitionId::from_str("rose#wonderland").expect("valid names");
+        let alice_id = AccountId::from_str("alice@wonderland").expect("valid names");

         let mint_rose_permission = PermissionToken::new(
             "CanMintAssetsWithDefinition".parse().unwrap(),
@@ -105,8 +103,8 @@ impl TestGenesis for GenesisNetwork {
             "CanUnregisterDomain".parse().unwrap(),
             &json!({ "domain_id": DomainId::from_str("wonderland").unwrap() }),
         );
-        let upgrade_validator_permission =
-            PermissionToken::new("CanUpgradeValidator".parse().unwrap(), &json!(null));
+        let upgrade_executor_permission =
+            PermissionToken::new("CanUpgradeExecutor".parse().unwrap(), &json!(null));

         let first_transaction = genesis
             .first_transaction_mut()
@@ -117,10 +115,10 @@ impl TestGenesis for GenesisNetwork {
             unregister_any_peer_permission,
             unregister_any_role_permission,
             unregister_wonderland_domain,
-            upgrade_validator_permission,
+            upgrade_executor_permission,
         ] {
             first_transaction
-                .append_instruction(GrantBox::new(permission, alice_id.clone()).into());
+                .append_instruction(GrantExpr::new(permission, alice_id.clone()).into());
         }

         if submit_genesis {
@@ -181,10 +179,7 @@ impl Network {
         )
         .await
         .expect("Failed to init peers");
-        let client = Client::test(
-            &network.genesis.api_address,
-            &network.genesis.telemetry_address,
-        );
+        let client = Client::test(&network.genesis.api_address);
         (network, client)
     }

@@ -202,11 +197,11 @@ impl Network {
     /// Adds peer to network and waits for it to start block
     /// synchronization.
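+    ///
+    /// A minimal usage sketch (hypothetical; `network` is any already-running test [`Network`]):
+    ///
+    /// ```ignore
+    /// // Registers the new peer with the running network and returns a client pointed at it.
+    /// let (peer, client) = network.add_peer().await;
+    /// ```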
pub async fn add_peer(&self) -> (Peer, Client) { - let genesis_client = - Client::test(&self.genesis.api_address, &self.genesis.telemetry_address); + let genesis_client = Client::test(&self.genesis.api_address); let mut config = Configuration::test(); - config.sumeragi.trusted_peers.peers = self.peers().map(|peer| &peer.id).cloned().collect(); + config.sumeragi.trusted_peers.peers = + UniqueVec::from_iter(self.peers().map(|peer| &peer.id).cloned()); let peer = PeerBuilder::new() .with_configuration(config) @@ -216,12 +211,12 @@ impl Network { time::sleep(Configuration::pipeline_time() + Configuration::block_sync_gossip_time()).await; - let add_peer = RegisterBox::new(DataModelPeer::new(peer.id.clone())); + let add_peer = RegisterExpr::new(DataModelPeer::new(peer.id.clone())); genesis_client .submit(add_peer) .expect("Failed to add new peer."); - let client = Client::test(&peer.api_address, &peer.telemetry_address); + let client = Client::test(&peer.api_address); (peer, client) } @@ -242,7 +237,6 @@ impl Network { offline_peers: u32, start_port: Option, ) -> Result { - #[allow(clippy::expect_used)] let mut builders = core::iter::repeat_with(PeerBuilder::new) .enumerate() .map(|(n, builder)| { @@ -265,7 +259,7 @@ impl Network { let mut configuration = default_configuration.unwrap_or_else(Configuration::test); configuration.sumeragi.trusted_peers.peers = - peers.iter().map(|peer| peer.id.clone()).collect(); + UniqueVec::from_iter(peers.iter().map(|peer| peer.id.clone())); let mut genesis_peer = peers.remove(0); let genesis_builder = builders.remove(0).with_configuration(configuration.clone()); @@ -297,7 +291,7 @@ impl Network { peers: peers .into_iter() .map(|peer| (peer.id.clone(), peer)) - .collect::>(), + .collect::>(), }) } @@ -309,7 +303,7 @@ impl Network { /// Get active clients pub fn clients(&self) -> Vec { self.peers() - .map(|peer| Client::test(&peer.api_address, &peer.telemetry_address)) + .map(|peer| Client::test(&peer.api_address)) .collect() } @@ -357,8 +351,6 @@ pub struct Peer { pub api_address: SocketAddr, /// P2P address pub p2p_address: SocketAddr, - /// Telemetry address - pub telemetry_address: SocketAddr, /// The key-pair for the peer pub key_pair: KeyPair, /// Shutdown handle @@ -394,20 +386,19 @@ impl Peer { /// Returns per peer config with all addresses, keys, and id set up. 
fn get_config(&self, configuration: Configuration) -> Configuration { Configuration { - sumeragi: SumeragiConfiguration { + sumeragi: Box::new(SumeragiConfiguration { key_pair: self.key_pair.clone(), peer_id: self.id.clone(), - ..configuration.sumeragi - }, - torii: ToriiConfiguration { + ..*configuration.sumeragi + }), + torii: Box::new(ToriiConfiguration { p2p_addr: self.p2p_address.clone(), api_url: self.api_address.clone(), - telemetry_url: self.telemetry_address.clone(), - ..configuration.torii - }, - logger: LoggerConfiguration { - ..configuration.logger - }, + ..*configuration.torii + }), + logger: Box::new(LoggerConfiguration { + ..*configuration.logger + }), public_key: self.key_pair.public_key().clone(), private_key: self.key_pair.private_key().clone(), disable_panic_terminal_colors: true, @@ -431,7 +422,6 @@ impl Peer { "test-peer", p2p_addr = %self.p2p_address, api_addr = %self.api_address, - telemetry_addr = %self.telemetry_address ); let telemetry = iroha_logger::init(&configuration.logger).expect("Failed to initialize telemetry"); @@ -480,13 +470,11 @@ impl Peer { /// * If can't get a unique port for /// - `p2p_address` /// - `api_address` - /// - `telemetry_address` /// * If keypair generation fails pub fn new() -> Result { let key_pair = KeyPair::generate()?; let p2p_address = local_unique_port()?; let api_address = local_unique_port()?; - let telemetry_address = local_unique_port()?; let id = PeerId { address: p2p_address.clone(), public_key: key_pair.public_key().clone(), @@ -497,7 +485,6 @@ impl Peer { key_pair, p2p_address, api_address, - telemetry_address, shutdown, iroha: None, temp_dir: None, @@ -599,7 +586,6 @@ impl PeerBuilder { if let Some(port) = self.port.take() { peer.p2p_address = socket_addr!(127.0.0 .1: port); peer.api_address = socket_addr!(127.0.0 .1: port + 1); - peer.telemetry_address = socket_addr!(127.0.0 .1: port + 2); // prevent field desync peer.id.address = peer.p2p_address.clone(); } @@ -610,7 +596,7 @@ impl PeerBuilder { pub async fn start_with_peer(self, peer: &mut Peer) { let configuration = self.configuration.unwrap_or_else(|| { let mut config = Configuration::test(); - config.sumeragi.trusted_peers.peers = std::iter::once(peer.id.clone()).collect(); + config.sumeragi.trusted_peers.peers = unique_vec![peer.id.clone()]; config }); let genesis = match self.genesis { @@ -641,7 +627,7 @@ impl PeerBuilder { let peer = self.start().await; - let client = Client::test(&peer.api_address, &peer.telemetry_address); + let client = Client::test(&peer.api_address); time::sleep(Duration::from_millis( configuration.sumeragi.pipeline_time_ms(), @@ -684,24 +670,19 @@ pub trait TestConfiguration { /// Client configuration mocking trait. 
pub trait TestClientConfiguration { /// Creates test client configuration - fn test(api_url: &SocketAddr, telemetry_url: &SocketAddr) -> Self; + fn test(api_url: &SocketAddr) -> Self; } /// Client mocking trait pub trait TestClient: Sized { /// Create test client from api url - fn test(api_url: &SocketAddr, telemetry_url: &SocketAddr) -> Self; + fn test(api_url: &SocketAddr) -> Self; /// Create test client from api url and keypair - fn test_with_key(api_url: &SocketAddr, telemetry_url: &SocketAddr, keys: KeyPair) -> Self; + fn test_with_key(api_url: &SocketAddr, keys: KeyPair) -> Self; /// Create test client from api url, keypair, and account id - fn test_with_account( - api_url: &SocketAddr, - telemetry_url: &SocketAddr, - keys: KeyPair, - account_id: &AccountId, - ) -> Self; + fn test_with_account(api_url: &SocketAddr, keys: KeyPair, account_id: &AccountId) -> Self; /// Loop for events with filter and handler function fn for_each_event(self, event_filter: FilterBox, f: impl Fn(Result)); @@ -711,7 +692,7 @@ pub trait TestClient: Sized { /// # Errors /// If predicate is not satisfied, after maximum retries. fn submit_till( - &mut self, + &self, instruction: impl Instruction + Debug + Clone, request: R, f: impl Fn(::Target) -> bool, @@ -726,8 +707,8 @@ pub trait TestClient: Sized { /// # Errors /// If predicate is not satisfied, after maximum retries. fn submit_all_till( - &mut self, - instructions: Vec, + &self, + instructions: Vec, request: R, f: impl Fn(::Target) -> bool, ) -> eyre::Result<()> @@ -741,7 +722,7 @@ pub trait TestClient: Sized { /// # Errors /// If predicate is not satisfied after maximum retries. fn poll_request( - &mut self, + &self, request: R, f: impl Fn(::Target) -> bool, ) -> eyre::Result<()> @@ -755,7 +736,7 @@ pub trait TestClient: Sized { /// # Errors /// If predicate is not satisfied after maximum retries. 
fn poll_request_with_period( - &mut self, + &self, request: R, period: Duration, max_attempts: u32, @@ -780,7 +761,7 @@ impl TestRuntime for Runtime { impl TestConfiguration for Configuration { fn test() -> Self { let mut sample_proxy = - iroha::samples::get_config_proxy(HashSet::new(), Some(get_key_pair())); + iroha::samples::get_config_proxy(UniqueVec::new(), Some(get_key_pair())); let env_proxy = ConfigurationProxy::from_std_env().expect("Test env variables should parse properly"); let (public_key, private_key) = KeyPair::generate().unwrap().into(); @@ -801,39 +782,30 @@ impl TestConfiguration for Configuration { } impl TestClientConfiguration for ClientConfiguration { - fn test(api_url: &SocketAddr, telemetry_url: &SocketAddr) -> Self { + fn test(api_url: &SocketAddr) -> Self { let mut configuration = iroha_client::samples::get_client_config(&get_key_pair()); configuration.torii_api_url = format!("http://{api_url}") .parse() .expect("Should be valid url"); - configuration.torii_telemetry_url = format!("http://{telemetry_url}") - .parse() - .expect("Should be valid url"); configuration } } impl TestClient for Client { - fn test(api_url: &SocketAddr, telemetry_url: &SocketAddr) -> Self { - Client::new(&ClientConfiguration::test(api_url, telemetry_url)) - .expect("Invalid client configuration") + fn test(api_url: &SocketAddr) -> Self { + Client::new(&ClientConfiguration::test(api_url)).expect("Invalid client configuration") } - fn test_with_key(api_url: &SocketAddr, telemetry_url: &SocketAddr, keys: KeyPair) -> Self { - let mut configuration = ClientConfiguration::test(api_url, telemetry_url); + fn test_with_key(api_url: &SocketAddr, keys: KeyPair) -> Self { + let mut configuration = ClientConfiguration::test(api_url); let (public_key, private_key) = keys.into(); configuration.public_key = public_key; configuration.private_key = private_key; Client::new(&configuration).expect("Invalid client configuration") } - fn test_with_account( - api_url: &SocketAddr, - telemetry_url: &SocketAddr, - keys: KeyPair, - account_id: &AccountId, - ) -> Self { - let mut configuration = ClientConfiguration::test(api_url, telemetry_url); + fn test_with_account(api_url: &SocketAddr, keys: KeyPair, account_id: &AccountId) -> Self { + let mut configuration = ClientConfiguration::test(api_url); configuration.account_id = account_id.clone(); let (public_key, private_key) = keys.into(); configuration.public_key = public_key; @@ -851,7 +823,7 @@ impl TestClient for Client { } fn submit_till( - &mut self, + &self, instruction: impl Instruction + Debug + Clone, request: R, f: impl Fn(::Target) -> bool, @@ -867,8 +839,8 @@ impl TestClient for Client { } fn submit_all_till( - &mut self, - instructions: Vec, + &self, + instructions: Vec, request: R, f: impl Fn(::Target) -> bool, ) -> eyre::Result<()> @@ -883,7 +855,7 @@ impl TestClient for Client { } fn poll_request_with_period( - &mut self, + &self, request: R, period: Duration, max_attempts: u32, @@ -906,7 +878,7 @@ impl TestClient for Client { } fn poll_request( - &mut self, + &self, request: R, f: impl Fn(::Target) -> bool, ) -> eyre::Result<()> diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml index f8095874243..d3f9684b8b2 100644 --- a/crypto/Cargo.toml +++ b/crypto/Cargo.toml @@ -7,13 +7,35 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [features] default = ["std"] # Enable static linkage of the rust standard library. 
# Please refer to https://docs.rust-embedded.org/book/intro/no-std.html
-std = ["ursa"]
-# Force static linking
-vendored = ["openssl-sys"]
+std = [
+    "dep:blake2",
+    "dep:digest",
+    "dep:sha2",
+    "dep:hkdf",
+    "dep:amcl",
+    "dep:amcl_wrapper",
+    "dep:signature",
+    "dep:ed25519-dalek",
+    "dep:curve25519-dalek",
+    "dep:x25519-dalek",
+    "dep:rand",
+    "dep:rand_chacha",
+    "dep:zeroize",
+    "dep:arrayref",
+    "dep:aead",
+    "dep:chacha20poly1305",
+    "dep:elliptic-curve",
+    "dep:k256",
+    "dep:thiserror",
+    "displaydoc/std",
+]
 # Replace structures and methods with FFI equivalents to facilitate dynamic linkage (mainly used in smartcontracts)
 #ffi_import = ["iroha_ffi", "iroha_primitives/ffi_import"]

@@ -31,10 +53,42 @@ parity-scale-codec = { workspace = true, features = ["derive", "full"] }
 serde = { workspace = true, features = ["derive"] }
 serde_with = { workspace = true, features = ["macros"] }
 hex = { workspace = true, features = ["alloc", "serde"] }
-openssl-sys = { version = "0.9.80", features = ["vendored"], optional = true }
-ursa = { workspace = true, optional = true }
 getset = { workspace = true }

+thiserror = { version = "1.0.50", optional = true }
+displaydoc = { version = "0.2.4", default-features = false }
+
+digest = { version = "0.10.7", optional = true }
+blake2 = { version = "0.10.6", optional = true }
+sha2 = { version = "0.10.8", optional = true }
+hkdf = { version = "0.12.3", optional = true }
+amcl = { version = "0.2.0", optional = true, default-features = false, features = ["secp256k1"] }
+amcl_wrapper = { version = "0.4.0", optional = true }
+
+signature = { version = "2.1.0", optional = true }
+ed25519-dalek = { version = "2.0.0", optional = true, features = ["rand_core"] }
+curve25519-dalek = { version = "4.1.1", optional = true }
+x25519-dalek = { version = "2.0.0", optional = true, features = ["static_secrets"] }
+
+rand = { workspace = true, optional = true }
+rand_chacha = { version = "0.3.1", optional = true }
+
+zeroize = { version = "1.6.0", optional = true }
+arrayref = { version = "0.3.7", optional = true }
+
+aead = { version = "0.5.2", optional = true }
+chacha20poly1305 = { version = "0.10.1", optional = true }
+
+elliptic-curve = { version = "0.13.6", optional = true }
+k256 = { version = "0.13.1", optional = true, features = ["ecdsa", "sha256"] }
+
 [dev-dependencies]
 hex-literal = { workspace = true }
 serde_json = { workspace = true }
+
+# these crypto libraries are not used to implement actual crypto algorithms
+# but to test some of the primitives against them
+secp256k1 = { version = "0.28.0", features = ["rand", "serde"] }
+libsodium-sys-stable = "1.20.3"
+openssl = { version = "0.10.59", features = ["vendored"] }
diff --git a/crypto/build.rs b/crypto/build.rs
index c6b3b4910d9..364331891b5 100644
--- a/crypto/build.rs
+++ b/crypto/build.rs
@@ -6,7 +6,6 @@ fn main() {
     let ffi_import = std::env::var_os("CARGO_FEATURE_FFI_IMPORT").is_some();
     let ffi_export = std::env::var_os("CARGO_FEATURE_FFI_EXPORT").is_some();

-    #[allow(clippy::print_stderr)]
     if ffi_import && ffi_export {
         eprintln!("cargo:warning=Features `ffi_export` and `ffi_import` are mutually exclusive");
         eprintln!("cargo:warning=When both active, `ffi_import` feature takes precedence");
diff --git a/crypto/src/encryption/chacha20poly1305.rs b/crypto/src/encryption/chacha20poly1305.rs
new file mode 100644
index 00000000000..5def1a5932a
--- /dev/null
+++ b/crypto/src/encryption/chacha20poly1305.rs
@@ -0,0 +1,125 @@
+use aead::{
+    generic_array::{
+        typenum::{U0, U12, U16, U32, U36},
+        GenericArray,
+    },
+    Aead, AeadCore, Error, KeyInit, KeySizeUser, Payload,
+};
+use chacha20poly1305::ChaCha20Poly1305 as SysChaCha20Poly1305;
+
+use super::Encryptor;
+
+/// `ChaCha20Poly1305` is a symmetric encryption algorithm that uses the `ChaCha20` stream cipher
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub struct ChaCha20Poly1305 {
+    key: GenericArray<u8, U32>,
+}
+
+impl Encryptor for ChaCha20Poly1305 {
+    type MinSize = U36;
+}
+
+impl KeySizeUser for ChaCha20Poly1305 {
+    type KeySize = U32;
+}
+
+impl KeyInit for ChaCha20Poly1305 {
+    fn new(key: &GenericArray<u8, Self::KeySize>) -> Self {
+        Self { key: *key }
+    }
+}
+
+impl AeadCore for ChaCha20Poly1305 {
+    type NonceSize = U12;
+    type TagSize = U16;
+    type CiphertextOverhead = U0;
+}
+
+// false positives: eliding lifetimes here requires an unstable feature `anonymous_lifetime_in_impl_trait`
+#[allow(single_use_lifetimes)]
+impl Aead for ChaCha20Poly1305 {
+    fn encrypt<'msg, 'aad>(
+        &self,
+        nonce: &GenericArray<u8, Self::NonceSize>,
+        plaintext: impl Into<Payload<'msg, 'aad>>,
+    ) -> Result<Vec<u8>, Error> {
+        let aead = SysChaCha20Poly1305::new(&self.key);
+        let ciphertext = aead.encrypt(nonce, plaintext)?;
+        Ok(ciphertext)
+    }
+
+    fn decrypt<'msg, 'aad>(
+        &self,
+        nonce: &GenericArray<u8, Self::NonceSize>,
+        ciphertext: impl Into<Payload<'msg, 'aad>>,
+    ) -> Result<Vec<u8>, Error> {
+        let aead = SysChaCha20Poly1305::new(&self.key);
+        let plaintext = aead.decrypt(nonce, ciphertext)?;
+        Ok(plaintext)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn encrypt_easy_works() {
+        let cipher = ChaCha20Poly1305::new(&ChaCha20Poly1305::key_gen().unwrap());
+        let aad = Vec::new();
+        let message = b"Hello and Goodbye!".to_vec();
+        let ciphertext = cipher.encrypt_easy(&aad, &message).unwrap();
+        let decrypted_message = cipher.decrypt_easy(&aad, &ciphertext).unwrap();
+        assert_eq!(message, decrypted_message);
+    }
+
+    #[test]
+    fn encrypt_works() {
+        let cipher = ChaCha20Poly1305::new(&ChaCha20Poly1305::key_gen().unwrap());
+        let nonce = ChaCha20Poly1305::nonce_gen().unwrap();
+        let aad = b"encrypt test".to_vec();
+        let message = b"Hello and Goodbye!".to_vec();
+        let payload = Payload {
+            msg: message.as_slice(),
+            aad: aad.as_slice(),
+        };
+        let ciphertext = cipher.encrypt(&nonce, payload).unwrap();
+        let payload = Payload {
+            msg: ciphertext.as_slice(),
+            aad: aad.as_slice(),
+        };
+        let decrypted_message = cipher.decrypt(&nonce, payload).unwrap();
+        assert_eq!(message, decrypted_message);
+    }
+
+    #[test]
+    fn decrypt_should_fail() {
+        let cipher = ChaCha20Poly1305::new(&ChaCha20Poly1305::key_gen().unwrap());
+        let aad = b"decrypt should fail".to_vec();
+        let message = b"Hello and Goodbye!".to_vec();
+        let res = cipher.encrypt_easy(&aad, &message);
+        let mut ciphertext = res.unwrap();
+
+        let aad = b"decrypt should succeed".to_vec();
+        cipher.decrypt_easy(&aad, &ciphertext).unwrap_err();
+
+        let aad = b"decrypt should fail".to_vec();
+        ciphertext[0] ^= ciphertext[1];
+        cipher.decrypt_easy(&aad, &ciphertext).unwrap_err();
+    }
+
+    // TODO: this should be tested for, but only after we integrate with secrecy/zeroize
+    // #[test]
+    // fn zeroed_on_drop() {
+    //     let mut aes = ChaCha20Poly1305::new(&ChaCha20Poly1305::key_gen().unwrap());
+    //     aes.zeroize();
+    //
+    //     fn as_bytes<T>(x: &T) -> &[u8] {
+    //         use std::{mem, slice};
+    //
+    //         unsafe { slice::from_raw_parts(x as *const T as *const u8, mem::size_of_val(x)) }
+    //     }
+    //
+    //     assert!(as_bytes(&aes.key).iter().all(|b| *b == 0u8));
+    // }
+}
diff --git a/crypto/src/encryption/mod.rs b/crypto/src/encryption/mod.rs
new file mode 100644
index 00000000000..d72deacc7fb
--- /dev/null
+++ b/crypto/src/encryption/mod.rs
@@ -0,0 +1,240 @@
+//! A suite of Authenticated Encryption with Associated Data (AEAD) cryptographic ciphers.
+//!
+//! Each AEAD algorithm provides [`SymmetricEncryptor::encrypt_easy`] and [`SymmetricEncryptor::decrypt_easy`] methods which hide the complexity
+//! of generating a secure nonce of appropriate size for the ciphertext.
+//! [`SymmetricEncryptor::encrypt_easy`] prepends the nonce to the front of the ciphertext and [`SymmetricEncryptor::decrypt_easy`] expects
+//! the nonce to be prepended to the front of the ciphertext.
+//!
+//! More advanced users may use [`SymmetricEncryptor::encrypt`] and [`SymmetricEncryptor::decrypt`] directly. These two methods require the
+//! caller to supply a nonce with sufficient entropy, which should never be reused when encrypting
+//! with the same `key`.
+//!
+//! The convenience struct [`SymmetricEncryptor`] exists to allow users to easily switch between
+//! algorithms by using any algorithm that implements the [`Encryptor`] trait.
+//!
+//! [`ChaCha20Poly1305`] is the only algorithm currently supported,
+//! as it is the only one used by the iroha p2p transport protocol.
+
+mod chacha20poly1305;
+
+use aead::{
+    generic_array::{typenum::Unsigned, ArrayLength, GenericArray},
+    Aead, Error as AeadError, KeyInit, Payload,
+};
+use displaydoc::Display;
+use rand::{rngs::OsRng, RngCore};
+use thiserror::Error;
+
+pub use self::chacha20poly1305::ChaCha20Poly1305;
+use crate::SessionKey;
+
+/// An error that can occur during encryption or decryption
+#[derive(Error, Display, Debug)]
+pub enum Error {
+    /// Failed to generate nonce for an encryption operation
+    NonceGeneration(#[source] rand::Error),
+    /// Failed to encrypt data
+    Encryption(AeadError),
+    /// Failed to decrypt data
+    Decryption(AeadError),
+    /// Not enough data to decrypt message
+    NotEnoughData,
+}
+
+// Helpful for generating bytes using the operating system random number generator
+fn random_vec(bytes: usize) -> Result<Vec<u8>, Error> {
+    let mut value = vec![0u8; bytes];
+    OsRng
+        .try_fill_bytes(value.as_mut_slice())
+        // RustCrypto errors don't have any details, can't propagate the error
+        .map_err(Error::NonceGeneration)?;
+    Ok(value)
+}
+
+fn random_bytes<T: ArrayLength<u8>>() -> Result<GenericArray<u8, T>, Error> {
+    Ok(GenericArray::clone_from_slice(
+        random_vec(T::to_usize())?.as_slice(),
+    ))
+}
+
+/// A generic symmetric encryption wrapper
+///
+/// # Usage
+///
+/// ```
+/// use iroha_crypto::encryption::{SymmetricEncryptor, ChaCha20Poly1305};
+///
+/// let key: Vec<u8> = (0..0x20).collect();
+/// let encryptor = SymmetricEncryptor::<ChaCha20Poly1305>::new_with_key(&key);
+/// let aad = b"Using ChaCha20Poly1305 to encrypt data";
+/// let message = b"Hidden message";
+/// let res = encryptor.encrypt_easy(aad.as_ref(), message.as_ref());
+/// assert!(res.is_ok());
+///
+/// let ciphertext = res.unwrap();
+/// let res = encryptor.decrypt_easy(aad.as_ref(), ciphertext.as_slice());
+/// assert_eq!(res.unwrap().as_slice(), message);
+/// ```
+#[derive(Debug, Clone)]
+pub struct SymmetricEncryptor<E: Encryptor> {
+    encryptor: E,
+}
+
+impl<E: Encryptor> SymmetricEncryptor<E> {
+    /// Create a new [`SymmetricEncryptor`] using the provided `encryptor`
+    pub fn new(encryptor: E) -> Self {
+        Self { encryptor }
+    }
+
+    /// Create a new [`SymmetricEncryptor`] from a [`SessionKey`]
+    pub fn new_from_session_key(key: &SessionKey) -> Self {
+        Self::new(<E as KeyInit>::new(GenericArray::from_slice(&key.0)))
+    }
+    /// Create a new [`SymmetricEncryptor`] from key bytes
+    pub fn new_with_key<A: AsRef<[u8]>>(key: A) -> Self {
+        Self {
+            encryptor: <E as KeyInit>::new(GenericArray::from_slice(key.as_ref())),
+        }
+    }
+
+    /// Encrypt `plaintext` and integrity protect `aad`. The result is the ciphertext.
+    /// This method safely generates a `nonce` and prepends it to the ciphertext
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if nonce generation or encryption fails
+    pub fn encrypt_easy<A: AsRef<[u8]>>(&self, aad: A, plaintext: A) -> Result<Vec<u8>, Error> {
+        self.encryptor.encrypt_easy(aad, plaintext)
+    }
+
+    /// Encrypt `plaintext` and integrity protect `aad`. The result is the ciphertext.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if encryption fails
+    pub fn encrypt<A: AsRef<[u8]>>(
+        &self,
+        nonce: A,
+        aad: A,
+        plaintext: A,
+    ) -> Result<Vec<u8>, Error> {
+        let nonce = GenericArray::from_slice(nonce.as_ref());
+        let payload = Payload {
+            msg: plaintext.as_ref(),
+            aad: aad.as_ref(),
+        };
+        self.encryptor
+            .encrypt(nonce, payload)
+            .map_err(Error::Encryption)
+    }
+
+    /// Decrypt `ciphertext` using integrity protected `aad`. The result is the plaintext if successful,
+    /// or an error if the `ciphertext` cannot be decrypted due to tampering, an incorrect `aad` value,
+    /// or an incorrect key.
+    /// `aad` must be the same value used in `encrypt_easy`. Expects the nonce to be prepended to
+    /// the `ciphertext`
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if decryption fails
+    pub fn decrypt_easy<A: AsRef<[u8]>>(&self, aad: A, ciphertext: A) -> Result<Vec<u8>, Error> {
+        self.encryptor.decrypt_easy(aad, ciphertext)
+    }
+
+    /// Decrypt `ciphertext` using integrity protected `aad`. The result is the plaintext if successful,
+    /// or an error if the `ciphertext` cannot be decrypted due to tampering, an incorrect `aad` value,
+    /// or an incorrect key.
+    /// `aad` must be the same value used in `encrypt_easy`.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if decryption fails
+    pub fn decrypt<A: AsRef<[u8]>>(
+        &self,
+        nonce: A,
+        aad: A,
+        ciphertext: A,
+    ) -> Result<Vec<u8>, Error> {
+        let nonce = GenericArray::from_slice(nonce.as_ref());
+        let payload = Payload {
+            msg: ciphertext.as_ref(),
+            aad: aad.as_ref(),
+        };
+        self.encryptor
+            .decrypt(nonce, payload)
+            .map_err(Error::Decryption)
+    }
+}
+
+impl<E: Encryptor + Default> Default for SymmetricEncryptor<E> {
+    fn default() -> Self {
+        SymmetricEncryptor {
+            encryptor: E::default(),
+        }
+    }
+}
+
+/// Generic encryptor trait that all ciphers should extend.
+pub trait Encryptor: Aead + KeyInit {
+    /// The minimum size that the ciphertext will yield from plaintext
+    type MinSize: ArrayLength<u8>;
+
+    /// A simple API to encrypt a message with authenticated associated data.
+    ///
+    /// This API handles nonce generation for you and prepends it in front of the ciphertext. Use [`Encryptor::decrypt_easy`] to decrypt the message encrypted this way.
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if nonce generation or encryption fails
+    fn encrypt_easy<M: AsRef<[u8]>>(&self, aad: M, plaintext: M) -> Result<Vec<u8>, Error> {
+        let nonce = Self::nonce_gen()?;
+        let payload = Payload {
+            msg: plaintext.as_ref(),
+            aad: aad.as_ref(),
+        };
+        let ciphertext = self.encrypt(&nonce, payload).map_err(Error::Encryption)?;
+        let mut result = nonce.to_vec();
+        result.extend_from_slice(ciphertext.as_slice());
+        Ok(result)
+    }
+
+    /// A simple API to decrypt a message with authenticated associated data.
+    ///
+    /// This API expects the nonce to be prepended to the ciphertext. Use [`Encryptor::encrypt_easy`] to encrypt the message this way.
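+    ///
+    /// A minimal sketch of the round trip, mirroring the `encrypt_easy_works` test in the
+    /// `chacha20poly1305` module ([`ChaCha20Poly1305`] is the implementation this crate provides):
+    ///
+    /// ```ignore
+    /// let cipher = ChaCha20Poly1305::new(&ChaCha20Poly1305::key_gen().unwrap());
+    /// let aad = b"associated data".to_vec();
+    /// let message = b"Hello and Goodbye!".to_vec();
+    /// // `encrypt_easy` prepends the freshly generated nonce to the ciphertext...
+    /// let ciphertext = cipher.encrypt_easy(&aad, &message).unwrap();
+    /// // ...and `decrypt_easy` reads it back off the front before decrypting.
+    /// assert_eq!(cipher.decrypt_easy(&aad, &ciphertext).unwrap(), message);
+    /// ```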
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if decryption fails
+    fn decrypt_easy<M: AsRef<[u8]>>(&self, aad: M, ciphertext: M) -> Result<Vec<u8>, Error> {
+        let ciphertext = ciphertext.as_ref();
+        if ciphertext.len() < Self::MinSize::to_usize() {
+            return Err(Error::NotEnoughData);
+        }
+
+        let nonce = GenericArray::from_slice(&ciphertext[..Self::NonceSize::to_usize()]);
+        let payload = Payload {
+            msg: &ciphertext[Self::NonceSize::to_usize()..],
+            aad: aad.as_ref(),
+        };
+        let plaintext = self.decrypt(nonce, payload).map_err(Error::Decryption)?;
+        Ok(plaintext)
+    }
+
+    /// Generate a new key for this encryptor
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if the operating system random number generator fails
+    fn key_gen() -> Result<GenericArray<u8, Self::KeySize>, Error> {
+        random_bytes()
+    }
+
+    /// Generate a new nonce for this encryptor
+    ///
+    /// # Errors
+    ///
+    /// This function will return an error if the operating system random number generator fails
+    fn nonce_gen() -> Result<GenericArray<u8, Self::NonceSize>, Error> {
+        random_bytes()
+    }
+}
diff --git a/crypto/src/hash.rs b/crypto/src/hash.rs
index dd77afd4229..bde357451c3 100644
--- a/crypto/src/hash.rs
+++ b/crypto/src/hash.rs
@@ -2,6 +2,11 @@
 use alloc::{borrow::ToOwned as _, format, string::String, vec, vec::Vec};
 use core::{hash, marker::PhantomData, num::NonZeroU8, str::FromStr};

+#[cfg(all(feature = "std", not(feature = "ffi_import")))]
+use blake2::{
+    digest::{Update, VariableOutput},
+    Blake2bVar,
+};
 use derive_more::{DebugCustom, Deref, DerefMut, Display};
 #[cfg(any(feature = "std", feature = "ffi_import"))]
 use iroha_macro::ffi_impl_opaque;
@@ -9,11 +14,6 @@ use iroha_schema::{IntoSchema, TypeId};
 use parity_scale_codec::{Decode, Encode};
 use serde::{Deserialize, Serialize};
 use serde_with::DeserializeFromStr;
-#[cfg(all(feature = "std", not(feature = "ffi_import")))]
-use ursa::blake2::{
-    digest::{Update, VariableOutput},
-    VarBlake2b,
-};

 use crate::{error::Error, hex_decode};

@@ -46,7 +46,7 @@ impl Hash {
     /// Length of hash
     pub const LENGTH: usize = 32;

-    /// Wrap the given bytes; they must be prehashed with `VarBlake2b`
+    /// Wrap the given bytes; they must be prehashed with `Blake2bVar`
     pub fn prehashed(mut hash: [u8; Self::LENGTH]) -> Self {
         hash[Self::LENGTH - 1] |= 1;
         // SAFETY:
@@ -69,8 +69,10 @@ impl Hash {
 impl Hash {
     /// Hash the given bytes.
     #[must_use]
+    // NOTE: Panic is predicated by implementation not user input
+    #[allow(clippy::missing_panics_doc)]
     pub fn new(bytes: impl AsRef<[u8]>) -> Self {
-        let vec_hash = VarBlake2b::new(Self::LENGTH)
+        let vec_hash = Blake2bVar::new(Self::LENGTH)
             .expect("Failed to initialize variable size hash")
             .chain(bytes)
             .finalize_boxed();
@@ -122,7 +124,7 @@ impl Encode for Hash {
     #[inline]
     fn encode_to<T: parity_scale_codec::Output + ?Sized>(&self, dest: &mut T) {
-        self.as_ref().encode_to(dest)
+        self.as_ref().encode_to(dest);
     }

     #[inline]
@@ -216,7 +218,7 @@ crate::ffi::ffi_item! {
 impl<T> Clone for HashOf<T> {
     fn clone(&self) -> Self {
-        Self(self.0, PhantomData)
+        *self
     }
 }
 impl<T> Copy for HashOf<T> {}
@@ -230,7 +232,7 @@ impl<T> Eq for HashOf<T> {}
 impl<T> PartialOrd for HashOf<T> {
     fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
-        self.0.partial_cmp(&other.0)
+        Some(self.cmp(other))
     }
 }
 impl<T> Ord for HashOf<T> {
@@ -241,7 +243,7 @@ impl<T> hash::Hash for HashOf<T> {
     fn hash<H: hash::Hasher>(&self, state: &mut H) {
-        self.0.hash(state)
+        self.0.hash(state);
     }
 }
@@ -256,7 +258,7 @@ impl<T> HashOf<T> {
     /// Don't use this method if not required.
     #[inline]
     #[must_use]
-    pub const fn transmute<F>(self) -> HashOf<F> {
+    pub(crate) const fn transmute<F>(self) -> HashOf<F> {
         HashOf(self.0, PhantomData)
     }

@@ -318,8 +320,6 @@ mod ffi {

 #[cfg(test)]
 mod tests {
-    #![allow(clippy::restriction)]
-
     #[cfg(feature = "std")]
     #[cfg(not(feature = "ffi_import"))]
     use super::*;
@@ -328,15 +328,12 @@ mod tests {
     #[cfg(feature = "std")]
     #[cfg(not(feature = "ffi_import"))]
     fn blake2_32b() {
-        let mut hasher = VarBlake2b::new(32).unwrap();
-        hasher.update(hex_literal::hex!("6920616d2064617461"));
-        hasher.finalize_variable(|res| {
-            assert_eq!(
-                res[..],
-                hex_literal::hex!(
-                    "BA67336EFD6A3DF3A70EEB757860763036785C182FF4CF587541A0068D09F5B2"
-                )[..]
-            );
-        })
+        let mut hasher = Blake2bVar::new(32).unwrap();
+        hasher.update(&hex_literal::hex!("6920616d2064617461"));
+        assert_eq!(
+            hasher.finalize_boxed().as_ref(),
+            &hex_literal::hex!("BA67336EFD6A3DF3A70EEB757860763036785C182FF4CF587541A0068D09F5B2")
+                [..]
+        );
     }
 }
diff --git a/crypto/src/kex/mod.rs b/crypto/src/kex/mod.rs
new file mode 100644
index 00000000000..f76bc4ac090
--- /dev/null
+++ b/crypto/src/kex/mod.rs
@@ -0,0 +1,43 @@
+//! A suite of Diffie-Hellman key exchange methods.
+//!
+//! [`X25519Sha256`] is the only key exchange scheme currently supported,
+//! as it is the only one used by the iroha p2p transport protocol.
+
+mod x25519;
+
+pub use x25519::X25519Sha256;
+
+use crate::{Error, KeyGenOption, PrivateKey, PublicKey, SessionKey};
+
+/// A generic trait for key exchange schemes. Each scheme provides a way to generate keys and
+/// do a Diffie-Hellman computation
+pub trait KeyExchangeScheme {
+    /// Generate a new instance of the scheme
+    fn new() -> Self;
+    /// Create new keypairs. If
+    /// `options` is `None`, the keys are generated ephemerally from the `OsRng`;
+    /// `options` is `UseSeed`, the keys are generated ephemerally from the sha256 hash of the seed, which is
+    /// then used to seed the `ChaChaRng`;
+    /// `options` is `FromPrivateKey`, the corresponding public key is returned. This should be used for
+    /// static Diffie-Hellman and loading a long-term key.
+    ///
+    /// # Errors
+    ///
+    /// Returns an error if the key generation fails.
+    fn keypair(&self, options: Option<KeyGenOption>) -> Result<(PublicKey, PrivateKey), Error>;
+    /// Compute the Diffie-Hellman shared secret.
+    /// `local_private_key` is the key generated from calling `keypair` while
+    /// `remote_public_key` is the key received from a different call to `keypair` from another party.
+    fn compute_shared_secret(
+        &self,
+        local_private_key: &PrivateKey,
+        remote_public_key: &PublicKey,
+    ) -> SessionKey;
+
+    /// Size of the shared secret in bytes.
+    const SHARED_SECRET_SIZE: usize;
+    /// Size of the public key in bytes.
+    const PUBLIC_KEY_SIZE: usize;
+    /// Size of the private key in bytes.
+    const PRIVATE_KEY_SIZE: usize;
+}
diff --git a/crypto/src/kex/x25519.rs b/crypto/src/kex/x25519.rs
new file mode 100644
index 00000000000..63e5c7f6325
--- /dev/null
+++ b/crypto/src/kex/x25519.rs
@@ -0,0 +1,97 @@
+use arrayref::array_ref;
+use iroha_primitives::const_vec::ConstVec;
+use rand::{rngs::OsRng, SeedableRng};
+use rand_chacha::ChaChaRng;
+use sha2::Digest;
+use x25519_dalek::{PublicKey as X25519PublicKey, StaticSecret};
+use zeroize::Zeroize;
+
+const ALGORITHM: Algorithm = Algorithm::Ed25519;
+
+use super::KeyExchangeScheme;
+use crate::{Algorithm, Error, KeyGenOption, PrivateKey, PublicKey, SessionKey};
+
+/// Implements the [`KeyExchangeScheme`] using X25519 key exchange and SHA256 hash function.
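+///
+/// A minimal sketch of a two-party exchange in which both sides derive the same
+/// session key (the variable names are illustrative only):
+///
+/// ```ignore
+/// let scheme = X25519Sha256::new();
+/// let (alice_public, alice_private) = scheme.keypair(None).unwrap();
+/// let (bob_public, bob_private) = scheme.keypair(None).unwrap();
+/// // Each side combines its own private key with the other side's public key.
+/// let alice_shared = scheme.compute_shared_secret(&alice_private, &bob_public);
+/// let bob_shared = scheme.compute_shared_secret(&bob_private, &alice_public);
+/// assert_eq!(alice_shared.payload(), bob_shared.payload());
+/// ```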
+#[derive(Copy, Clone)]
+pub struct X25519Sha256;
+
+impl KeyExchangeScheme for X25519Sha256 {
+    fn new() -> Self {
+        Self
+    }
+
+    fn keypair(&self, mut option: Option<KeyGenOption>) -> Result<(PublicKey, PrivateKey), Error> {
+        let (pk, sk) = match option {
+            Some(KeyGenOption::UseSeed(ref mut s)) => {
+                let hash = sha2::Sha256::digest(s.as_slice());
+                s.zeroize();
+                let rng = ChaChaRng::from_seed(*array_ref!(hash.as_slice(), 0, 32));
+                let sk = StaticSecret::random_from_rng(rng);
+                let pk = X25519PublicKey::from(&sk);
+                (pk, sk)
+            }
+            Some(KeyGenOption::FromPrivateKey(ref s)) => {
+                assert_eq!(s.digest_function, ALGORITHM);
+                let sk = StaticSecret::from(*array_ref!(&s.payload, 0, 32));
+                let pk = X25519PublicKey::from(&sk);
+                (pk, sk)
+            }
+            None => {
+                let rng = OsRng;
+                let sk = StaticSecret::random_from_rng(rng);
+                let pk = X25519PublicKey::from(&sk);
+                (pk, sk)
+            }
+        };
+        Ok((
+            PublicKey {
+                digest_function: ALGORITHM,
+                payload: ConstVec::new(pk.as_bytes().to_vec()),
+            },
+            PrivateKey {
+                digest_function: ALGORITHM,
+                payload: ConstVec::new(sk.to_bytes().to_vec()),
+            },
+        ))
+    }
+
+    fn compute_shared_secret(
+        &self,
+        local_private_key: &PrivateKey,
+        remote_public_key: &PublicKey,
+    ) -> SessionKey {
+        assert_eq!(local_private_key.digest_function, ALGORITHM);
+        assert_eq!(remote_public_key.digest_function, ALGORITHM);
+        let sk = StaticSecret::from(*array_ref!(&local_private_key.payload, 0, 32));
+        let pk = X25519PublicKey::from(*array_ref!(&remote_public_key.payload, 0, 32));
+        let shared_secret = sk.diffie_hellman(&pk);
+        let hash = sha2::Sha256::digest(shared_secret.as_bytes());
+        SessionKey(ConstVec::new(hash.as_slice().to_vec()))
+    }
+
+    const SHARED_SECRET_SIZE: usize = 32;
+    const PUBLIC_KEY_SIZE: usize = 32;
+    const PRIVATE_KEY_SIZE: usize = 32;
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn key_exchange() {
+        let scheme = X25519Sha256::new();
+        let (public_key1, secret_key1) = scheme.keypair(None).unwrap();
+        let _res = scheme.compute_shared_secret(&secret_key1, &public_key1);
+        let res = scheme.keypair(None);
+        let (public_key2, secret_key2) = res.unwrap();
+        let _res = scheme.compute_shared_secret(&secret_key2, &public_key1);
+        let _res = scheme.compute_shared_secret(&secret_key1, &public_key2);
+
+        let (public_key2, secret_key2) = scheme
+            .keypair(Some(KeyGenOption::FromPrivateKey(secret_key1.clone())))
+            .unwrap();
+        assert_eq!(public_key2, public_key1);
+        assert_eq!(secret_key2, secret_key1);
+    }
+}
diff --git a/crypto/src/lib.rs b/crypto/src/lib.rs
index 496884bf9b7..eb11d4bf977 100755
--- a/crypto/src/lib.rs
+++ b/crypto/src/lib.rs
@@ -1,11 +1,16 @@
 //! This module contains structures and implementations related to the cryptographic parts of the Iroha.
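+//!
+//! A minimal sketch of the most common entry point, generating a fresh key pair
+//! (the default algorithm is Ed25519; error handling elided):
+//!
+//! ```ignore
+//! use iroha_crypto::KeyPair;
+//!
+//! let key_pair = KeyPair::generate().expect("key pair generation failed");
+//! let (public_key, private_key) = key_pair.into();
+//! ```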
#![cfg_attr(not(feature = "std"), no_std)] -#![allow(clippy::arithmetic_side_effects)] #[cfg(not(feature = "std"))] extern crate alloc; +#[cfg(feature = "std")] +#[cfg(not(feature = "ffi_import"))] +pub mod encryption; mod hash; +#[cfg(feature = "std")] +#[cfg(not(feature = "ffi_import"))] +pub mod kex; mod merkle; #[cfg(not(feature = "ffi_import"))] mod multihash; @@ -23,7 +28,10 @@ use core::{fmt, str::FromStr}; #[cfg(feature = "base64")] pub use base64; -use derive_more::Display; +#[cfg(feature = "std")] +#[cfg(not(feature = "ffi_import"))] +pub use blake2; +use derive_more::{DebugCustom, Display}; use error::{Error, NoSuchAlgorithm}; use getset::{CopyGetters, Getters}; pub use hash::*; @@ -37,21 +45,8 @@ use parity_scale_codec::{Decode, Encode}; use serde::Deserialize; use serde::Serialize; use serde_with::{DeserializeFromStr, SerializeDisplay}; -pub use signature::*; -#[cfg(feature = "std")] -#[cfg(not(feature = "ffi_import"))] -pub use ursa; -#[cfg(feature = "std")] -#[cfg(not(feature = "ffi_import"))] -use ursa::{ - keys::{KeyGenOption as UrsaKeyGenOption, PrivateKey as UrsaPrivateKey}, - signatures::{ - bls::{normal::Bls as BlsNormal, small::Bls as BlsSmall}, - ed25519::Ed25519Sha512, - secp256k1::EcdsaSecp256k1Sha256, - SignatureScheme, - }, -}; + +pub use self::signature::*; // Hiding constants is a bad idea. For one, you're forcing the user to // create temporaries, but also you're not actually hiding any @@ -68,7 +63,7 @@ pub const BLS_NORMAL: &str = "bls_normal"; pub const BLS_SMALL: &str = "bls_small"; ffi::ffi_item! { - /// Algorithm for hashing + /// Algorithm for hashing & signing #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default, DeserializeFromStr, SerializeDisplay, Decode, Encode, IntoSchema)] #[repr(u8)] pub enum Algorithm { @@ -118,36 +113,18 @@ impl FromStr for Algorithm { /// Options for key generation #[cfg(not(feature = "ffi_import"))] +#[cfg_attr( + any(not(feature = "std"), feature = "ffi_import"), + allow(unused_tuple_struct_fields) +)] #[derive(Debug, Clone)] -enum KeyGenOption { +pub enum KeyGenOption { /// Use seed UseSeed(Vec), /// Derive from private key FromPrivateKey(PrivateKey), } -#[cfg(feature = "std")] -#[cfg(not(feature = "ffi_import"))] -impl TryFrom for UrsaKeyGenOption { - type Error = NoSuchAlgorithm; - - fn try_from(key_gen_option: KeyGenOption) -> Result { - match key_gen_option { - KeyGenOption::UseSeed(seed) => Ok(Self::UseSeed(seed)), - KeyGenOption::FromPrivateKey(key) => { - let algorithm = key.digest_function(); - - match algorithm { - Algorithm::Ed25519 | Algorithm::Secp256k1 => { - Ok(Self::FromSecretKey(UrsaPrivateKey(key.payload.into_vec()))) - } - _ => Err(Self::Error {}), - } - } - } - } -} - ffi::ffi_item! 
{ /// Configuration of key generation #[derive(Clone, Default)] @@ -245,33 +222,27 @@ impl KeyPair { /// Fails if decoding fails #[cfg(any(feature = "std", feature = "ffi_import"))] pub fn generate_with_configuration(configuration: KeyGenConfiguration) -> Result { - let key_gen_option: Option = - match (configuration.algorithm, configuration.key_gen_option) { - (Algorithm::Secp256k1, Some(KeyGenOption::UseSeed(seed))) if seed.len() < 32 => { - return Err(Error::KeyGen( - "secp256k1 seed for must be at least 32 bytes long".to_owned(), - )) - } - (_, key_gen_option) => key_gen_option, + let key_gen_option = match (configuration.algorithm, configuration.key_gen_option) { + (Algorithm::Secp256k1, Some(KeyGenOption::UseSeed(seed))) if seed.len() < 32 => { + return Err(Error::KeyGen( + "secp256k1 seed for must be at least 32 bytes long".to_owned(), + )) + } + (_, key_gen_option) => key_gen_option, + }; + + let (public_key, private_key) = match configuration.algorithm { + Algorithm::Ed25519 => signature::ed25519::Ed25519Sha512::keypair(key_gen_option), + Algorithm::Secp256k1 => { + signature::secp256k1::EcdsaSecp256k1Sha256::keypair(key_gen_option) } - .map(TryInto::try_into) - .transpose()?; - let (mut public_key, mut private_key) = match configuration.algorithm { - Algorithm::Ed25519 => Ed25519Sha512.keypair(key_gen_option), - Algorithm::Secp256k1 => EcdsaSecp256k1Sha256::new().keypair(key_gen_option), - Algorithm::BlsNormal => BlsNormal::new().keypair(key_gen_option), - Algorithm::BlsSmall => BlsSmall::new().keypair(key_gen_option), + Algorithm::BlsNormal => signature::bls::BlsNormal::keypair(key_gen_option), + Algorithm::BlsSmall => signature::bls::BlsSmall::keypair(key_gen_option), }?; Ok(Self { - public_key: PublicKey { - digest_function: configuration.algorithm, - payload: ConstVec::new(core::mem::take(&mut public_key.0)), - }, - private_key: PrivateKey { - digest_function: configuration.algorithm, - payload: ConstVec::new(core::mem::take(&mut private_key.0)), - }, + public_key, + private_key, }) } } @@ -308,8 +279,9 @@ impl From for (PublicKey, PrivateKey) { ffi::ffi_item! { /// Public Key used in signatures. #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, CopyGetters)] - #[cfg_attr(not(feature="ffi_import"), derive(derive_more::DebugCustom, Hash, DeserializeFromStr, SerializeDisplay, Decode, Encode, IntoSchema))] - #[cfg_attr(not(feature="ffi_import"), debug(fmt = "{{digest: {digest_function}, payload: {payload:X?}}}"))] + #[cfg_attr(not(feature="ffi_import"), derive(DebugCustom, Display, Hash, DeserializeFromStr, SerializeDisplay, Decode, Encode, IntoSchema))] + #[cfg_attr(not(feature="ffi_import"), debug(fmt = "{{digest: {digest_function}, payload: {}}}", "self.normalize()"))] + #[cfg_attr(not(feature="ffi_import"), display(fmt = "{}", "self.normalize()"))] pub struct PublicKey { /// Digest function #[getset(get_copy = "pub")] @@ -321,6 +293,19 @@ ffi::ffi_item! 
{
 #[ffi_impl_opaque]
 impl PublicKey {
+    /// Creates a new public key from raw bytes received from elsewhere
+    pub fn from_raw(algorithm: Algorithm, payload: ConstVec<u8>) -> Self {
+        Self {
+            digest_function: algorithm,
+            payload,
+        }
+    }
+
+    /// Extracts the raw bytes from public key
+    pub fn into_raw(self) -> (Algorithm, ConstVec<u8>) {
+        (self.digest_function, self.payload)
+    }
+
     /// Key payload
     // TODO: Derive with getset once FFI impl is fixed
     pub fn payload(&self) -> &[u8] {
@@ -330,20 +315,38 @@ impl PublicKey {
     #[cfg(feature = "std")]
     fn try_from_private(private_key: PrivateKey) -> Result<PublicKey, Error> {
         let digest_function = private_key.digest_function();
-        let key_gen_option = Some(UrsaKeyGenOption::FromSecretKey(UrsaPrivateKey(
-            private_key.payload.into_vec(),
-        )));
-
-        let (mut public_key, _) = match digest_function {
-            Algorithm::Ed25519 => Ed25519Sha512.keypair(key_gen_option),
-            Algorithm::Secp256k1 => EcdsaSecp256k1Sha256::new().keypair(key_gen_option),
-            Algorithm::BlsNormal => BlsNormal::new().keypair(key_gen_option),
-            Algorithm::BlsSmall => BlsSmall::new().keypair(key_gen_option),
+        let key_gen_option = Some(KeyGenOption::FromPrivateKey(private_key));
+
+        let (public_key, _) = match digest_function {
+            Algorithm::Ed25519 => signature::ed25519::Ed25519Sha512::keypair(key_gen_option),
+            Algorithm::Secp256k1 => {
+                signature::secp256k1::EcdsaSecp256k1Sha256::keypair(key_gen_option)
+            }
+            Algorithm::BlsNormal => signature::bls::BlsNormal::keypair(key_gen_option),
+            Algorithm::BlsSmall => signature::bls::BlsSmall::keypair(key_gen_option),
         }?;

-        Ok(PublicKey {
-            digest_function: private_key.digest_function,
-            payload: ConstVec::new(core::mem::take(&mut public_key.0)),
+        Ok(public_key)
+    }
+
+    /// Construct `PublicKey` from hex encoded string
+    ///
+    /// # Errors
+    ///
+    /// - If the given payload is not hex encoded
+    /// - If the given payload is not a valid public key
+    #[cfg(feature = "std")]
+    pub fn from_hex(digest_function: Algorithm, payload: &str) -> Result<Self, Error> {
+        let payload = hex_decode(payload)?;
+        let payload = ConstVec::new(payload);
+
+        // NOTE: PrivateKey does some validation by generating a public key from the provided bytes
+        // we can't really do this for PublicKey
+        // this can be solved if the keys used here would be actually aware of the underlying crypto primitive types
+        // instead of just being raw bytes
+        Ok(Self {
+            digest_function,
+            payload,
         })
     }
 }
@@ -362,8 +365,8 @@ impl FromStr for PublicKey {
 }

 #[cfg(not(feature = "ffi_import"))]
-impl fmt::Display for PublicKey {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+impl PublicKey {
+    fn normalize(&self) -> String {
         let multihash: &multihash::Multihash = &self
             .clone()
             .try_into()
@@ -375,7 +378,7 @@ impl PublicKey {
         let dig_size = hex::encode(bytes_iter.by_ref().take(1).collect::<Vec<_>>());
         let key = hex::encode_upper(bytes_iter.by_ref().collect::<Vec<_>>());

-        write!(f, "{fn_code}{dig_size}{key}")
+        format!("{fn_code}{dig_size}{key}")
     }
 }

@@ -391,8 +394,9 @@ impl From<PrivateKey> for PublicKey {

 ffi::ffi_item! {
     /// Private Key used in signatures.
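+    ///
+    /// A minimal sketch of loading a key from its hex form (the key material is the
+    /// Ed25519 test vector used in this crate's tests):
+    ///
+    /// ```ignore
+    /// use iroha_crypto::{Algorithm, PrivateKey};
+    ///
+    /// let private_key = PrivateKey::from_hex(
+    ///     Algorithm::Ed25519,
+    ///     "93CA389FC2979F3F7D2A7F8B76C70DE6D5EAF5FA58D4F93CB8B0FB298D398ACC59C8A4DA1EBB5380F74ABA51F502714652FDCCE9611FAFB9904E4A3C4D382774",
+    /// )
+    /// .expect("valid hex encoding of a valid key");
+    /// ```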
#[derive(Clone, PartialEq, Eq, CopyGetters)] - #[cfg_attr(not(feature="ffi_import"), derive(derive_more::DebugCustom, Serialize))] - #[cfg_attr(not(feature="ffi_import"), debug(fmt = "{{digest: {digest_function}, payload: {payload:X?}}}"))] + #[cfg_attr(not(feature="ffi_import"), derive(DebugCustom, Display, Serialize))] + #[cfg_attr(not(feature="ffi_import"), debug(fmt = "{{digest: {digest_function}, payload: {}}}", "hex::encode_upper(payload)"))] + #[cfg_attr(not(feature="ffi_import"), display(fmt = "{}", "hex::encode_upper(payload)"))] pub struct PrivateKey { /// Digest function #[getset(get_copy = "pub")] @@ -403,12 +407,6 @@ ffi::ffi_item! { } } -impl fmt::Display for PrivateKey { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{}", hex::encode_upper(&self.payload)) - } -} - #[ffi_impl_opaque] impl PrivateKey { /// Key payload @@ -441,7 +439,7 @@ impl PrivateKey { /// - If the given payload is not hex encoded /// - If the given payload is not a valid private key #[cfg(feature = "std")] - pub fn from_hex(digest_function: Algorithm, payload: &[u8]) -> Result { + pub fn from_hex(digest_function: Algorithm, payload: &str) -> Result { let payload = hex_decode(payload)?; let payload = ConstVec::new(payload); @@ -478,6 +476,17 @@ impl<'de> Deserialize<'de> for PrivateKey { } } +/// A session key derived from a key exchange. Will usually be used for a symmetric encryption afterwards +#[allow(unused_tuple_struct_fields)] +pub struct SessionKey(ConstVec); + +impl SessionKey { + /// Expose the raw bytes of the session key + pub fn payload(&self) -> &[u8] { + self.0.as_ref() + } +} + /// Shim for decoding hexadecimal strings pub(crate) fn hex_decode + ?Sized>(payload: &T) -> Result, Error> { hex::decode(payload).map_err(|err| Error::Parse(err.to_string())) @@ -521,21 +530,6 @@ pub mod error { Other(String), } - #[cfg(feature = "std")] - #[cfg(not(feature = "ffi_import"))] - impl From for Error { - fn from(source: ursa::CryptoError) -> Self { - match source { - ursa::CryptoError::NoSuchAlgorithm(source) => Self::NoSuchAlgorithm(source), - ursa::CryptoError::ParseError(source) => Self::Parse(source), - ursa::CryptoError::SigningError(source) => Self::Signing(source), - ursa::CryptoError::KeyGenError(source) => Self::KeyGen(source), - ursa::CryptoError::DigestGenError(source) => Self::DigestGen(source), - ursa::CryptoError::GeneralError(source) => Self::Other(source), - } - } - } - #[cfg(feature = "std")] impl From for Error { fn from(source: NoSuchAlgorithm) -> Self { @@ -615,8 +609,6 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use parity_scale_codec::{Decode, Encode}; #[cfg(all(feature = "std", not(feature = "ffi_import")))] use serde::Deserialize; @@ -640,7 +632,7 @@ mod tests { serde_json::to_string(&algorithm) .and_then(|algorithm| serde_json::from_str(&algorithm)) .unwrap_or_else(|_| panic!("Failed to de/serialize key {:?}", &algorithm)) - ) + ); } } #[test] @@ -662,7 +654,7 @@ mod tests { serde_json::to_string(&key_pair) .and_then(|key_pair| serde_json::from_str(&key_pair)) .unwrap_or_else(|_| panic!("Failed to de/serialize key {:?}", &key_pair)) - ) + ); } } @@ -682,7 +674,7 @@ mod tests { algorithm, decoded_algorithm, "Failed to decode encoded {:?}", &algorithm - ) + ); } } @@ -694,7 +686,7 @@ mod tests { .expect("Public key not in mulithash format"), PrivateKey::from_hex( Algorithm::Ed25519, - 
"93CA389FC2979F3F7D2A7F8B76C70DE6D5EAF5FA58D4F93CB8B0FB298D398ACC59C8A4DA1EBB5380F74ABA51F502714652FDCCE9611FAFB9904E4A3C4D382774".as_ref() + "93CA389FC2979F3F7D2A7F8B76C70DE6D5EAF5FA58D4F93CB8B0FB298D398ACC59C8A4DA1EBB5380F74ABA51F502714652FDCCE9611FAFB9904E4A3C4D382774" ).expect("Private key not hex encoded")).is_ok()); assert!(KeyPair::new("ea0161040FCFADE2FC5D9104A9ACF9665EA545339DDF10AE50343249E01AF3B8F885CD5D52956542CCE8105DB3A2EC4006E637A7177FAAEA228C311F907DAAFC254F22667F1A1812BB710C6F4116A1415275D27BB9FB884F37E8EF525CC31F3945E945FA" @@ -702,7 +694,7 @@ mod tests { .expect("Public key not in mulithash format"), PrivateKey::from_hex( Algorithm::BlsNormal, - "0000000000000000000000000000000049BF70187154C57B97AF913163E8E875733B4EAF1F3F0689B31CE392129493E9".as_ref() + "0000000000000000000000000000000049BF70187154C57B97AF913163E8E875733B4EAF1F3F0689B31CE392129493E9" ).expect("Private key not hex encoded")).is_ok()); } @@ -729,7 +721,7 @@ mod tests { public_key, decoded_public_key, "Failed to decode encoded Public Key{:?}", &public_key - ) + ); } } @@ -738,13 +730,13 @@ mod tests { fn invalid_private_key() { assert!(PrivateKey::from_hex( Algorithm::Ed25519, - "0000000000000000000000000000000049BF70187154C57B97AF913163E8E875733B4EAF1F3F0689B31CE392129493E9".as_ref() + "0000000000000000000000000000000049BF70187154C57B97AF913163E8E875733B4EAF1F3F0689B31CE392129493E9" ).is_err()); assert!( PrivateKey::from_hex( Algorithm::BlsNormal, - "93CA389FC2979F3F7D2A7F8B76C70DE6D5EAF5FA58D4F93CB8B0FB298D398ACC59C8A4DA1EBB5380F74ABA51F502714652FDCCE9611FAFB9904E4A3C4D382774".as_ref() + "93CA389FC2979F3F7D2A7F8B76C70DE6D5EAF5FA58D4F93CB8B0FB298D398ACC59C8A4DA1EBB5380F74ABA51F502714652FDCCE9611FAFB9904E4A3C4D382774" ).is_err()); } @@ -756,7 +748,7 @@ mod tests { .expect("Public key not in mulithash format"), PrivateKey::from_hex( Algorithm::Ed25519, - "3A7991AF1ABB77F3FD27CC148404A6AE4439D095A63591B77C788D53F708A02A1509A611AD6D97B01D871E58ED00C8FD7C3917B6CA61A8C2833A19E000AAC2E4".as_ref() + "3A7991AF1ABB77F3FD27CC148404A6AE4439D095A63591B77C788D53F708A02A1509A611AD6D97B01D871E58ED00C8FD7C3917B6CA61A8C2833A19E000AAC2E4" ).expect("Private key not valid")).is_err()); assert!(KeyPair::new("ea0161040FCFADE2FC5D9104A9ACF9665EA545339DDF10AE50343249E01AF3B8F885CD5D52956542CCE8105DB3A2EC4006E637A7177FAAEA228C311F907DAAFC254F22667F1A1812BB710C6F4116A1415275D27BB9FB884F37E8EF525CC31F3945E945FA" @@ -764,7 +756,7 @@ mod tests { .expect("Public key not in mulithash format"), PrivateKey::from_hex( Algorithm::BlsNormal, - "000000000000000000000000000000002F57460183837EFBAC6AA6AB3B8DBB7CFFCFC59E9448B7860A206D37D470CBA3".as_ref() + "000000000000000000000000000000002F57460183837EFBAC6AA6AB3B8DBB7CFFCFC59E9448B7860A206D37D470CBA3" ).expect("Private key not valid")).is_err()); } @@ -818,7 +810,7 @@ mod tests { } ), "eb01c1040CB3231F601E7245A6EC9A647B450936F707CA7DC347ED258586C1924941D8BC38576473A8BA3BB2C37E3E121130AB67103498A96D0D27003E3AD960493DA79209CF024E2AA2AE961300976AEEE599A31A5E1B683EAA1BCFFC47B09757D20F21123C594CF0EE0BAF5E1BDD272346B7DC98A8F12C481A6B28174076A352DA8EAE881B90911013369D7FA960716A5ABC5314307463FA2285A5BF2A5B5C6220D68C2D34101A91DBFC531C5B9BBFB2245CCC0C50051F79FC6714D16907B1FC40E0C0" - ) + ); } #[cfg(all(feature = "std", not(feature = "ffi_import")))] #[derive(Debug, PartialEq, Deserialize, Serialize)] @@ -924,7 +916,7 @@ mod tests { "0000000000000000000000000000000060F3C1AC9ADDBBED8DB83BC1B2EF22139FB049EECB723A557A41CA1A4B1FED63"), } } - ) + ); } #[test] @@ -943,6 +935,6 @@ mod tests { Err(Error::KeyGen( 
"secp256k1 seed for must be at least 32 bytes long".to_owned() )) - ) + ); } } diff --git a/crypto/src/merkle.rs b/crypto/src/merkle.rs index 8cd0db82fe8..84d63cb1d8c 100644 --- a/crypto/src/merkle.rs +++ b/crypto/src/merkle.rs @@ -1,10 +1,4 @@ //! Merkle tree implementation. -#![allow( - clippy::std_instead_of_alloc, - clippy::std_instead_of_core, - clippy::arithmetic_side_effects -)] - #[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; #[cfg(feature = "std")] @@ -186,7 +180,7 @@ impl MerkleTree { } self.0.push(Some(hash)); - self.update(self.len().saturating_sub(1)) + self.update(self.len().saturating_sub(1)); } #[cfg(feature = "std")] @@ -299,7 +293,7 @@ mod tests { let tree = hashes.clone().into_iter().collect::>(); for i in 0..N_LEAVES as usize * 2 { - assert_eq!(tree.get_leaf_hash(i).as_ref(), hashes.get(i)) + assert_eq!(tree.get_leaf_hash(i).as_ref(), hashes.get(i)); } for (testee_hash, tester_hash) in tree.into_iter().zip(hashes) { assert_eq!(testee_hash, tester_hash); diff --git a/crypto/src/multihash.rs b/crypto/src/multihash.rs index fa382a71f44..579f9708d02 100644 --- a/crypto/src/multihash.rs +++ b/crypto/src/multihash.rs @@ -1,6 +1,4 @@ //! Module with multihash implementation -#![allow(clippy::std_instead_of_core)] - #[cfg(not(feature = "std"))] use alloc::{ string::{String, ToString as _}, @@ -215,8 +213,6 @@ impl From for MultihashConvertError { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use super::*; use crate::hex_decode; @@ -237,7 +233,7 @@ mod tests { hex_decode("ed01201509A611AD6D97B01D871E58ED00C8FD7C3917B6CA61A8C2833A19E000AAC2E4") .expect("Failed to decode"), bytes - ) + ); } #[test] @@ -252,7 +248,7 @@ mod tests { hex_decode("ed01201509A611AD6D97B01D871E58ED00C8FD7C3917B6CA61A8C2833A19E000AAC2E4") .expect("Failed to decode"); let multihash_decoded: Multihash = bytes.try_into().expect("Failed to decode."); - assert_eq!(multihash, multihash_decoded) + assert_eq!(multihash, multihash_decoded); } #[test] diff --git a/crypto/src/signature/bls/implementation.rs b/crypto/src/signature/bls/implementation.rs new file mode 100644 index 00000000000..c3dff70bc9f --- /dev/null +++ b/crypto/src/signature/bls/implementation.rs @@ -0,0 +1,211 @@ +use std::{hash::Hash, marker::PhantomData}; + +/// Implements +/// and +/// +use amcl_wrapper::{ + field_elem::FieldElement, group_elem::GroupElement, group_elem_g1::G1, group_elem_g2::G2, +}; +use sha2::Sha256; + +pub(super) const MESSAGE_CONTEXT: &[u8; 20] = b"for signing messages"; + +// it is not unused? Why am I getting the unused lint here? 
+#[allow(dead_code)]
+const PUBLICKEY_CONTEXT: &[u8; 47] = b"for signing public keys for proof of possession";
+
+use super::PRIVATE_KEY_SIZE;
+use crate::{
+    Algorithm, ConstVec, Error, KeyGenOption, PrivateKey as IrohaPrivateKey,
+    PublicKey as IrohaPublicKey,
+};
+
+/// This is a simple alias so the consumer can just use `PrivateKey::random()` to generate a new one
+/// instead of wrapping it as a private field
+pub type PrivateKey = FieldElement;
+
+pub trait BlsConfiguration {
+    const ALGORITHM: Algorithm;
+    const PK_SIZE: usize;
+    const SIG_SIZE: usize;
+    type Generator: GroupElement + Eq + PartialEq + Hash;
+    type SignatureGroup: GroupElement + Eq + PartialEq + Hash;
+    fn ate_2_pairing_is_one(
+        g: &Self::Generator,
+        sig: &Self::SignatureGroup,
+        pk: &Self::Generator,
+        hash: &Self::SignatureGroup,
+    ) -> bool;
+    fn set_pairs(p: &(Self::Generator, Self::SignatureGroup)) -> (&G1, &G2);
+
+    /// Creates a new BLS key pair
+    fn generate(g: &Self::Generator) -> (PublicKey<Self>, PrivateKey)
+    where
+        Self: Sized,
+    {
+        let sk = PrivateKey::random();
+        let pk = PublicKey::new(&sk, g);
+        (pk, sk)
+    }
+
+    fn hash_to_point<A: AsRef<[u8]>>(v: A, ctx: &[u8]) -> Self::SignatureGroup {
+        let mut value = Vec::new();
+        value.extend_from_slice(ctx);
+        value.extend_from_slice(v.as_ref());
+        Self::SignatureGroup::from_msg_hash(value.as_slice())
+    }
+
+    fn hash_msg<A: AsRef<[u8]>>(
+        message: A,
+        context: Option<&'static [u8]>,
+    ) -> Self::SignatureGroup {
+        let ctx: &[u8] = context.unwrap_or(MESSAGE_CONTEXT);
+        Self::hash_to_point(message, ctx)
+    }
+
+    fn hash_key(pk: &PublicKey<Self>, context: Option<&'static [u8]>) -> Self::SignatureGroup
+    where
+        Self: Sized,
+    {
+        let ctx: &[u8] = context.unwrap_or(PUBLICKEY_CONTEXT);
+        Self::hash_to_point(pk.to_bytes(), ctx)
+    }
+}
+
+pub struct PublicKey<C: BlsConfiguration>(C::Generator);
+
+impl<C: BlsConfiguration> PublicKey<C> {
+    pub fn new(sk: &PrivateKey, g: &C::Generator) -> Self {
+        Self(g.scalar_mul_const_time(sk))
+
+        // Self(g * sk)
+    }
+
+    pub fn to_bytes(&self) -> Vec<u8> {
+        self.0.to_bytes(false)
+    }
+
+    pub fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {
+        Ok(Self(
+            C::Generator::from_bytes(bytes).map_err(|e| Error::Parse(format!("{:?}", e)))?,
+        ))
+    }
+}
+
+/// Signature over a message. One gotcha for BLS signatures
+/// is the need to mitigate rogue key attacks. There are two methods to achieve
+/// this: compute additional work to make each message distinct
+/// in a signature for each `PublicKey` or
+/// use `ProofOfPossession`. `Signature` and `ProofOfPossession` MUST
+/// use domain separation values that are different
+/// to avoid certain types of attacks and make `Signature`
+/// distinct from `ProofOfPossession`. If `ProofOfPossession`
+/// and `Signature` use the same value for `context` they are effectively the same.
+/// Don't do this. You have been warned.
+///
+/// To make messages distinct, use `new_with_rk_mitigation`. If using
+/// proof of possession mitigation, use `new`.
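Given the warning above, a hedged sketch of how `new` and `verify` (defined below) are meant to be used, with `None` selecting the default `MESSAGE_CONTEXT`:

    fn sign_and_check<C: BlsConfiguration>(sk: &PrivateKey, pk: &PublicKey<C>) {
        // Requires `amcl_wrapper::group_elem::GroupElement` in scope.
        let g = C::Generator::generator();
        // An ordinary message signature under the default context.
        let sig = Signature::<C>::new(b"payload", None, sk);
        assert!(sig.verify(b"payload", None, pk, &g));
        // A proof of possession would pass Some(PUBLICKEY_CONTEXT) instead
        // and must never verify under the message context.
    }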
+#[derive(Debug, Clone)]
+pub struct Signature<C: BlsConfiguration>(C::SignatureGroup);
+
+impl<C: BlsConfiguration> Signature<C> {
+    pub fn new<A: AsRef<[u8]>>(
+        message: A,
+        context: Option<&'static [u8]>,
+        sk: &PrivateKey,
+    ) -> Self {
+        Self(C::hash_msg(message, context).scalar_mul_const_time(sk))
+    }
+
+    // Verify a signature generated by `new`
+    pub fn verify<A: AsRef<[u8]>>(
+        &self,
+        message: A,
+        context: Option<&'static [u8]>,
+        pk: &PublicKey<C>,
+        g: &C::Generator,
+    ) -> bool {
+        let hash = C::hash_msg(message, context);
+        C::ate_2_pairing_is_one(g, &self.0, &pk.0, &hash)
+    }
+
+    pub fn to_bytes(&self) -> Vec<u8> {
+        self.0.to_bytes(false)
+    }
+
+    pub fn from_bytes(bytes: &[u8]) -> Result<Self, Error> {
+        Ok(Signature(
+            C::SignatureGroup::from_bytes(bytes).map_err(|e| Error::Parse(format!("{:?}", e)))?,
+        ))
+    }
+}
+
+pub struct BlsImpl<C: BlsConfiguration>(PhantomData<C>);
+
+impl<C: BlsConfiguration> BlsImpl<C> {
+    fn parse_public_key(pk: &IrohaPublicKey) -> Result<PublicKey<C>, Error> {
+        assert_eq!(pk.digest_function, C::ALGORITHM);
+        PublicKey::from_bytes(&pk.payload)
+            .map_err(|e| Error::Parse(format!("Failed to parse public key: {}", e)))
+    }
+
+    fn parse_private_key(sk: &IrohaPrivateKey) -> Result<PrivateKey, Error> {
+        assert_eq!(sk.digest_function, C::ALGORITHM);
+        PrivateKey::from_bytes(&sk.payload)
+            .map_err(|e| Error::Parse(format!("Failed to parse private key: {}", e)))
+    }
+
+    // the names are from an RFC, not a good idea to change them
+    #[allow(clippy::similar_names)]
+    pub fn keypair(
+        options: Option<KeyGenOption>,
+    ) -> Result<(IrohaPublicKey, IrohaPrivateKey), Error> {
+        let (public_key, private_key) = match options {
+            Some(option) => match option {
+                // Follows https://datatracker.ietf.org/doc/draft-irtf-cfrg-bls-signature/?include_text=1
+                KeyGenOption::UseSeed(ref seed) => {
+                    let salt = b"BLS-SIG-KEYGEN-SALT-";
+                    let info = [0u8, PRIVATE_KEY_SIZE.try_into().unwrap()]; // key_info || I2OSP(L, 2)
+                    let mut ikm = vec![0u8; seed.len() + 1];
+                    ikm[..seed.len()].copy_from_slice(seed); // IKM || I2OSP(0, 1)
+                    let mut okm = [0u8; PRIVATE_KEY_SIZE];
+                    let h = hkdf::Hkdf::<Sha256>::new(Some(&salt[..]), &ikm);
+                    h.expand(&info[..], &mut okm).map_err(|err| {
+                        Error::KeyGen(format!("Failed to generate keypair: {}", err))
+                    })?;
+                    let private_key: PrivateKey = PrivateKey::from(&okm);
+                    (
+                        PublicKey::new(&private_key, &C::Generator::generator()),
+                        private_key,
+                    )
+                }
+                KeyGenOption::FromPrivateKey(ref key) => {
+                    let private_key = Self::parse_private_key(key)?;
+                    (
+                        PublicKey::new(&private_key, &C::Generator::generator()),
+                        private_key,
+                    )
+                }
+            },
+            None => C::generate(&C::Generator::generator()),
+        };
+        Ok((
+            IrohaPublicKey {
+                digest_function: C::ALGORITHM,
+                payload: ConstVec::new(public_key.to_bytes()),
+            },
+            IrohaPrivateKey {
+                digest_function: C::ALGORITHM,
+                payload: ConstVec::new(private_key.to_bytes()),
+            },
+        ))
+    }
+
+    pub fn sign(message: &[u8], sk: &IrohaPrivateKey) -> Result<Vec<u8>, Error> {
+        let sk = Self::parse_private_key(sk)?;
+
+        Ok(Signature::<C>::new(message, None, &sk).to_bytes())
+    }
+
+    pub fn verify(message: &[u8], signature: &[u8], pk: &IrohaPublicKey) -> Result<bool, Error> {
+        let pk = Self::parse_public_key(pk)?;
+
+        Ok(Signature::<C>::from_bytes(signature)
+            .map_err(|_| Error::Parse("Failed to parse signature.".to_string()))?
+            .verify(message, None, &pk, &C::Generator::generator()))
+    }
+}
diff --git a/crypto/src/signature/bls/mod.rs b/crypto/src/signature/bls/mod.rs
new file mode 100644
index 00000000000..c3dc918abfb
--- /dev/null
+++ b/crypto/src/signature/bls/mod.rs
@@ -0,0 +1,121 @@
+// Do not expose the [implementation] module & the [implementation::BlsConfiguration] trait
+mod implementation;
+
+pub const PRIVATE_KEY_SIZE: usize = amcl_wrapper::constants::MODBYTES;
+
+/// This version is the "normal" BLS signature scheme
+/// with the public key group in G1 and signature group in G2.
+/// 192 byte signatures and 97 byte public keys
+mod normal {
+    use amcl_wrapper::{
+        constants::{GroupG1_SIZE, GroupG2_SIZE},
+        extension_field_gt::GT,
+        group_elem_g1::G1,
+        group_elem_g2::G2,
+    };
+
+    use super::{implementation, implementation::BlsConfiguration};
+    use crate::Algorithm;
+
+    pub type NormalGenerator = G1;
+    pub type NormalSignatureGroup = G2;
+
+    #[cfg(test)]
+    pub fn normal_generate(
+        g: &NormalGenerator,
+    ) -> (NormalPublicKey, super::implementation::PrivateKey) {
+        NormalConfiguration::generate(g)
+    }
+
+    #[derive(Debug, Clone, Copy)]
+    pub struct NormalConfiguration;
+    impl BlsConfiguration for NormalConfiguration {
+        const ALGORITHM: Algorithm = Algorithm::BlsNormal;
+        const PK_SIZE: usize = GroupG1_SIZE;
+        const SIG_SIZE: usize = GroupG2_SIZE;
+        type Generator = NormalGenerator;
+        type SignatureGroup = NormalSignatureGroup;
+
+        fn ate_2_pairing_is_one(
+            p1: &Self::Generator,
+            g1: &Self::SignatureGroup,
+            p2: &Self::Generator,
+            g2: &Self::SignatureGroup,
+        ) -> bool {
+            GT::ate_2_pairing(&-p1, g1, p2, g2).is_one()
+        }
+
+        fn set_pairs((g1, g2): &(Self::Generator, Self::SignatureGroup)) -> (&G1, &G2) {
+            (g1, g2)
+        }
+    }
+
+    pub type NormalBls = implementation::BlsImpl<NormalConfiguration>;
+    #[cfg(test)]
+    pub type NormalSignature = implementation::Signature<NormalConfiguration>;
+    #[cfg(test)]
+    pub type NormalPublicKey = implementation::PublicKey<NormalConfiguration>;
+}
+
+/// This version is the small BLS signature scheme
+/// with the public key group in G2 and signature group in G1.
+/// 97 byte signatures and 192 byte public keys
+///
+/// This results in smaller signatures but slower operations and a bigger public key.
+/// This is good for situations where space is a consideration and verification is infrequent
+mod small {
+    use amcl_wrapper::{
+        constants::{GroupG1_SIZE, GroupG2_SIZE},
+        extension_field_gt::GT,
+        group_elem_g1::G1,
+        group_elem_g2::G2,
+    };
+
+    use super::implementation::{self, BlsConfiguration};
+    use crate::Algorithm;
+
+    pub type SmallGenerator = G2;
+    pub type SmallSignatureGroup = G1;
+
+    #[cfg(test)]
+    pub fn small_generate(
+        g: &SmallGenerator,
+    ) -> (SmallPublicKey, super::implementation::PrivateKey) {
+        SmallConfiguration::generate(g)
+    }
+
+    #[derive(Debug, Clone, Copy)]
+    pub struct SmallConfiguration;
+    impl BlsConfiguration for SmallConfiguration {
+        const ALGORITHM: Algorithm = Algorithm::BlsSmall;
+        const PK_SIZE: usize = GroupG2_SIZE;
+        const SIG_SIZE: usize = GroupG1_SIZE;
+        type Generator = SmallGenerator;
+        type SignatureGroup = SmallSignatureGroup;
+
+        fn ate_2_pairing_is_one(
+            p1: &Self::Generator,
+            g1: &Self::SignatureGroup,
+            p2: &Self::Generator,
+            g2: &Self::SignatureGroup,
+        ) -> bool {
+            GT::ate_2_pairing(g1, &-p1, g2, p2).is_one()
+        }
+
+        fn set_pairs((g2, g1): &(Self::Generator, Self::SignatureGroup)) -> (&G1, &G2) {
+            (g1, g2)
+        }
+    }
+
+    pub type SmallBls = implementation::BlsImpl<SmallConfiguration>;
+    #[cfg(test)]
+    pub type SmallSignature = implementation::Signature<SmallConfiguration>;
+    #[cfg(test)]
+    pub type SmallPublicKey = implementation::PublicKey<SmallConfiguration>;
+}
+
+pub use normal::NormalBls as BlsNormal;
+pub use small::SmallBls as BlsSmall;
+
+#[cfg(test)]
+mod tests;
diff --git a/crypto/src/signature/bls/tests.rs b/crypto/src/signature/bls/tests.rs
new file mode 100644
index 00000000000..243f4d27bca
--- /dev/null
+++ b/crypto/src/signature/bls/tests.rs
@@ -0,0 +1,76 @@
+use amcl_wrapper::{
+    constants::{GroupG1_SIZE, MODBYTES},
+    field_elem::FieldElement,
+    group_elem::GroupElement,
+    types_g2::GroupG2_SIZE,
+};
+
+use super::{
+    implementation::{BlsConfiguration, BlsImpl, Signature, MESSAGE_CONTEXT},
+    normal::{normal_generate, NormalConfiguration, NormalGenerator, NormalSignature},
+    small::{small_generate, SmallConfiguration, SmallGenerator, SmallSignature},
+};
+use crate::KeyGenOption;
+
+const MESSAGE_1: &[u8; 22] = b"This is a test message";
+const MESSAGE_2: &[u8; 20] = b"Another test message";
+const SEED: &[u8; 10] = &[1u8; 10];
+
+#[test]
+fn size_check() {
+    let msg = FieldElement::random();
+    let g = NormalGenerator::generator();
+    let (pk, sk) = normal_generate(&g);
+    assert_eq!(sk.to_bytes().len(), MODBYTES);
+    assert_eq!(pk.to_bytes().len(), GroupG1_SIZE);
+    let sig = NormalSignature::new(msg.to_bytes().as_slice(), None, &sk);
+    assert_eq!(sig.to_bytes().len(), GroupG2_SIZE);
+
+    let g = SmallGenerator::generator();
+    let (pk, sk) = small_generate(&g);
+    assert_eq!(sk.to_bytes().len(), MODBYTES);
+    assert_eq!(pk.to_bytes().len(), GroupG2_SIZE);
+    let sig = SmallSignature::new(msg.to_bytes().as_slice(), None, &sk);
+    assert_eq!(sig.to_bytes().len(), GroupG1_SIZE);
+}
+
+fn signature_generation_from_seed<C: BlsConfiguration>() {
+    let keypair_1 = BlsImpl::<C>::keypair(Some(KeyGenOption::UseSeed(SEED.to_vec()))).unwrap();
+    let keypair_2 = BlsImpl::<C>::keypair(Some(KeyGenOption::UseSeed(SEED.to_vec()))).unwrap();
+    assert_eq!(keypair_1, keypair_2);
+}
+
+fn signature_verification<C: BlsConfiguration>() {
+    let g = C::Generator::generator();
+    let (pk, sk) = C::generate(&g);
+
+    let signature_1 = Signature::<C>::new(&MESSAGE_1[..], None, &sk);
+    assert!(signature_1.verify(&MESSAGE_1[..], None, &pk, &g));
+
+    let signature_2 = Signature::<C>::new(&MESSAGE_2[..], Some(MESSAGE_CONTEXT), &sk);
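    // Note: `None` falls back to MESSAGE_CONTEXT, so `signature_1` and
    // `signature_2` share a context; the negative checks below fail because
    // the message differs, not because of the context tag.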
+    assert!(signature_2.verify(&MESSAGE_2[..], Some(MESSAGE_CONTEXT), &pk, &g));
+
+    // Should fail for different messages
+    assert!(!signature_1.verify(&MESSAGE_2[..], Some(MESSAGE_CONTEXT), &pk, &g));
+    assert!(!signature_2.verify(&MESSAGE_1[..], None, &pk, &g));
+}
+
+#[test]
+fn normal_signature_generation_from_seed() {
+    signature_generation_from_seed::<NormalConfiguration>();
+}
+
+#[test]
+fn normal_signature_verification() {
+    signature_verification::<NormalConfiguration>();
+}
+
+#[test]
+fn small_signature_generation_from_seed() {
+    signature_generation_from_seed::<SmallConfiguration>();
+}
+
+#[test]
+fn small_signature_verification() {
+    signature_verification::<SmallConfiguration>();
+}
diff --git a/crypto/src/signature/ed25519.rs b/crypto/src/signature/ed25519.rs
new file mode 100644
index 00000000000..6f462ee9b75
--- /dev/null
+++ b/crypto/src/signature/ed25519.rs
@@ -0,0 +1,168 @@
+use std::convert::TryFrom;
+
+use arrayref::array_ref;
+use ed25519_dalek::{Signature, SigningKey, VerifyingKey as PK};
+pub use ed25519_dalek::{
+    EXPANDED_SECRET_KEY_LENGTH as PRIVATE_KEY_SIZE, PUBLIC_KEY_LENGTH as PUBLIC_KEY_SIZE,
+    SIGNATURE_LENGTH as SIGNATURE_SIZE,
+};
+use iroha_primitives::const_vec::ConstVec;
+use rand::{rngs::OsRng, SeedableRng};
+use rand_chacha::ChaChaRng;
+use sha2::Digest;
+use signature::{Signer as _, Verifier as _};
+use zeroize::Zeroize;
+
+const ALGORITHM: Algorithm = Algorithm::Ed25519;
+
+use crate::{Algorithm, Error, KeyGenOption, PrivateKey, PublicKey};
+
+fn parse_private_key(sk: &PrivateKey) -> Result<SigningKey, Error> {
+    assert_eq!(sk.digest_function, ALGORITHM);
+    SigningKey::from_keypair_bytes(
+        &<[u8; 64]>::try_from(&sk.payload[..]).map_err(|e| Error::Parse(e.to_string()))?,
+    )
+    .map_err(|e| Error::Parse(e.to_string()))
+}
+
+fn parse_public_key(pk: &PublicKey) -> Result<PK, Error> {
+    assert_eq!(pk.digest_function, ALGORITHM);
+    PK::try_from(&pk.payload[..]).map_err(|e| Error::Parse(e.to_string()))
+}
+
+#[derive(Debug, Clone, Copy)]
+pub struct Ed25519Sha512;
+
+impl Ed25519Sha512 {
+    pub fn keypair(mut option: Option<KeyGenOption>) -> Result<(PublicKey, PrivateKey), Error> {
+        let kp = match option {
+            Some(KeyGenOption::UseSeed(ref mut s)) => {
+                let hash = sha2::Sha256::digest(s.as_slice());
+                s.zeroize();
+                let mut rng = ChaChaRng::from_seed(*array_ref!(hash.as_slice(), 0, 32));
+                SigningKey::generate(&mut rng)
+            }
+            Some(KeyGenOption::FromPrivateKey(ref s)) => parse_private_key(s)?,
+            None => {
+                let mut rng = OsRng;
+                SigningKey::generate(&mut rng)
+            }
+        };
+        Ok((
+            PublicKey {
+                digest_function: ALGORITHM,
+                payload: ConstVec::new(kp.verifying_key().to_bytes().to_vec()),
+            },
+            PrivateKey {
+                digest_function: ALGORITHM,
+                payload: ConstVec::new(kp.to_keypair_bytes().to_vec()),
+            },
+        ))
+    }
+    pub fn sign(message: &[u8], sk: &PrivateKey) -> Result<Vec<u8>, Error> {
+        let kp = parse_private_key(sk)?;
+        Ok(kp.sign(message).to_bytes().to_vec())
+    }
+    pub fn verify(message: &[u8], signature: &[u8], pk: &PublicKey) -> Result<bool, Error> {
+        let p = parse_public_key(pk)?;
+        let s = Signature::try_from(signature).map_err(|e| Error::Parse(e.to_string()))?;
+        p.verify(message, &s)
+            .map_err(|e| Error::Signing(e.to_string()))?;
+        Ok(true)
+    }
+}
+
+#[cfg(test)]
+// unsafe code is needed to check consistency with libsodium, which is a C library
+#[allow(unsafe_code)]
+mod test {
+    use libsodium_sys as ffi;
+
+    use self::Ed25519Sha512;
+    use super::*;
+    use crate::{KeyGenOption, PrivateKey, PublicKey};
+
+    const MESSAGE_1: &[u8] = b"This is a dummy message for use with tests";
+    const SIGNATURE_1: &str =
"451b5b8e8725321541954997781de51f4142e4a56bab68d24f6a6b92615de5eefb74134138315859a32c7cf5fe5a488bc545e2e08e5eedfd1fb10188d532d808"; + const PRIVATE_KEY: &str = "1c1179a560d092b90458fe6ab8291215a427fcd6b3927cb240701778ef55201927c96646f2d4632d4fc241f84cbc427fbc3ecaa95becba55088d6c7b81fc5bbf"; + const PUBLIC_KEY: &str = "27c96646f2d4632d4fc241f84cbc427fbc3ecaa95becba55088d6c7b81fc5bbf"; + + #[test] + #[ignore] + fn create_new_keys() { + let (p, s) = Ed25519Sha512::keypair(None).unwrap(); + + println!("{s:?}"); + println!("{p:?}"); + } + + #[test] + fn ed25519_load_keys() { + let secret = PrivateKey::from_hex(Algorithm::Ed25519, PRIVATE_KEY).unwrap(); + let sres = Ed25519Sha512::keypair(Some(KeyGenOption::FromPrivateKey(secret))); + assert!(sres.is_ok()); + let (p1, s1) = sres.unwrap(); + + assert_eq!( + s1, + PrivateKey::from_hex(Algorithm::Ed25519, PRIVATE_KEY).unwrap() + ); + assert_eq!( + p1, + PublicKey::from_hex(Algorithm::Ed25519, PUBLIC_KEY).unwrap() + ); + } + + #[test] + fn ed25519_verify() { + let secret = PrivateKey::from_hex(Algorithm::Ed25519, PRIVATE_KEY).unwrap(); + let (p, _) = Ed25519Sha512::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap(); + + let result = + Ed25519Sha512::verify(MESSAGE_1, hex::decode(SIGNATURE_1).unwrap().as_slice(), &p); + assert!(result.is_ok()); + assert!(result.unwrap()); + + //Check if signatures produced here can be verified by libsodium + let signature = hex::decode(SIGNATURE_1).unwrap(); + let res = unsafe { + ffi::crypto_sign_ed25519_verify_detached( + signature.as_slice().as_ptr(), + MESSAGE_1.as_ptr(), + MESSAGE_1.len() as u64, + p.payload().as_ptr(), + ) + }; + assert_eq!(res, 0); + } + + #[test] + fn ed25519_sign() { + let secret = PrivateKey::from_hex(Algorithm::Ed25519, PRIVATE_KEY).unwrap(); + let (p, s) = Ed25519Sha512::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap(); + + let sig = Ed25519Sha512::sign(MESSAGE_1, &s).unwrap(); + let result = Ed25519Sha512::verify(MESSAGE_1, &sig, &p); + assert!(result.is_ok()); + assert!(result.unwrap()); + + assert_eq!(sig.len(), SIGNATURE_SIZE); + assert_eq!(hex::encode(sig.as_slice()), SIGNATURE_1); + + //Check if libsodium signs the message and this module still can verify it + //And that private keys can sign with other libraries + let mut signature = [0u8; ffi::crypto_sign_ed25519_BYTES as usize]; + unsafe { + ffi::crypto_sign_ed25519_detached( + signature.as_mut_ptr(), + std::ptr::null_mut(), + MESSAGE_1.as_ptr(), + MESSAGE_1.len() as u64, + s.payload().as_ptr(), + ) + }; + let result = Ed25519Sha512::verify(MESSAGE_1, &signature, &p); + assert!(result.is_ok()); + assert!(result.unwrap()); + } +} diff --git a/crypto/src/signature.rs b/crypto/src/signature/mod.rs similarity index 73% rename from crypto/src/signature.rs rename to crypto/src/signature/mod.rs index edaa5249e3c..387a939be29 100644 --- a/crypto/src/signature.rs +++ b/crypto/src/signature/mod.rs @@ -1,4 +1,18 @@ -#![allow(clippy::std_instead_of_core)] +// pub(crate) for inner modules it is not redundant, the contents of `signature` module get re-exported at root +#![allow(clippy::redundant_pub_crate)] + +#[cfg(feature = "std")] +#[cfg(not(feature = "ffi_import"))] +pub(crate) mod bls; + +#[cfg(feature = "std")] +#[cfg(not(feature = "ffi_import"))] +pub(crate) mod ed25519; + +#[cfg(feature = "std")] +#[cfg(not(feature = "ffi_import"))] +pub(crate) mod secp256k1; + #[cfg(not(feature = "std"))] use alloc::{boxed::Box, collections::btree_set, format, string::String, vec, vec::Vec}; use core::marker::PhantomData; @@ 
-10,22 +24,13 @@ use derive_more::{Deref, DerefMut}; use iroha_macro::ffi_impl_opaque; use iroha_primitives::const_vec::ConstVec; use iroha_schema::{IntoSchema, TypeId}; -use parity_scale_codec::{Decode, Encode, Input}; +use parity_scale_codec::{Decode, Encode}; #[cfg(not(feature = "ffi_import"))] use serde::{Deserialize, Serialize}; -#[cfg(feature = "std")] -#[cfg(not(feature = "ffi_import"))] -use ursa::{ - keys::{PrivateKey as UrsaPrivateKey, PublicKey as UrsaPublicKey}, - signatures::{ - bls::{normal::Bls as BlsNormal, small::Bls as BlsSmall}, - ed25519::Ed25519Sha512, - secp256k1::EcdsaSecp256k1Sha256, - SignatureScheme, - }, -}; - -use crate::{ffi, Error, PublicKey}; + +#[cfg(any(feature = "std", feature = "import_ffi"))] +use crate::Error; +use crate::{ffi, PublicKey}; #[cfg(feature = "std")] use crate::{HashOf, KeyPair}; @@ -63,13 +68,14 @@ impl Signature { let (public_key, private_key) = key_pair.into(); let algorithm: crate::Algorithm = private_key.digest_function(); - let private_key = UrsaPrivateKey(private_key.payload.into_vec()); let signature = match algorithm { - crate::Algorithm::Ed25519 => Ed25519Sha512::new().sign(payload, &private_key), - crate::Algorithm::Secp256k1 => EcdsaSecp256k1Sha256::new().sign(payload, &private_key), - crate::Algorithm::BlsSmall => BlsSmall::new().sign(payload, &private_key), - crate::Algorithm::BlsNormal => BlsNormal::new().sign(payload, &private_key), + crate::Algorithm::Ed25519 => ed25519::Ed25519Sha512::sign(payload, &private_key), + crate::Algorithm::Secp256k1 => { + secp256k1::EcdsaSecp256k1Sha256::sign(payload, &private_key) + } + crate::Algorithm::BlsSmall => bls::BlsSmall::sign(payload, &private_key), + crate::Algorithm::BlsNormal => bls::BlsNormal::sign(payload, &private_key), }?; Ok(Self { public_key, @@ -84,35 +90,24 @@ impl Signature { #[cfg(any(feature = "std", feature = "import_ffi"))] pub fn verify(&self, payload: &[u8]) -> Result<(), Error> { let algorithm: crate::Algorithm = self.public_key.digest_function(); - let public_key = UrsaPublicKey(self.public_key.payload().to_owned()); match algorithm { crate::Algorithm::Ed25519 => { - Ed25519Sha512::new().verify(payload, self.payload(), &public_key) + ed25519::Ed25519Sha512::verify(payload, self.payload(), &self.public_key) } crate::Algorithm::Secp256k1 => { - EcdsaSecp256k1Sha256::new().verify(payload, self.payload(), &public_key) + secp256k1::EcdsaSecp256k1Sha256::verify(payload, self.payload(), &self.public_key) } crate::Algorithm::BlsSmall => { - BlsSmall::new().verify(payload, self.payload(), &public_key) + bls::BlsSmall::verify(payload, self.payload(), &self.public_key) } crate::Algorithm::BlsNormal => { - BlsNormal::new().verify(payload, self.payload(), &public_key) + bls::BlsNormal::verify(payload, self.payload(), &self.public_key) } }?; Ok(()) } - - /// Get the payload of the public key in this signature. - pub fn key_payload(&self) -> &[u8] { - self.public_key.payload() - } - - /// Get the encrypted payload of this signature. 
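With the per-algorithm dispatch above, signing and verification stay a two-call affair; a minimal sketch (assuming the `std` feature and that `KeyPair::generate` keeps its existing signature):

    use iroha_crypto::{Error, KeyPair, Signature};

    fn sign_and_verify(payload: &[u8]) -> Result<(), Error> {
        let key_pair = KeyPair::generate()?; // default algorithm (Ed25519)
        let signature = Signature::new(key_pair, payload)?;
        signature.verify(payload) // dispatches on the key's algorithm
    }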
- pub fn signature_payload(&self) -> &[u8] { - self.payload.as_ref() - } } // TODO: Enable in ffi_import @@ -192,7 +187,7 @@ impl Ord for SignatureOf { #[cfg(not(feature = "ffi_import"))] impl core::hash::Hash for SignatureOf { fn hash(&self, state: &mut H) { - self.0.hash(state) + self.0.hash(state); } } @@ -220,38 +215,17 @@ impl SignatureOf { /// # Errors /// Fails if signing fails #[cfg(any(feature = "std", feature = "import_ffi"))] - pub fn from_hash(key_pair: KeyPair, hash: HashOf) -> Result { + fn from_hash(key_pair: KeyPair, hash: HashOf) -> Result { Signature::new(key_pair, hash.as_ref()).map(|signature| Self(signature, PhantomData)) } - /// Transmutes signature to some specific type - pub fn transmute(self) -> SignatureOf { - SignatureOf(self.0, PhantomData) - } - - /// Transmutes signature to some specific type - /// - /// # Warning: - /// - /// This method uses [`core::mem::transmute`] internally - pub const fn transmute_ref(&self) -> &SignatureOf { - #[allow(unsafe_code, trivial_casts)] - // SAFETY: transmuting is safe, because we're casting a - // pointer of type `SignatureOf` into a pointer of type - // `SignatureOf`, where `` and `` type parameters are - // normally related types that have the exact same alignment. - unsafe { - &*((self as *const Self).cast::>()) - } - } - /// Verify signature for this hash /// /// # Errors /// /// Fails if the given hash didn't pass verification #[cfg(any(feature = "std", feature = "import_ffi"))] - pub fn verify_hash(&self, hash: HashOf) -> Result<(), Error> { + fn verify_hash(&self, hash: HashOf) -> Result<(), Error> { self.0.verify(hash.as_ref()) } } @@ -324,7 +298,7 @@ impl Eq for SignatureWrapperOf {} #[cfg(not(feature = "ffi_import"))] impl PartialOrd for SignatureWrapperOf { fn partial_cmp(&self, other: &Self) -> Option { - self.0.public_key().partial_cmp(other.0.public_key()) + Some(self.cmp(other)) } } #[cfg(not(feature = "ffi_import"))] @@ -349,7 +323,7 @@ impl core::hash::Hash for SignatureWrapperOf { /// /// GUARANTEE 1: Each signature corresponds to a different public key #[allow(clippy::derived_hash_with_manual_eq)] -#[derive(Hash, Encode, Serialize, IntoSchema)] +#[derive(Hash, Decode, Encode, Deserialize, Serialize, IntoSchema)] #[serde(transparent)] // Transmute guard #[repr(transparent)] @@ -399,38 +373,6 @@ impl Ord for SignaturesOf { } } -#[cfg(not(feature = "ffi_import"))] -impl<'de, T> Deserialize<'de> for SignaturesOf { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - use serde::de::Error as _; - - let signatures = >>::deserialize(deserializer)?; - - if signatures.is_empty() { - return Err(D::Error::custom( - "Could not deserialize SignaturesOf. Input contains 0 signatures", - )); - } - - Ok(Self { signatures }) - } -} -#[cfg(not(feature = "ffi_import"))] -impl Decode for SignaturesOf { - fn decode(input: &mut I) -> Result { - let signatures = >>::decode(input)?; - - if signatures.is_empty() { - return Err("Could not decode SignaturesOf. 
Input contains 0 signatures".into()); - } - - Ok(Self { signatures }) - } -} - #[cfg(not(feature = "ffi_import"))] impl IntoIterator for SignaturesOf { type Item = SignatureOf; @@ -475,11 +417,9 @@ impl From> for btree_set::BTreeSet> { } #[cfg(not(feature = "ffi_import"))] -impl TryFrom>> for SignaturesOf { - type Error = Error; - - fn try_from(signatures: btree_set::BTreeSet>) -> Result { - signatures.into_iter().collect() +impl From>> for SignaturesOf { + fn from(source: btree_set::BTreeSet>) -> Self { + source.into_iter().collect() } } @@ -493,48 +433,21 @@ impl From> for SignaturesOf { } #[cfg(not(feature = "ffi_import"))] -impl FromIterator> for Result, Error> { - fn from_iter>>(iter: T) -> Self { - let mut iter = iter.into_iter(); - iter.next() - .ok_or(Error::EmptySignatureIter) - .map(move |first_signature| core::iter::once(first_signature).chain(iter)) - .map(|signatures| signatures.map(SignatureWrapperOf).collect()) - .map(|signatures| SignaturesOf { signatures }) +impl FromIterator> for SignaturesOf { + fn from_iter>>(signatures: T) -> Self { + Self { + signatures: signatures.into_iter().map(SignatureWrapperOf).collect(), + } } } #[cfg(not(feature = "ffi_import"))] impl SignaturesOf { - /// Transmutes signature generic type - /// - /// # Warning - /// - /// This method uses [`core::mem::transmute`] internally - #[allow(unsafe_code, clippy::transmute_undefined_repr)] - pub fn transmute(self) -> SignaturesOf { - // SAFETY: Safe because we are transmuting to a pointer of - // type `` which is related to type ``. - let signatures = unsafe { core::mem::transmute(self.signatures) }; - SignaturesOf { signatures } - } - /// Adds a signature. If the signature with this key was present, replaces it. pub fn insert(&mut self, signature: SignatureOf) { self.signatures.insert(SignatureWrapperOf(signature)); } - /// Return signatures that have passed verification, remove all others. - #[cfg(feature = "std")] - pub fn retain_verified_by_hash( - &mut self, - hash: HashOf, - ) -> impl ExactSizeIterator> { - self.signatures - .retain(|sign| sign.verify_hash(hash).is_ok()); - self.iter() - } - /// Return all signatures. #[inline] pub fn iter(&self) -> impl ExactSizeIterator> { @@ -548,11 +461,6 @@ impl SignaturesOf { self.signatures.len() } - /// Clear signatures. - pub fn clear(&mut self) { - self.signatures.clear() - } - /// Verify signatures for this hash /// /// # Errors @@ -568,6 +476,11 @@ impl SignaturesOf { }) }) } + + /// Returns true if the set is a subset of another, i.e., other contains at least all the elements in self. + pub fn is_subset(&self, other: &Self) -> bool { + self.signatures.is_subset(&other.signatures) + } } #[cfg(feature = "std")] @@ -588,11 +501,6 @@ impl SignaturesOf { pub fn verify(&self, item: &T) -> Result<(), SignatureVerificationFail> { self.verify_hash(HashOf::new(item)) } - - /// Return signatures that have passed verification, remove all others. 
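Collecting into `SignaturesOf` is now infallible since the non-emptiness invariant was dropped, while deduplication per public key is preserved; a sketch mirroring the test at the end of this file (assuming `SignatureOf::new` and `SignaturesOf` remain publicly exposed):

    use iroha_crypto::{Error, KeyPair, SignatureOf, SignaturesOf};

    fn dedup() -> Result<(), Error> {
        let key_pair = KeyPair::generate()?;
        let signatures: SignaturesOf<u64> = [
            SignatureOf::new(key_pair.clone(), &1)?,
            SignatureOf::new(key_pair, &2)?,
        ]
        .into_iter()
        .collect();
        // One public key signed both values, so a single wrapper entry remains.
        assert_eq!(signatures.len(), 1);
        Ok(())
    }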
- pub fn retain_verified(&mut self, value: &T) -> impl ExactSizeIterator> { - self.retain_verified_by_hash(HashOf::new(value)) - } } /// Verification failed of some signature due to following reason @@ -632,8 +540,6 @@ impl std::error::Error for SignatureVerificationFail {} #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - #[cfg(feature = "std")] use super::*; #[cfg(any(feature = "std", feature = "ffi_import"))] @@ -650,7 +556,7 @@ mod tests { let signature = Signature::new(key_pair.clone(), message).expect("Failed to create signature."); assert!(*signature.public_key() == *key_pair.public_key()); - assert!(signature.verify(message).is_ok()) + assert!(signature.verify(message).is_ok()); } #[test] @@ -664,7 +570,7 @@ mod tests { let signature = Signature::new(key_pair.clone(), message).expect("Failed to create signature."); assert!(*signature.public_key() == *key_pair.public_key()); - assert!(signature.verify(message).is_ok()) + assert!(signature.verify(message).is_ok()); } #[test] @@ -678,7 +584,7 @@ mod tests { let signature = Signature::new(key_pair.clone(), message).expect("Failed to create signature."); assert!(*signature.public_key() == *key_pair.public_key()); - assert!(signature.verify(message).is_ok()) + assert!(signature.verify(message).is_ok()); } #[test] @@ -692,37 +598,7 @@ mod tests { let signature = Signature::new(key_pair.clone(), message).expect("Failed to create signature."); assert!(*signature.public_key() == *key_pair.public_key()); - assert!(signature.verify(message).is_ok()) - } - - #[test] - #[cfg(feature = "std")] - #[cfg(not(feature = "ffi_import"))] - fn decode_signatures_of() { - use parity_scale_codec::DecodeAll; - - let no_signatures: SignaturesOf = SignaturesOf { - signatures: btree_set::BTreeSet::new(), - }; - let bytes = no_signatures.encode(); - - let signatures = SignaturesOf::::decode_all(&mut &bytes[..]); - assert!(signatures.is_err()); - } - - #[test] - #[cfg(feature = "std")] - #[cfg(not(feature = "ffi_import"))] - fn deserialize_signatures_of() -> Result<(), serde_json::Error> { - let no_signatures: SignaturesOf = SignaturesOf { - signatures: btree_set::BTreeSet::new(), - }; - let serialized = serde_json::to_string(&no_signatures)?; - - let signatures = serde_json::from_str::>(serialized.as_str()); - assert!(signatures.is_err()); - - Ok(()) + assert!(signature.verify(message).is_ok()); } #[test] @@ -734,11 +610,9 @@ mod tests { SignatureOf::new(key_pair.clone(), &1).expect("Failed to sign"), SignatureOf::new(key_pair.clone(), &2).expect("Failed to sign"), SignatureOf::new(key_pair, &3).expect("Failed to sign"), - ]; - let signatures = signatures - .into_iter() - .collect::, Error>>() - .expect("One signature must stay"); + ] + .into_iter() + .collect::>(); // Signatures with the same public key was deduplicated assert_eq!(signatures.len(), 1); } diff --git a/crypto/src/signature/secp256k1.rs b/crypto/src/signature/secp256k1.rs new file mode 100644 index 00000000000..8cca69a1fd5 --- /dev/null +++ b/crypto/src/signature/secp256k1.rs @@ -0,0 +1,279 @@ +use self::ecdsa_secp256k1::EcdsaSecp256k1Impl; +use crate::{Algorithm, Error, KeyGenOption, PrivateKey, PublicKey}; + +pub const PRIVATE_KEY_SIZE: usize = 32; +pub const PUBLIC_KEY_SIZE: usize = 33; + +const ALGORITHM: Algorithm = Algorithm::Secp256k1; + +pub struct EcdsaSecp256k1Sha256; + +impl EcdsaSecp256k1Sha256 { + pub fn keypair(option: Option) -> Result<(PublicKey, PrivateKey), Error> { + EcdsaSecp256k1Impl::keypair(option) + } + pub fn sign(message: &[u8], sk: &PrivateKey) -> Result, 
Error> { + EcdsaSecp256k1Impl::sign(message, sk) + } + pub fn verify(message: &[u8], signature: &[u8], pk: &PublicKey) -> Result { + EcdsaSecp256k1Impl::verify(message, signature, pk) + } +} + +mod ecdsa_secp256k1 { + use amcl::secp256k1::ecp; + use arrayref::array_ref; + use digest::Digest as _; + use iroha_primitives::const_vec::ConstVec; + use rand::{rngs::OsRng, RngCore, SeedableRng}; + use rand_chacha::ChaChaRng; + use signature::{Signer as _, Verifier as _}; + use zeroize::Zeroize; + + use super::{ALGORITHM, PRIVATE_KEY_SIZE, PUBLIC_KEY_SIZE}; + use crate::{Error, KeyGenOption, PrivateKey, PublicKey}; + + pub struct EcdsaSecp256k1Impl; + type Digest = sha2::Sha256; + + impl EcdsaSecp256k1Impl { + pub fn public_key_compressed(pk: &PublicKey) -> Vec { + assert_eq!(pk.digest_function, ALGORITHM); + let mut compressed = [0u8; PUBLIC_KEY_SIZE]; + ecp::ECP::frombytes(&pk.payload[..]).tobytes(&mut compressed, true); + compressed.to_vec() + } + + pub fn keypair(option: Option) -> Result<(PublicKey, PrivateKey), Error> { + let signing_key = match option { + Some(mut o) => match o { + KeyGenOption::UseSeed(ref mut seed) => { + let mut s = [0u8; PRIVATE_KEY_SIZE]; + let mut rng = ChaChaRng::from_seed(*array_ref!(seed.as_slice(), 0, 32)); + seed.zeroize(); + rng.fill_bytes(&mut s); + let k = Digest::digest(s); + s.zeroize(); + k256::SecretKey::from_slice(k.as_slice())? + } + KeyGenOption::FromPrivateKey(ref s) => { + assert_eq!(s.digest_function, ALGORITHM); + k256::SecretKey::from_slice(&s.payload[..])? + } + }, + None => k256::SecretKey::random(&mut OsRng), + }; + + let public_key = signing_key.public_key(); + let compressed = public_key.to_sec1_bytes(); //serialized as compressed point + Ok(( + PublicKey { + digest_function: ALGORITHM, + payload: ConstVec::new(compressed), + }, + PrivateKey { + digest_function: ALGORITHM, + payload: ConstVec::new(signing_key.to_bytes().to_vec()), + }, + )) + } + + pub fn sign(message: &[u8], sk: &PrivateKey) -> Result, Error> { + assert_eq!(sk.digest_function, ALGORITHM); + let signing_key = k256::SecretKey::from_slice(&sk.payload[..]) + .map_err(|e| Error::Signing(format!("{:?}", e)))?; + let signing_key = k256::ecdsa::SigningKey::from(signing_key); + + let signature: k256::ecdsa::Signature = signing_key.sign(message); + Ok(signature.to_bytes().to_vec()) + } + + pub fn verify(message: &[u8], signature: &[u8], pk: &PublicKey) -> Result { + let compressed_pk = Self::public_key_compressed(pk); + let verifying_key = k256::PublicKey::from_sec1_bytes(&compressed_pk) + .map_err(|e| Error::Signing(format!("{:?}", e)))?; + let signature = k256::ecdsa::Signature::from_slice(signature) + .map_err(|e| Error::Signing(format!("{:?}", e)))?; + + let verifying_key = k256::ecdsa::VerifyingKey::from(verifying_key); + + Ok(verifying_key.verify(message, &signature).is_ok()) + } + } +} + +impl From for Error { + fn from(error: elliptic_curve::Error) -> Error { + // RustCrypto doesn't expose any kind of error information =( + Error::Other(format!("{error}")) + } +} + +#[cfg(test)] +mod test { + use amcl::secp256k1::ecp; + use openssl::{ + bn::{BigNum, BigNumContext}, + ec::{EcGroup, EcKey, EcPoint}, + ecdsa::EcdsaSig, + nid::Nid, + }; + use sha2::Digest; + + use super::*; + + const MESSAGE_1: &[u8] = b"This is a dummy message for use with tests"; + const SIGNATURE_1: &str = "ae46d3fec8e2eb95ebeaf95f7f096ec4bf517f5ef898e4379651f8af8e209ed75f3c47156445d6687a5f817fb3e188e2a76df653b330df859ec47579c8c409be"; + const PRIVATE_KEY: &str = 
"e4f21b38e005d4f895a29e84948d7cc83eac79041aeb644ee4fab8d9da42f713"; + const PUBLIC_KEY: &str = "0242c1e1f775237a26da4fd51b8d75ee2709711f6e90303e511169a324ef0789c0"; + + fn public_key_uncompressed(pk: &PublicKey) -> Vec { + const PUBLIC_UNCOMPRESSED_KEY_SIZE: usize = 65; + + assert_eq!(pk.digest_function, ALGORITHM); + let mut uncompressed = [0u8; PUBLIC_UNCOMPRESSED_KEY_SIZE]; + ecp::ECP::frombytes(&pk.payload[..]).tobytes(&mut uncompressed, false); + uncompressed.to_vec() + } + + #[test] + #[ignore] + fn create_new_keys() { + let (s, p) = EcdsaSecp256k1Sha256::keypair(None).unwrap(); + + println!("{s:?}"); + println!("{p:?}"); + } + + #[test] + fn secp256k1_load_keys() { + let secret = PrivateKey::from_hex(ALGORITHM, PRIVATE_KEY).unwrap(); + let sres = EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret))); + assert!(sres.is_ok()); + } + + #[test] + fn secp256k1_compatibility() { + let secret = PrivateKey::from_hex(ALGORITHM, PRIVATE_KEY).unwrap(); + let (p, s) = + EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap(); + + let sk = secp256k1::SecretKey::from_slice(s.payload()); + assert!(sk.is_ok()); + let pk = secp256k1::PublicKey::from_slice(p.payload()); + assert!(pk.is_ok()); + + let openssl_group = EcGroup::from_curve_name(Nid::SECP256K1).unwrap(); + let mut ctx = BigNumContext::new().unwrap(); + let openssl_point = + EcPoint::from_bytes(&openssl_group, &public_key_uncompressed(&p)[..], &mut ctx); + assert!(openssl_point.is_ok()); + } + + #[test] + fn secp256k1_verify() { + let p = PublicKey::from_hex(ALGORITHM, PUBLIC_KEY).unwrap(); + + let result = EcdsaSecp256k1Sha256::verify( + MESSAGE_1, + hex::decode(SIGNATURE_1).unwrap().as_slice(), + &p, + ); + assert!(result.is_ok()); + assert!(result.unwrap()); + + let context = secp256k1::Secp256k1::new(); + let pk = + secp256k1::PublicKey::from_slice(hex::decode(PUBLIC_KEY).unwrap().as_slice()).unwrap(); + + let h = sha2::Sha256::digest(MESSAGE_1); + let msg = secp256k1::Message::from_digest_slice(h.as_slice()).unwrap(); + + //Check if signatures produced here can be verified by secp256k1 + let mut signature = + secp256k1::ecdsa::Signature::from_compact(&hex::decode(SIGNATURE_1).unwrap()[..]) + .unwrap(); + signature.normalize_s(); + let result = context.verify_ecdsa(&msg, &signature, &pk); + assert!(result.is_ok()); + + let openssl_group = EcGroup::from_curve_name(Nid::SECP256K1).unwrap(); + let mut ctx = BigNumContext::new().unwrap(); + let openssl_point = + EcPoint::from_bytes(&openssl_group, &pk.serialize_uncompressed(), &mut ctx).unwrap(); + let openssl_pkey = EcKey::from_public_key(&openssl_group, &openssl_point).unwrap(); + + // Check if the signatures produced here can be verified by openssl + let (r, s) = SIGNATURE_1.split_at(SIGNATURE_1.len() / 2); + let openssl_r = BigNum::from_hex_str(r).unwrap(); + let openssl_s = BigNum::from_hex_str(s).unwrap(); + let openssl_sig = EcdsaSig::from_private_components(openssl_r, openssl_s).unwrap(); + let openssl_result = openssl_sig.verify(h.as_slice(), &openssl_pkey); + assert!(openssl_result.is_ok()); + assert!(openssl_result.unwrap()); + } + + #[test] + fn secp256k1_sign() { + let secret = PrivateKey::from_hex(ALGORITHM, PRIVATE_KEY).unwrap(); + let (p, s) = + EcdsaSecp256k1Sha256::keypair(Some(KeyGenOption::FromPrivateKey(secret))).unwrap(); + + let sig = EcdsaSecp256k1Sha256::sign(MESSAGE_1, &s).unwrap(); + let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &sig, &p); + assert!(result.is_ok()); + assert!(result.unwrap()); + + 
assert_eq!(sig.len(), 64); + + // Check if secp256k1 signs the message and this module still can verify it + // And that private keys can sign with other libraries + let context = secp256k1::Secp256k1::new(); + let sk = + secp256k1::SecretKey::from_slice(hex::decode(PRIVATE_KEY).unwrap().as_slice()).unwrap(); + + let h = sha2::Sha256::digest(MESSAGE_1); + + let msg = secp256k1::Message::from_digest_slice(h.as_slice()).unwrap(); + let sig_1 = context.sign_ecdsa(&msg, &sk).serialize_compact(); + + let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &sig_1, &p); + + assert!(result.is_ok()); + assert!(result.unwrap()); + + let openssl_group = EcGroup::from_curve_name(Nid::SECP256K1).unwrap(); + let mut ctx = BigNumContext::new().unwrap(); + let openssl_point = + EcPoint::from_bytes(&openssl_group, &public_key_uncompressed(&p)[..], &mut ctx) + .unwrap(); + let openssl_public_key = EcKey::from_public_key(&openssl_group, &openssl_point).unwrap(); + let openssl_secret_key = EcKey::from_private_components( + &openssl_group, + &BigNum::from_hex_str(PRIVATE_KEY).unwrap(), + &openssl_point, + ) + .unwrap(); + + let openssl_sig = EcdsaSig::sign(h.as_slice(), &openssl_secret_key).unwrap(); + let openssl_result = openssl_sig.verify(h.as_slice(), &openssl_public_key); + assert!(openssl_result.is_ok()); + assert!(openssl_result.unwrap()); + let mut temp_sig = Vec::new(); + temp_sig.extend(openssl_sig.r().to_vec()); + temp_sig.extend(openssl_sig.s().to_vec()); + + // secp256k1 expects normalized "s"'s. + // scheme.normalize_s(temp_sig.as_mut_slice()).unwrap(); + // k256 seems to be normalizing always now + let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, temp_sig.as_slice(), &p); + assert!(result.is_ok()); + assert!(result.unwrap()); + + let (p, s) = EcdsaSecp256k1Sha256::keypair(None).unwrap(); + let signed = EcdsaSecp256k1Sha256::sign(MESSAGE_1, &s).unwrap(); + let result = EcdsaSecp256k1Sha256::verify(MESSAGE_1, &signed, &p); + assert!(result.is_ok()); + assert!(result.unwrap()); + } +} diff --git a/crypto/src/varint.rs b/crypto/src/varint.rs index 66e426b0f6f..364fa5275ce 100644 --- a/crypto/src/varint.rs +++ b/crypto/src/varint.rs @@ -119,8 +119,6 @@ impl VarUint { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - #[cfg(not(feature = "std"))] use alloc::vec; diff --git a/data_model/Cargo.toml b/data_model/Cargo.toml index f5aaf21c9a0..c04c3dba783 100644 --- a/data_model/Cargo.toml +++ b/data_model/Cargo.toml @@ -8,6 +8,9 @@ authors.workspace = true license.workspace = true categories = ["cryptography::cryptocurrencies", "api-bindings"] +[lints] +workspace = true + [badges] is-it-maintained-issue-resolution = { repository = "https://github.com/hyperledger/iroha" } is-it-maintained-open-issues = { repository = "https://github.com/hyperledger/iroha" } @@ -18,7 +21,7 @@ default = ["std"] # Enable static linkage of the rust standard library. # Disabled for WASM interoperability, to reduce the binary size. # Please refer to https://docs.rust-embedded.org/book/intro/no-std.html -std = ["iroha_macro/std", "iroha_version/std", "iroha_crypto/std", "iroha_primitives/std", "thiserror", "displaydoc/std", "strum/std", "dashmap", "tokio"] +std = ["iroha_macro/std", "iroha_version/std", "iroha_crypto/std", "iroha_primitives/std", "thiserror", "displaydoc/std", "strum/std", "once_cell"] # Enable API for HTTP requests. 
Should be activated for HTTP clients http = ["std", "warp", "iroha_version/http"] # Replace structures and methods with FFI equivalents to facilitate dynamic linkage (mainly used in smartcontracts) @@ -38,8 +41,6 @@ iroha_version = { workspace = true, features = ["derive", "json", "scale"] } iroha_schema = { workspace = true } iroha_ffi = { workspace = true, optional = true } -dashmap = { workspace = true, optional = true } -tokio = { workspace = true, optional = true, features = ["sync", "rt-multi-thread"] } parity-scale-codec = { workspace = true, features = ["derive"] } derive_more = { workspace = true, features = ["as_ref", "display", "constructor", "from_str", "from", "into"] } serde = { workspace = true, features = ["derive"] } @@ -51,6 +52,7 @@ displaydoc = { workspace = true } getset = { workspace = true } strum = { workspace = true, features = ["derive"] } base64 = { workspace = true, features = ["alloc"] } +once_cell = { workspace = true, optional = true } [dev-dependencies] iroha_client = { workspace = true } diff --git a/data_model/benches/time_event_filter.rs b/data_model/benches/time_event_filter.rs index fca4349c97e..27e20605f1e 100644 --- a/data_model/benches/time_event_filter.rs +++ b/data_model/benches/time_event_filter.rs @@ -1,4 +1,4 @@ -#![allow(missing_docs, clippy::restriction)] +#![allow(missing_docs)] use std::time::Duration; diff --git a/data_model/build.rs b/data_model/build.rs index 2d55ca605eb..92b3b826f12 100644 --- a/data_model/build.rs +++ b/data_model/build.rs @@ -6,7 +6,6 @@ fn main() { let ffi_import = std::env::var_os("CARGO_FEATURE_FFI_IMPORT").is_some(); let ffi_export = std::env::var_os("CARGO_FEATURE_FFI_EXPORT").is_some(); - #[allow(clippy::print_stderr)] if ffi_import && ffi_export { println!("cargo:warning=Features `ffi_export` and `ffi_import` are mutually exclusive"); println!("cargo:warning=When both active, `ffi_import` feature takes precedence"); diff --git a/data_model/derive/Cargo.toml b/data_model/derive/Cargo.toml index 0fd8e83b862..5cb877d609f 100644 --- a/data_model/derive/Cargo.toml +++ b/data_model/derive/Cargo.toml @@ -7,19 +7,25 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true [dependencies] -syn = { workspace = true, features = ["default", "full", "extra-traits"] } +syn2 = { workspace = true, features = ["default", "full", "extra-traits", "visit-mut"] } quote = { workspace = true } +darling = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } iroha_macro_utils = { workspace = true } -serde_json = { workspace = true, features = ["std"] } [dev-dependencies] iroha_data_model = { workspace = true, features = ["http"] } +iroha_schema = { workspace = true } +parity-scale-codec = { workspace = true } +derive_more = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } diff --git a/data_model/derive/src/filter.rs b/data_model/derive/src/filter.rs index f9cbe87c09a..8479c88bfdb 100644 --- a/data_model/derive/src/filter.rs +++ b/data_model/derive/src/filter.rs @@ -1,179 +1,167 @@ -#![allow( - clippy::mixed_read_write_in_expression, - clippy::unwrap_in_result, - clippy::arithmetic_side_effects -)] - -use proc_macro::TokenStream; +use darling::{FromDeriveInput, FromVariant}; +use iroha_macro_utils::Emitter; +use manyhow::emit; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{ - parse::{Parse, ParseStream}, - punctuated::Punctuated, 
-    Attribute, Generics, Ident, Token, Variant, Visibility,
-};
+use syn2::{Generics, Ident, Variant, Visibility};

-pub struct EventEnum {
+#[derive(FromDeriveInput)]
+#[darling(supports(enum_tuple))]
+struct EventEnum {
    vis: Visibility,
    ident: Ident,
    generics: Generics,
-    variants: Punctuated<EventVariant, Token![,]>,
+    data: darling::ast::Data<EventVariant, darling::util::Ignored>,
}

-pub enum EventVariant {
-    EventField { variant: Ident, field: Ident },
-    IdField(Ident),
+enum EventVariant {
+    /// A variant of the event that delegates to some other event. Identified by the conventional naming of the event types: ending with `Event`.
+    /// Delegates all the filtering to the corresponding event's filter.
+    Delegating {
+        variant_name: Ident,
+        /// The name of the event this variant delegates to, without the `Event` suffix
+        delegated_event_name_base: String,
+    },
+    /// An actual event. Has either an Id or an identifiable object as a payload.
+    /// The presence of the Id field is not required by this macro per se, but will be enforced by `OriginFilter` requiring a `HasOrigin` impl.
+    Direct(Ident),
+}
+
+impl FromVariant for EventVariant {
+    fn from_variant(variant: &Variant) -> darling::Result<Self> {
+        let syn2::Fields::Unnamed(fields) = &variant.fields else {
+            return Err(
+                darling::Error::custom("Expected an enum with unnamed fields")
+                    .with_span(&variant.fields),
+            );
+        };
+        // note: actually, we have only one field in the event variants
+        // this is not enforced by this macro, but by `IntoSchema`
+        let Some(first_field_ty) = fields.unnamed.first().map(|v| &v.ty) else {
+            return Err(darling::Error::custom("Expected at least one field").with_span(&fields));
+        };
+        let syn2::Type::Path(path) = first_field_ty else {
+            return Err(
+                darling::Error::custom("Only identifiers supported as event types")
+                    .with_span(first_field_ty),
+            );
+        };
+        let Some(first_field_ty_name) = path.path.get_ident() else {
+            return Err(
+                darling::Error::custom("Only identifiers supported as event types")
+                    .with_span(first_field_ty),
+            );
+        };
+
+        // What clippy suggests is much less readable in this case
+        #[allow(clippy::option_if_let_else)]
+        if let Some(delegated_event_name_base) =
+            first_field_ty_name.to_string().strip_suffix("Event")
+        {
+            Ok(EventVariant::Delegating {
+                variant_name: variant.ident.clone(),
+                delegated_event_name_base: delegated_event_name_base.to_string(),
+            })
+        } else {
+            Ok(EventVariant::Direct(variant.ident.clone()))
+        }
+    }
+}

impl EventEnum {
+    fn variants(&self) -> &[EventVariant] {
+        match &self.data {
+            darling::ast::Data::Enum(variants) => variants,
+            _ => unreachable!("BUG: only enums should be here"),
+        }
+    }
+
+    fn filter_map_variants<T, F: Fn(&EventVariant) -> Option<T>>(&self, fun: F) -> Vec<T> {
+        self.variants().iter().filter_map(fun).collect()
+    }
+
    /// Used to produce fields like `ByAccount(crate::prelude::FilterOpt)` in `DomainEventFilter`.
-    fn generate_filter_variants_with_event_fields(&self) -> Vec<proc_macro2::TokenStream> {
-        self.variants
-            .iter()
-            .filter_map(|variant| match variant {
-                EventVariant::IdField(_) => None,
-                EventVariant::EventField {
-                    variant: variant_ident,
-                    field: field_ident,
-                } => {
-                    // E.g. `Account` field in the event => `ByAccount` in the event filter
-                    let filter_variant_ident = format_ident!("By{}", variant_ident);
-                    // E.g. `AccountEvent` inner field from `Account` variant in event =>
-                    // `AccountFilter` inside the event filter
-                    let inner_filter_ident = format_ident!(
-                        "{}Filter",
-                        field_ident
-                            .to_string()
-                            .strip_suffix("Event")
-                            .expect("Variant name should have suffix `Event`"),
-                    );
-                    let import_path = quote! {crate::prelude};
-                    Some(quote!
{ - #filter_variant_ident(#import_path::FilterOpt<#inner_filter_ident>) }) - } - }) - .collect() + fn generate_filter_variants_for_delegating_events(&self) -> Vec { + self.filter_map_variants(|variant| { + if let EventVariant::Delegating { + variant_name, + delegated_event_name_base, + } = variant + { + // E.g. `Account` field in the event => `ByAccount` in the event filter + let filter_variant_ident = format_ident!("By{}", variant_name); + // E.g. `AccountEvent` inner field from `Account` variant in event => + // `AccountFilter` inside the event filter + let inner_filter_ident = format_ident!("{}Filter", delegated_event_name_base); + let import_path = quote! {crate::prelude}; + Some(quote! { + #filter_variant_ident(#import_path::FilterOpt<#inner_filter_ident>) + }) + } else { + None + } + }) } /// Used to produce fields like `ByCreated` in `DomainEventFilter`. - fn generate_filter_variants_with_id_fields(&self) -> Vec { - self.variants - .iter() - .filter_map(|variant| match variant { - EventVariant::IdField(event_variant_ident) => { - // Event fields such as `MetadataRemoved` get mapped to `ByMetadataRemoved` - let filter_variant_ident = format_ident!("By{}", event_variant_ident); - Some(filter_variant_ident) - } - EventVariant::EventField { .. } => None, - }) - .collect() + fn generate_filter_variants_for_direct_events(&self) -> Vec { + self.filter_map_variants(|variant| { + if let EventVariant::Direct(event_variant_ident) = variant { + // Event fields such as `MetadataRemoved` get mapped to `ByMetadataRemoved` + let filter_variant_ident = format_ident!("By{}", event_variant_ident); + Some(filter_variant_ident) + } else { + None + } + }) } /// Match arms for `Filter` impls of event filters of the form /// `(Self::ByAccount(filter_opt), crate::prelude::DomainEvent::Account(event)) => {filter_opt.matches(event)}`. - fn generate_filter_impls_with_event_fields(&self) -> Vec { - self.variants - .iter() - .filter_map(|variant| match variant { - EventVariant::IdField(_) => None, - EventVariant::EventField { - variant: event_variant_ident, - .. - } => { - let event_ident = &self.ident; - let filter_variant_ident = format_ident!("By{}", event_variant_ident); - let import_path = quote! {crate::prelude}; - Some(quote! { - (Self::#filter_variant_ident(filter_opt), #import_path::#event_ident::#event_variant_ident(event)) => { - filter_opt.matches(event) - }}) - - }}).collect() + fn generate_filter_arms_for_delegating_events(&self) -> Vec { + self.filter_map_variants(|variant| { + if let EventVariant::Delegating { variant_name, .. } = variant { + let event_ident = &self.ident; + let filter_variant_ident = format_ident!("By{}", variant_name); + let import_path = quote! {crate::prelude}; + Some(quote! { + ( + Self::#filter_variant_ident(filter_opt), + #import_path::#event_ident::#variant_name(event) + ) => { + filter_opt.matches(event) + } + }) + } else { + None + } + }) } /// Match arms for `Filter` impls of event filters of the form /// `(Self::ByCreated, crate::prelude::DomainEvent::Created(_))`. - fn generate_filter_impls_with_id_fields(&self) -> Vec { - self.variants - .iter() - .filter_map(|variant| match variant { - EventVariant::IdField(event_variant_ident) => { - let event_ident = &self.ident; - let filter_variant_ident = format_ident!("By{}", event_variant_ident); - let import_path = quote! {crate::prelude}; - Some( - quote! { - (Self::#filter_variant_ident, #import_path::#event_ident::#event_variant_ident(_)) - }) - }, - EventVariant::EventField { .. 
} => None, - }) - .collect() - } -} - -impl Parse for EventEnum { - fn parse(input: ParseStream) -> syn::Result<Self> { - let _attrs = input.call(Attribute::parse_outer)?; - let vis = input.parse()?; - let _enum_token = input.parse::<Token![enum]>()?; - let ident = input.parse::<Ident>()?; - let generics = input.parse::<Generics>()?; - let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(EventVariant::parse)?; - if ident.to_string().ends_with("Event") { - Ok(EventEnum { - vis, - ident, - generics, - variants, - }) - } else { - Err(syn::Error::new_spanned( - ident, - "Bad ident: only derivable for `...Event` enums", - )) - } - } -} - -impl Parse for EventVariant { - fn parse(input: ParseStream) -> syn::Result<Self> { - let variant = input.parse::<Variant>()?; - let variant_ident = variant.ident; - let field_type = variant - .fields - .into_iter() - .next() - .expect("Variant should have at least one unnamed field") - .ty; - if let syn::Type::Path(path) = field_type { - let field_ident = path - .path - .get_ident() - .expect("Should be an ident-convertible path"); - - if field_ident.to_string().ends_with("Event") { - Ok(EventVariant::EventField { - variant: variant_ident, - field: field_ident.clone(), + fn generate_filter_patterns_for_direct_events(&self) -> Vec<proc_macro2::TokenStream> { + self.filter_map_variants(|variant| { + if let EventVariant::Direct(event_variant_ident) = variant { + let event_ident = &self.ident; + let filter_variant_ident = format_ident!("By{}", event_variant_ident); + let import_path = quote! {crate::prelude}; + Some(quote! { + ( + Self::#filter_variant_ident, + #import_path::#event_ident::#event_variant_ident(_) + ) }) } else { - Ok(EventVariant::IdField(variant_ident)) + None } - } else { - Err(syn::Error::new_spanned( - field_type, - "Unexpected AST type variant", - )) - } + }) } } -/// Generates the filter for the event. E.g. for `AccountEvent`, `AccountFilter` +/// Generates the event filter for the event. E.g. for `AccountEvent`, `AccountEventFilter` /// and its `impl Filter` are generated. -pub fn impl_filter(event: &EventEnum) -> TokenStream { +fn impl_event_filter(event: &EventEnum) -> proc_macro2::TokenStream { let EventEnum { vis, ident: event_ident, @@ -181,92 +169,97 @@ pub fn impl_filter(event: &EventEnum) -> TokenStream { .. } = event; - let event_filter_and_impl = impl_event_filter(event); + let id_variants = event.generate_filter_variants_for_direct_events(); + let event_variants = event.generate_filter_variants_for_delegating_events(); - let filter_ident = format_ident!( - "{}Filter", - event_ident - .to_string() - .strip_suffix("Event") - .expect("Events should follow the naming format") - ); - let event_filter_ident = format_ident!("{}Filter", event_ident); + let id_patterns = event.generate_filter_patterns_for_direct_events(); + let event_arms = event.generate_filter_arms_for_delegating_events(); + let event_filter_ident = format_ident!("{}Filter", event_ident); let import_path = quote! { crate::prelude }; - let fil_opt = quote! { #import_path::FilterOpt }; - let orig_fil = quote! { #import_path::OriginFilter }; let imp_event = quote! { #import_path::#event_ident }; - let filter_doc = format!(" Filter for {event_ident} entity"); + let event_filter_doc = format!(" Event filter for {event_ident} entity"); quote! { iroha_data_model_derive::model_single!
{ - #[derive(Debug, Clone, PartialEq, Eq, derive_more::Constructor, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[doc = #filter_doc] - #vis struct #filter_ident #generics { - origin_filter: #fil_opt<#orig_fil<#imp_event>>, - event_filter: #fil_opt<#event_filter_ident> + #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[allow(clippy::enum_variant_names, missing_docs)] + #[doc = #event_filter_doc] + #vis enum #event_filter_ident #generics { + #(#id_variants),*, + #(#event_variants),* } } #[cfg(feature = "transparent_api")] - impl #import_path::Filter for #filter_ident { + impl #import_path::Filter for #event_filter_ident { type Event = #imp_event; - fn matches(&self, event: &Self::Event) -> bool { - self.origin_filter.matches(event) && self.event_filter.matches(event) + fn matches(&self, event: &#imp_event) -> bool { + match (self, event) { + #(#id_patterns)|* => true, + #(#event_arms),* + _ => false, + } } } - - #event_filter_and_impl } - .into() } -/// Generates the event filter for the event. E.g. for `AccountEvent`, `AccountEventFilter` +/// Generates the filter for the event. E.g. for `AccountEvent`, `AccountFilter` /// and its `impl Filter` are generated. -fn impl_event_filter(event: &EventEnum) -> proc_macro2::TokenStream { +pub fn impl_filter(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream { + let Some(event) = emitter.handle(EventEnum::from_derive_input(input)) else { + return quote!(); + }; + let EventEnum { vis, ident: event_ident, generics, .. - } = event; + } = &event; - let id_variants = event.generate_filter_variants_with_id_fields(); - let event_variants = event.generate_filter_variants_with_event_fields(); + let event_filter_and_impl = impl_event_filter(&event); - let id_impls = event.generate_filter_impls_with_id_fields(); - let event_impls = event.generate_filter_impls_with_event_fields(); + let event_base = event_ident.to_string().strip_suffix("Event").map_or_else( + || { + emit!(emitter, event_ident, "Event name should end with `Event`"); + event_ident.to_string() + }, + ToString::to_string, + ); + let filter_ident = format_ident!("{}Filter", event_base); let event_filter_ident = format_ident!("{}Filter", event_ident); + let import_path = quote! { crate::prelude }; + let fil_opt = quote! { #import_path::FilterOpt }; + let orig_fil = quote! { #import_path::OriginFilter }; let imp_event = quote! { #import_path::#event_ident }; - let event_filter_doc = format!(" Event filter for {event_ident} entity"); + let filter_doc = format!(" Filter for {event_ident} entity"); quote! { iroha_data_model_derive::model_single! 
{ - #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[allow(clippy::enum_variant_names, missing_docs)] - #[doc = #event_filter_doc] - #vis enum #event_filter_ident #generics { - #(#id_variants),*, - #(#event_variants),* + #[derive(Debug, Clone, PartialEq, Eq, derive_more::Constructor, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[doc = #filter_doc] + #vis struct #filter_ident #generics { + origin_filter: #fil_opt<#orig_fil<#imp_event>>, + event_filter: #fil_opt<#event_filter_ident> } } #[cfg(feature = "transparent_api")] - impl #import_path::Filter for #event_filter_ident { + impl #import_path::Filter for #filter_ident { type Event = #imp_event; - fn matches(&self, event: &#imp_event) -> bool { - match (self, event) { - #(#id_patterns)|* => true, - #(#event_arms),* - _ => false, - } + fn matches(&self, event: &Self::Event) -> bool { + self.origin_filter.matches(event) && self.event_filter.matches(event) } } + + #event_filter_and_impl } } diff --git a/data_model/derive/src/has_origin.rs b/data_model/derive/src/has_origin.rs index 85dab5114bb..ac2e88833f1 100644 --- a/data_model/derive/src/has_origin.rs +++ b/data_model/derive/src/has_origin.rs @@ -1,105 +1,86 @@ -#![allow( - clippy::str_to_string, - clippy::mixed_read_write_in_expression, - clippy::unwrap_in_result -)] - -use iroha_macro_utils::{attr_struct, AttrParser}; -use proc_macro::TokenStream; -use proc_macro_error::abort; -use quote::quote; -use syn::{ - parse::{Parse, ParseStream}, - parse_quote, - punctuated::Punctuated, - Attribute, Generics, Ident, Token, Type, Variant, Visibility, +use darling::{FromDeriveInput, FromVariant}; +use iroha_macro_utils::{ + attr_struct2, parse_single_list_attr, parse_single_list_attr_opt, Emitter, +}; +use proc_macro2::TokenStream; +use quote::quote; +use syn2::{parse_quote, Ident, Token, Type}; mod kw { - syn::custom_keyword!(origin); - syn::custom_keyword!(variant); + syn2::custom_keyword!(origin); } +const HAS_ORIGIN_ATTR: &str = "has_origin"; + pub struct HasOriginEnum { ident: Ident, - variants: Punctuated<HasOriginVariant, Token![,]>, + #[allow(unused)] + generics: syn2::Generics, + variants: Vec<HasOriginVariant>, origin: Type, } +impl FromDeriveInput for HasOriginEnum { + fn from_derive_input(input: &syn2::DeriveInput) -> darling::Result<Self> { + let ident = input.ident.clone(); + let generics = input.generics.clone(); + + let Some(variants) = + darling::ast::Data::<HasOriginVariant, darling::util::Ignored>::try_from(&input.data)?.take_enum() + else { + return Err(darling::Error::custom("Expected enum")); + }; + + let origin = parse_single_list_attr::<OriginAttr>(HAS_ORIGIN_ATTR, &input.attrs)?.ty; + + Ok(Self { + ident, + generics, + variants, + origin, + }) + } +} + pub struct HasOriginVariant { ident: Ident, - extractor: Option<OriginExtractor>, + extractor: Option<OriginExtractorAttr>, } -struct HasOriginAttr<T>(core::marker::PhantomData<T>); +impl FromVariant for HasOriginVariant { + fn from_variant(variant: &syn2::Variant) -> darling::Result<Self> { + let ident = variant.ident.clone(); + let extractor = parse_single_list_attr_opt(HAS_ORIGIN_ATTR, &variant.attrs)?; -impl<T: Parse> AttrParser<T> for HasOriginAttr<T> { - const IDENT: &'static str = "has_origin"; + Ok(Self { ident, extractor }) + } } -attr_struct! { - pub struct Origin { +attr_struct2! { + pub struct OriginAttr { _kw: kw::origin, _eq: Token![=], ty: Type, } } -attr_struct! { - pub struct OriginExtractor { +attr_struct2!
{ + pub struct OriginExtractorAttr { ident: Ident, _eq: Token![=>], - extractor: syn::Expr, + extractor: syn2::Expr, } } -impl Parse for HasOriginEnum { - fn parse(input: ParseStream) -> syn::Result { - let attrs = input.call(Attribute::parse_outer)?; - let _vis = input.parse::()?; - let _enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - if !generics.params.is_empty() { - abort!(generics, "Generics are not supported"); - } - let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(HasOriginVariant::parse)?; - let origin = attrs - .iter() - .find_map(|attr| HasOriginAttr::::parse(attr).ok()) - .map(|origin| origin.ty) - .expect("Attribute `#[has_origin(origin = Type)]` is required"); - Ok(HasOriginEnum { - ident, - variants, - origin, - }) - } -} +pub fn impl_has_origin(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream { + let Some(enum_) = emitter.handle(HasOriginEnum::from_derive_input(input)) else { + return quote!(); + }; -impl Parse for HasOriginVariant { - fn parse(input: ParseStream) -> syn::Result { - let variant = input.parse::()?; - let Variant { - ident, - fields, - attrs, - .. - } = variant; - match fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => {} - fields => abort!(fields, "Only supports tuple variants with single field"), - }; - let extractor = attrs - .iter() - .find_map(|attr| HasOriginAttr::::parse(attr).ok()); - Ok(HasOriginVariant { ident, extractor }) + if enum_.variants.is_empty() { + return quote!(); } -} -pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { let enum_ident = &enum_.ident; let enum_origin = &enum_.origin; let variants_match_arms = &enum_ @@ -116,10 +97,12 @@ pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { }, ) }) - .collect::>(); + .collect::>(); + + let (impl_generics, ty_generics, where_clause) = enum_.generics.split_for_impl(); quote! { - impl HasOrigin for #enum_ident { + impl #impl_generics HasOrigin for #enum_ident #ty_generics #where_clause { type Origin = #enum_origin; fn origin_id(&self) -> &::Id { @@ -132,5 +115,4 @@ pub fn impl_has_origin(enum_: &HasOriginEnum) -> TokenStream { } } } - .into() } diff --git a/data_model/derive/src/id.rs b/data_model/derive/src/id.rs index afb742b2420..3b40a0da006 100644 --- a/data_model/derive/src/id.rs +++ b/data_model/derive/src/id.rs @@ -1,31 +1,86 @@ -#![allow(clippy::str_to_string, clippy::mixed_read_write_in_expression)] - +use darling::{FromAttributes, FromDeriveInput, FromField}; +use iroha_macro_utils::{find_single_attr_opt, Emitter}; +use manyhow::emit; use proc_macro2::TokenStream; -use proc_macro_error::abort; -use quote::quote; -use syn::parse_quote; +use quote::{quote, ToTokens}; +use syn2::parse_quote; -fn derive_identifiable(input: &syn::ItemStruct) -> TokenStream { - let name = &input.ident; - let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let (id_type, id_expr) = get_id_type(input); +mod kw { + syn2::custom_keyword!(transparent); +} - quote! 
{ - impl #impl_generics Identifiable for #name #ty_generics #where_clause { - type Id = #id_type; +enum IdAttr { + Missing, + Normal, + Transparent, +} - #[inline] - fn id(&self) -> &Self::Id { - #id_expr +impl FromAttributes for IdAttr { + fn from_attributes(attrs: &[syn2::Attribute]) -> darling::Result { + let mut accumulator = darling::error::Accumulator::default(); + let Some(attr) = find_single_attr_opt(&mut accumulator, "id", attrs) else { + return accumulator.finish_with(IdAttr::Missing); + }; + + let result = match &attr.meta { + syn2::Meta::Path(_) => IdAttr::Normal, + syn2::Meta::List(list) if list.parse_args::().is_ok() => { + IdAttr::Transparent + } + _ => { + accumulator.push( + darling::Error::custom("Expected `#[id]` or `#[id(transparent)]`") + .with_span(&attr), + ); + IdAttr::Normal } + }; + + accumulator.finish_with(result) + } +} + +#[derive(FromDeriveInput)] +#[darling(supports(struct_any))] +struct IdDeriveInput { + ident: syn2::Ident, + generics: syn2::Generics, + data: darling::ast::Data, +} + +struct IdField { + ident: Option, + ty: syn2::Type, + id_attr: IdAttr, +} + +impl FromField for IdField { + fn from_field(field: &syn2::Field) -> darling::Result { + let ident = field.ident.clone(); + let ty = field.ty.clone(); + let id_attr = IdAttr::from_attributes(&field.attrs)?; + + Ok(Self { ident, ty, id_attr }) + } +} + +impl IdDeriveInput { + fn fields(&self) -> &darling::ast::Fields { + match &self.data { + darling::ast::Data::Struct(fields) => fields, + _ => unreachable!(), } } } -pub fn impl_id(input: &syn::ItemStruct) -> TokenStream { +pub fn impl_id_eq_ord_hash(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream { + let Some(input) = emitter.handle(IdDeriveInput::from_derive_input(input)) else { + return quote!(); + }; + let name = &input.ident; let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); - let identifiable_derive = derive_identifiable(input); + let identifiable_derive = derive_identifiable(emitter, &input); quote! { #identifiable_derive @@ -58,63 +113,56 @@ pub fn impl_id(input: &syn::ItemStruct) -> TokenStream { } } -fn get_id_type(input: &syn::ItemStruct) -> (TokenStream, TokenStream) { - match &input.fields { - syn::Fields::Named(fields) => { - for field in &fields.named { - let (field_name, field_ty) = (&field.ident, &field.ty); - - if is_identifier(&field.attrs) { - return (quote! {#field_ty}, quote! {&self.#field_name}); - } - if is_transparent(&field.attrs) { - return ( - quote! {<#field_ty as Identifiable>::Id}, - quote! {Identifiable::id(&self.#field_name)}, - ); - } - } - } - syn::Fields::Unnamed(fields) => { - for (i, field) in fields.unnamed.iter().enumerate() { - let (field_id, field_ty): (syn::Index, _) = (i.into(), &field.ty); - - if is_identifier(&field.attrs) { - return (quote! {#field_ty}, quote! {&self.#field_id}); - } - if is_transparent(&field.attrs) { - return ( - quote! {<#field_ty as Identifiable>::Id}, - quote! {Identifiable::id(&self.#field_id)}, - ); - } +fn derive_identifiable(emitter: &mut Emitter, input: &IdDeriveInput) -> TokenStream { + let name = &input.ident; + let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); + let (id_type, id_expr) = get_id_type(emitter, input); + + quote! 
{ + impl #impl_generics Identifiable for #name #ty_generics #where_clause { + type Id = #id_type; + + #[inline] + fn id(&self) -> &Self::Id { + #id_expr } } - syn::Fields::Unit => {} } +} - match &input.fields { - syn::Fields::Named(named) => { - for field in &named.named { - let field_ty = &field.ty; - - if field.ident.as_ref().expect("Field must be named") == "id" { - return (quote! {#field_ty}, quote! {&self.id}); - } +fn get_id_type(emitter: &mut Emitter, input: &IdDeriveInput) -> (syn2::Type, syn2::Expr) { + for (field_index, IdField { ty, ident, id_attr }) in input.fields().iter().enumerate() { + let field_name = ident.as_ref().map_or_else( + || syn2::Index::from(field_index).to_token_stream(), + ToTokens::to_token_stream, + ); + match id_attr { + IdAttr::Normal => { + return (ty.clone(), parse_quote! {&self.#field_name}); + } + IdAttr::Transparent => { + return ( + parse_quote! {<#ty as Identifiable>::Id}, + parse_quote! {Identifiable::id(&self.#field_name)}, + ); + } + IdAttr::Missing => { + // nothing here } } - syn::Fields::Unnamed(_) | syn::Fields::Unit => {} } - abort!(input, "Identifier not found") -} + for field in input.fields().iter() { + if field.ident.as_ref().is_some_and(|i| i == "id") { + return (field.ty.clone(), parse_quote! {&self.id}); + } + } -fn is_identifier(attrs: &[syn::Attribute]) -> bool { - attrs.iter().any(|attr| attr == &parse_quote! {#[id]}) -} + emit!( + emitter, + "Could not find the identifier field. Either mark it with `#[id]` or have it named `id`" + ); -fn is_transparent(attrs: &[syn::Attribute]) -> bool { - attrs - .iter() - .any(|attr| attr == &parse_quote! {#[id(transparent)]}) + // return dummy types + (parse_quote! {()}, parse_quote! {()}) } diff --git a/data_model/derive/src/lib.rs b/data_model/derive/src/lib.rs index 607ff1720e6..daf1b3a9a4f 100644 --- a/data_model/derive/src/lib.rs +++ b/data_model/derive/src/lib.rs @@ -1,14 +1,13 @@ //! A crate containing various derive macros for `data_model` -#![allow(clippy::std_instead_of_core)] - mod filter; mod has_origin; mod id; mod model; mod partially_tagged; -use proc_macro::TokenStream; -use syn::parse_macro_input; +use iroha_macro_utils::Emitter; +use manyhow::{emit, manyhow, Result}; +use proc_macro2::TokenStream; /// Macro which controls how to export item's API. The behaviour is controlled with `transparent_api` /// feature flag. If the flag is active, item's public fields will be exposed as public, however, if @@ -80,19 +79,37 @@ use syn::parse_macro_input; /// ``` /// /// It assumes that the derive is imported and referred to by its original name. +#[manyhow] #[proc_macro_attribute] -#[proc_macro_error::proc_macro_error] -pub fn model(_attr: TokenStream, input: TokenStream) -> TokenStream { - model::impl_model(&parse_macro_input!(input)).into() +pub fn model(attr: TokenStream, input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); + + if !attr.is_empty() { + emit!(emitter, attr, "This attribute does not take any arguments"); + } + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = model::impl_model(&mut emitter, &input); + + emitter.finish_token_stream_with(result) } /// Same as [`model`] macro, but only processes a single item. /// /// You should prefer using [`model`] macro over this one. 
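// For illustration, here is how the identifier lookup order implemented by `get_id_type`
// above plays out: an explicit `#[id]` field wins, then `#[id(transparent)]`, then a field
// literally named `id`. This is a hypothetical sketch (invented type names; the
// `From<...> for IdBox` conversion the real derive also expects is elided, see the new
// `id_eq_ord_hash.rs` test further below for the complete setup):
use iroha_data_model_derive::IdEqOrdHash;

// Hypothetical identifier type; anything `Ord + Eq + Hash` works.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct WidgetId(u32);

#[derive(Debug, IdEqOrdHash)]
struct Widget {
    #[id] // explicit marker wins over a field named `id`
    key: WidgetId,
    payload: u64, // not part of identity: `Eq`, `Ord` and `Hash` delegate to `key`
}

#[derive(Debug, IdEqOrdHash)]
struct WidgetWrapper {
    #[id(transparent)] // delegate to `Widget`'s own `Identifiable` impl
    inner: Widget,
}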
+#[manyhow] #[proc_macro] -#[proc_macro_error::proc_macro_error] pub fn model_single(input: TokenStream) -> TokenStream { - model::process_item(parse_macro_input!(input)).into() + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + emitter.finish_token_stream_with(model::process_item(input)) } /// Derive macro for `Identifiable` trait which also automatically implements [`Ord`], [`Eq`], @@ -209,10 +226,17 @@ pub fn model_single(input: TokenStream) -> TokenStream { /// } /// ``` /// -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(IdEqOrdHash, attributes(id, opaque))] pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { - id::impl_id(&parse_macro_input!(input)).into() + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = id::impl_id_eq_ord_hash(&mut emitter, &input); + emitter.finish_token_stream_with(result) } /// [`Filter`] is used for code generation of `...Filter` structs and `...EventFilter` enums, as well as @@ -377,10 +401,17 @@ pub fn id_eq_ord_hash(input: TokenStream) -> TokenStream { /// ``` /// /// It assumes that the derive is imported and referred to by its original name. +#[manyhow] #[proc_macro_derive(Filter)] pub fn filter_derive(input: TokenStream) -> TokenStream { - let event = parse_macro_input!(input as filter::EventEnum); - filter::impl_filter(&event) + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = filter::impl_filter(&mut emitter, &input); + emitter.finish_token_stream_with(result) } /// Derive `::serde::Serialize` trait for `enum` with possibility to avoid tags for selected variants @@ -409,10 +440,12 @@ pub fn filter_derive(input: TokenStream) -> TokenStream { /// &serde_json::to_string(&Outer::A(42)).expect("Failed to serialize"), r#"{"A":42}"# /// ); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(PartiallyTaggedSerialize, attributes(serde_partially_tagged, serde))] -pub fn partially_tagged_serialize_derive(input: TokenStream) -> TokenStream { - partially_tagged::impl_partially_tagged_serialize(&parse_macro_input!(input)) +pub fn partially_tagged_serialize_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + partially_tagged::impl_partially_tagged_serialize(&input) } /// Derive `::serde::Deserialize` trait for `enum` with possibility to avoid tags for selected variants @@ -470,10 +503,12 @@ pub fn partially_tagged_serialize_derive(input: TokenStream) -> TokenStream { /// serde_json::from_str::(r#"{"B":42}"#).expect("Failed to deserialize"), Outer::Inner1(Inner::B(42)) /// ); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(PartiallyTaggedDeserialize, attributes(serde_partially_tagged, serde))] -pub fn partially_tagged_deserialize_derive(input: TokenStream) -> TokenStream { - partially_tagged::impl_partially_tagged_deserialize(&parse_macro_input!(input)) +pub fn partially_tagged_deserialize_derive(input: TokenStream) -> Result { + let input = syn2::parse2(input)?; + + partially_tagged::impl_partially_tagged_deserialize(&input) } /// Derive macro for `HasOrigin`. 
@@ -559,8 +594,16 @@ pub fn partially_tagged_deserialize_derive(input: TokenStream) -> TokenStream { /// assert_eq!(&layer_id, layer_sub_layer_event.origin_id()); /// assert_eq!(&sub_layer_id, sub_layer_created_event.origin_id()); /// ``` -#[proc_macro_error::proc_macro_error] +#[manyhow] #[proc_macro_derive(HasOrigin, attributes(has_origin))] pub fn has_origin_derive(input: TokenStream) -> TokenStream { - has_origin::impl_has_origin(&parse_macro_input!(input)) + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = has_origin::impl_has_origin(&mut emitter, &input); + + emitter.finish_token_stream_with(result) } diff --git a/data_model/derive/src/model.rs b/data_model/derive/src/model.rs index 8a7426baca3..32d163d27cb 100644 --- a/data_model/derive/src/model.rs +++ b/data_model/derive/src/model.rs @@ -1,10 +1,11 @@ +use iroha_macro_utils::Emitter; +use manyhow::emit; use proc_macro2::TokenStream; -use proc_macro_error::abort; use quote::{quote, ToTokens}; -use syn::{parse_quote, Attribute}; +use syn2::{parse_quote, Attribute}; -pub fn impl_model(input: &syn::ItemMod) -> TokenStream { - let syn::ItemMod { +pub fn impl_model(emitter: &mut Emitter, input: &syn2::ItemMod) -> TokenStream { + let syn2::ItemMod { attrs, vis, mod_token, @@ -14,14 +15,17 @@ pub fn impl_model(input: &syn::ItemMod) -> TokenStream { .. } = input; - let syn::Visibility::Public(vis_public) = vis else { - abort!( + let syn2::Visibility::Public(vis_public) = vis else { + emit!( + emitter, input, "The `model` attribute can only be used on public modules" ); + return quote!(); }; if ident != "model" { - abort!( + emit!( + emitter, input, "The `model` attribute can only be used on the `model` module" ); @@ -40,16 +44,16 @@ pub fn impl_model(input: &syn::ItemMod) -> TokenStream { } } -pub fn process_item(item: syn::Item) -> TokenStream { - let mut input: syn::DeriveInput = match item { - syn::Item::Struct(item_struct) => item_struct.into(), - syn::Item::Enum(item_enum) => item_enum.into(), - syn::Item::Union(item_union) => item_union.into(), +pub fn process_item(item: syn2::Item) -> TokenStream { + let mut input: syn2::DeriveInput = match item { + syn2::Item::Struct(item_struct) => item_struct.into(), + syn2::Item::Enum(item_enum) => item_enum.into(), + syn2::Item::Union(item_union) => item_union.into(), other => return other.into_token_stream(), }; let vis = &input.vis; - if matches!(vis, syn::Visibility::Public(_)) { + if matches!(vis, syn2::Visibility::Public(_)) { return process_pub_item(input); } @@ -70,21 +74,21 @@ pub fn process_item(item: syn::Item) -> TokenStream { } } -fn process_pub_item(input: syn::DeriveInput) -> TokenStream { +fn process_pub_item(input: syn2::DeriveInput) -> TokenStream { let (impl_generics, _, where_clause) = input.generics.split_for_impl(); let attrs = input.attrs; let ident = input.ident; match input.data { - syn::Data::Struct(item) => match &item.fields { - syn::Fields::Named(fields) => { + syn2::Data::Struct(item) => match &item.fields { + syn2::Fields::Named(fields) => { let fields = fields.named.iter().map(|field| { let field_attrs = &field.attrs; let field_name = &field.ident; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! 
{#field,}; } @@ -107,12 +111,12 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } - syn::Fields::Unnamed(fields) => { + syn2::Fields::Unnamed(fields) => { let fields = fields.unnamed.iter().map(|field| { let field_attrs = &field.attrs; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! {#field,}; } @@ -133,7 +137,7 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } - syn::Fields::Unit => { + syn2::Fields::Unit => { let item = quote! { pub struct #ident #impl_generics #where_clause; }; @@ -141,7 +145,7 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } }, - syn::Data::Enum(item) => { + syn2::Data::Enum(item) => { let variants = &item.variants; let item = quote! { @@ -153,14 +157,13 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { expose_ffi(attrs, &item) } // Triggers in `quote!` side, see https://github.com/rust-lang/rust-clippy/issues/10417 - #[allow(clippy::arithmetic_side_effects)] - syn::Data::Union(item) => { + syn2::Data::Union(item) => { let fields = item.fields.named.iter().map(|field| { let field_attrs = &field.attrs; let field_name = &field.ident; let field_ty = &field.ty; - if !matches!(field.vis, syn::Visibility::Public(_)) { + if !matches!(field.vis, syn2::Visibility::Public(_)) { return quote! {#field,}; } @@ -176,7 +179,6 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { }); // See https://github.com/rust-lang/rust-clippy/issues/10417 - #[allow(clippy::arithmetic_side_effects)] let item = quote! { pub union #ident #impl_generics #where_clause { #(#fields),* @@ -189,7 +191,9 @@ fn process_pub_item(input: syn::DeriveInput) -> TokenStream { } fn expose_ffi(mut attrs: Vec, item: &TokenStream) -> TokenStream { - let mut ffi_attrs = attrs.iter().filter(|&attr| attr.path.is_ident("ffi_type")); + let mut ffi_attrs = attrs + .iter() + .filter(|&attr| attr.path().is_ident("ffi_type")); if ffi_attrs.next().is_none() { return quote! { @@ -201,7 +205,7 @@ fn expose_ffi(mut attrs: Vec, item: &TokenStream) -> TokenStream { attrs.retain(|attr| *attr != parse_quote! (#[ffi_type])); let no_ffi_attrs: Vec<_> = attrs .iter() - .filter(|&attr| !attr.path.is_ident("ffi_type")) + .filter(|&attr| !attr.path().is_ident("ffi_type")) .collect(); quote! 
{ diff --git a/data_model/derive/src/partially_tagged.rs b/data_model/derive/src/partially_tagged/mod.rs similarity index 78% rename from data_model/derive/src/partially_tagged.rs rename to data_model/derive/src/partially_tagged/mod.rs index 845520f0670..3e40d4518f0 100644 --- a/data_model/derive/src/partially_tagged.rs +++ b/data_model/derive/src/partially_tagged/mod.rs @@ -1,123 +1,96 @@ #![allow(clippy::too_many_lines)] -use proc_macro::TokenStream; -use proc_macro_error::abort; +// darling-generated code triggers this lint +#![allow(clippy::option_if_let_else)] + +mod resolve_self; + +use darling::{FromDeriveInput, FromVariant}; +use manyhow::Result; +use proc_macro2::TokenStream; use quote::{format_ident, quote}; -use syn::{ - parse::{Parse, ParseStream}, - parse_quote, - punctuated::Punctuated, - Attribute, Generics, Ident, Token, Type, Variant, Visibility, -}; +use syn2::{parse_quote, Attribute, Generics, Ident, Type}; +#[derive(FromDeriveInput)] +#[darling(forward_attrs(serde), supports(enum_newtype))] pub struct PartiallyTaggedEnum { - attrs: Vec, ident: Ident, - variants: Punctuated, generics: Generics, + data: darling::ast::Data, + attrs: Vec, } +#[derive(FromVariant)] +#[darling(forward_attrs(serde), attributes(serde_partially_tagged))] pub struct PartiallyTaggedVariant { - attrs: Vec, ident: Ident, - ty: Type, - is_untagged: bool, + fields: darling::ast::Fields, + attrs: Vec, + #[darling(default)] + untagged: bool, } -impl Parse for PartiallyTaggedEnum { - fn parse(input: ParseStream) -> syn::Result { - let mut attrs = input.call(Attribute::parse_outer)?; - let _vis = input.parse::()?; - let _enum_token = input.parse::()?; - let ident = input.parse::()?; - let generics = input.parse::()?; - let content; - let _brace_token = syn::braced!(content in input); - let variants = content.parse_terminated(PartiallyTaggedVariant::parse)?; - attrs.retain(is_serde_attr); - Ok(PartiallyTaggedEnum { - attrs, - ident, - variants, - generics, - }) +impl PartiallyTaggedEnum { + fn variants(&self) -> impl Iterator { + match &self.data { + darling::ast::Data::Enum(variants) => variants.iter(), + _ => unreachable!( + "Only enums are supported. Enforced by `darling(supports(enum_newtype))`" + ), + } } -} -impl Parse for PartiallyTaggedVariant { - fn parse(input: ParseStream) -> syn::Result { - let variant = input.parse::()?; - let Variant { - ident, - fields, - mut attrs, - .. 
- } = variant; - let field = match fields { - syn::Fields::Unnamed(fields) if fields.unnamed.len() == 1 => fields - .unnamed - .into_iter() - .next() - .expect("Guaranteed to have exactly one field"), - fields => abort!(fields, "Only supports tuple variants with single field"), - }; - let ty = field.ty; - let is_untagged = attrs.iter().any(is_untagged_attr); - attrs.retain(is_serde_attr); - Ok(PartiallyTaggedVariant { - attrs, - ident, - ty, - is_untagged, - }) + fn untagged_variants(&self) -> impl Iterator { + self.variants().filter(|variant| variant.untagged) } -} -impl PartiallyTaggedEnum { - fn variants(&self) -> impl Iterator { - self.variants.iter() + /// Returns a type that corresponds to `Self`, handling the generics as necessary + fn self_ty(&self) -> syn2::Type { + let ident = &self.ident; + let (_, type_generics, _) = self.generics.split_for_impl(); + + parse_quote!(#ident #type_generics) } +} - fn untagged_variants(&self) -> impl Iterator { - self.variants.iter().filter(|variant| variant.is_untagged) +impl PartiallyTaggedVariant { + fn ty(&self, self_ty: &syn2::Type) -> syn2::Type { + let ty = self.fields.fields.first().expect( + "BUG: Only newtype enums are supported. Enforced by `darling(supports(enum_newtype))`", + ).clone(); + + resolve_self::resolve_self(self_ty, ty) } } /// Convert from vector of variants to tuple of vectors consisting of variant's fields fn variants_to_tuple<'lt, I: Iterator>( + self_ty: &syn2::Type, variants: I, -) -> (Vec<&'lt Ident>, Vec<&'lt Type>, Vec<&'lt [Attribute]>) { +) -> (Vec<&'lt Ident>, Vec, Vec<&'lt [Attribute]>) { variants.fold( (Vec::new(), Vec::new(), Vec::new()), |(mut idents, mut types, mut attrs), variant| { idents.push(&variant.ident); - types.push(&variant.ty); + types.push(variant.ty(self_ty)); attrs.push(&variant.attrs); (idents, types, attrs) }, ) } -/// Check if enum variant should be treated as untagged -fn is_untagged_attr(attr: &Attribute) -> bool { - attr == &parse_quote!(#[serde_partially_tagged(untagged)]) -} - -/// Check if `#[serde...]` attribute -fn is_serde_attr(attr: &Attribute) -> bool { - attr.path - .get_ident() - .map_or_else(|| false, |ident| ident.to_string().eq("serde")) -} +pub fn impl_partially_tagged_serialize(input: &syn2::DeriveInput) -> Result { + let enum_ = PartiallyTaggedEnum::from_derive_input(input)?; -pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStream { let enum_ident = &enum_.ident; let enum_attrs = &enum_.attrs; let ref_internal_repr_ident = format_ident!("{}RefInternalRepr", enum_ident); let ser_helper = format_ident!("{}SerializeHelper", enum_ident); - let (variants_ident, variants_ty, variants_attrs) = variants_to_tuple(enum_.variants()); + let self_ty = enum_.self_ty(); + let (variants_ident, variants_ty, variants_attrs) = + variants_to_tuple(&self_ty, enum_.variants()); let (untagged_variants_ident, untagged_variants_ty, untagged_variants_attrs) = - variants_to_tuple(enum_.untagged_variants()); - let serialize_trait_bound: syn::TypeParamBound = parse_quote!(::serde::Serialize); + variants_to_tuple(&self_ty, enum_.untagged_variants()); + let serialize_trait_bound: syn2::TypeParamBound = parse_quote!(::serde::Serialize); let mut generics = enum_.generics.clone(); generics .type_params_mut() @@ -128,7 +101,7 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre let (ref_internal_impl_generics, ref_internal_type_generics, ref_internal_where_clause) = ref_internal_generics.split_for_impl(); - quote! { + Ok(quote! 
{ impl #impl_generics ::serde::Serialize for #enum_ident #type_generics #where_clause { fn serialize(&self, serializer: S) -> Result where @@ -176,21 +149,24 @@ pub fn impl_partially_tagged_serialize(enum_: &PartiallyTaggedEnum) -> TokenStre wrapper.serialize(serializer) } } - } - .into() + }) } -pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenStream { +pub fn impl_partially_tagged_deserialize(input: &syn2::DeriveInput) -> Result { + let enum_ = PartiallyTaggedEnum::from_derive_input(input)?; + let enum_ident = &enum_.ident; let enum_attrs = &enum_.attrs; let internal_repr_ident = format_ident!("{}InternalRepr", enum_ident); let deser_helper = format_ident!("{}DeserializeHelper", enum_ident); let no_successful_untagged_variant_match = - format!("Data did not match any variant of enum {}", deser_helper); - let (variants_ident, variants_ty, variants_attrs) = variants_to_tuple(enum_.variants()); + format!("Data did not match any variant of enum {deser_helper}"); + let self_ty = enum_.self_ty(); + let (variants_ident, variants_ty, variants_attrs) = + variants_to_tuple(&self_ty, enum_.variants()); let (untagged_variants_ident, untagged_variants_ty, untagged_variants_attrs) = - variants_to_tuple(enum_.untagged_variants()); - let deserialize_trait_bound: syn::TypeParamBound = parse_quote!(::serde::de::DeserializeOwned); + variants_to_tuple(&self_ty, enum_.untagged_variants()); + let deserialize_trait_bound: syn2::TypeParamBound = parse_quote!(::serde::de::DeserializeOwned); let variants_ty_deserialize_bound = variants_ty .iter() .map(|ty| quote!(#ty: #deserialize_trait_bound).to_string()) @@ -207,7 +183,7 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt let (internal_repr_impl_generics, internal_repr_type_generics, internal_repr_where_clause) = internal_repr_generics.split_for_impl(); - quote! { + Ok(quote! { impl #impl_generics ::serde::Deserialize<'de> for #enum_ident #type_generics #where_clause { fn deserialize(deserializer: D) -> Result where @@ -342,6 +318,5 @@ pub fn impl_partially_tagged_deserialize(enum_: &PartiallyTaggedEnum) -> TokenSt } } } - } - .into() + }) } diff --git a/data_model/derive/src/partially_tagged/resolve_self.rs b/data_model/derive/src/partially_tagged/resolve_self.rs new file mode 100644 index 00000000000..862802cdd66 --- /dev/null +++ b/data_model/derive/src/partially_tagged/resolve_self.rs @@ -0,0 +1,63 @@ +use syn2::visit_mut::VisitMut; + +struct Visitor<'a> { + self_ty: &'a syn2::Type, +} + +impl VisitMut for Visitor<'_> { + fn visit_type_mut(&mut self, ty: &mut syn2::Type) { + match ty { + syn2::Type::Path(path_ty) + if path_ty.qself.is_none() && path_ty.path.is_ident("Self") => + { + *ty = self.self_ty.clone(); + } + _ => syn2::visit_mut::visit_type_mut(self, ty), + } + } +} + +/// Transforms the [`resolving_ty`] by replacing `Self` with [`self_ty`]. +/// +/// This is required to be able to use `Self` in `PartiallyTaggedSerialize` and `PartiallyTaggedDeserialize`, +/// as they define an additional intermediate type during serialization/deserialization. Using `Self` there would refer to an incorrect type. 
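// A self-contained sketch of that substitution, written against plain syn 2.x (`syn2`
// appears to be this workspace's rename of it) with the `visit-mut` and `extra-traits`
// features enabled; the `Expr<T>` target type mirrors the `partial_tagged_serde_self.rs`
// test further below:
use syn::{parse_quote, visit_mut::VisitMut};

struct ReplaceSelf {
    with: syn::Type,
}

impl VisitMut for ReplaceSelf {
    fn visit_type_mut(&mut self, ty: &mut syn::Type) {
        match ty {
            // A bare `Self` path is swapped for the concrete type...
            syn::Type::Path(p) if p.qself.is_none() && p.path.is_ident("Self") => {
                *ty = self.with.clone();
            }
            // ...anything else is traversed recursively.
            _ => syn::visit_mut::visit_type_mut(self, ty),
        }
    }
}

fn main() {
    // Inside the generated helper enum, `Self` must become `Expr<T>`,
    // because the helper is a distinct type from the user's enum.
    let mut ty: syn::Type = parse_quote!(Box<Self>);
    ReplaceSelf { with: parse_quote!(Expr<T>) }.visit_type_mut(&mut ty);
    let expected: syn::Type = parse_quote!(Box<Expr<T>>);
    assert_eq!(ty, expected);
}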
+pub fn resolve_self(self_ty: &syn2::Type, mut resolving_ty: syn2::Type) -> syn2::Type { + Visitor { self_ty }.visit_type_mut(&mut resolving_ty); + resolving_ty +} + +#[cfg(test)] +mod tests { + use quote::ToTokens; + use syn2::{parse_quote, Type}; + + #[test] + fn test_resolve_self() { + let test_types = [ + parse_quote!(i32), + parse_quote!(Self), + parse_quote!(Vec), + parse_quote!((Self, Self)), + parse_quote!(::Type), + ]; + let expected_types = [ + parse_quote!(i32), + parse_quote!(()), + parse_quote!(Vec<()>), + parse_quote!(((), ())), + parse_quote!(<() as Trait>::Type), + ]; + let _: &Type = &test_types[0]; + let _: &Type = &expected_types[0]; + + for (test_type, expected_type) in test_types.iter().zip(expected_types.iter()) { + let resolved = super::resolve_self(&parse_quote!(()), test_type.clone()); + assert_eq!( + resolved, + *expected_type, + "Failed to resolve `Self` in `{}`", + test_type.to_token_stream() + ); + } + } +} diff --git a/data_model/derive/tests/has_origin.rs b/data_model/derive/tests/has_origin.rs new file mode 100644 index 00000000000..0fa7ed37a66 --- /dev/null +++ b/data_model/derive/tests/has_origin.rs @@ -0,0 +1,52 @@ +use iroha_data_model::prelude::{HasOrigin, Identifiable}; +use iroha_data_model_derive::{HasOrigin, IdEqOrdHash}; + +#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct ObjectId(pub i32); + +// fake impl for `#[derive(IdEqOrdHash)]` +impl From for iroha_data_model::IdBox { + fn from(_: ObjectId) -> Self { + unimplemented!("fake impl") + } +} + +#[derive(Debug, IdEqOrdHash)] +struct Object { + id: ObjectId, +} + +impl Object { + fn id(&self) -> &ObjectId { + &self.id + } +} + +#[allow(clippy::enum_variant_names)] // it's a test, duh +#[derive(Debug, HasOrigin)] +#[has_origin(origin = Object)] +enum ObjectEvent { + EventWithId(ObjectId), + #[has_origin(event => &event.0)] + EventWithExtractor((ObjectId, i32)), + #[has_origin(obj => obj.id())] + EventWithAnotherExtractor(Object), +} + +#[test] +fn has_origin() { + let events = vec![ + ObjectEvent::EventWithId(ObjectId(1)), + ObjectEvent::EventWithExtractor((ObjectId(2), 2)), + ObjectEvent::EventWithAnotherExtractor(Object { id: ObjectId(3) }), + ]; + let expected_ids = vec![ObjectId(1), ObjectId(2), ObjectId(3)]; + + for (event, expected_id) in events.into_iter().zip(expected_ids) { + assert_eq!( + event.origin_id(), + &expected_id, + "mismatched origin id for event {event:?}", + ); + } +} diff --git a/data_model/derive/tests/has_origin_generics.rs b/data_model/derive/tests/has_origin_generics.rs new file mode 100644 index 00000000000..a83678a2fe2 --- /dev/null +++ b/data_model/derive/tests/has_origin_generics.rs @@ -0,0 +1,52 @@ +use iroha_data_model::prelude::{HasOrigin, Identifiable}; +use iroha_data_model_derive::{HasOrigin, IdEqOrdHash}; + +#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct ObjectId(pub i32); + +// fake impl for `#[derive(IdEqOrdHash)]` +impl From for iroha_data_model::IdBox { + fn from(_: ObjectId) -> Self { + unimplemented!("fake impl") + } +} + +#[derive(Debug, IdEqOrdHash)] +struct Object { + id: ObjectId, +} + +impl Object { + fn id(&self) -> &ObjectId { + &self.id + } +} + +#[allow(clippy::enum_variant_names)] // it's a test, duh +#[derive(Debug, HasOrigin)] +#[has_origin(origin = Object)] +enum ObjectEvent> { + EventWithId(ObjectId), + #[has_origin(event => &event.0)] + EventWithExtractor((ObjectId, i32)), + #[has_origin(obj => obj.id())] + EventWithAnotherExtractor(T), +} + +#[test] +fn has_origin() { + let events = vec![ + 
ObjectEvent::EventWithId(ObjectId(1)), + ObjectEvent::EventWithExtractor((ObjectId(2), 2)), + ObjectEvent::EventWithAnotherExtractor(Object { id: ObjectId(3) }), + ]; + let expected_ids = vec![ObjectId(1), ObjectId(2), ObjectId(3)]; + + for (event, expected_id) in events.into_iter().zip(expected_ids) { + assert_eq!( + event.origin_id(), + &expected_id, + "mismatched origin id for event {event:?}", + ); + } +} diff --git a/data_model/derive/tests/id_eq_ord_hash.rs b/data_model/derive/tests/id_eq_ord_hash.rs new file mode 100644 index 00000000000..91e94df415d --- /dev/null +++ b/data_model/derive/tests/id_eq_ord_hash.rs @@ -0,0 +1,117 @@ +//! Basic tests for traits derived by [`IdEqOrdHash`] macro + +use std::collections::BTreeSet; + +use iroha_data_model_derive::IdEqOrdHash; + +/// fake `Identifiable` trait +/// +/// Doesn't require `Into` implementation +pub trait Identifiable: Ord + Eq { + /// Type of the entity identifier + type Id: Ord + Eq + core::hash::Hash; + + /// Get reference to the type identifier + fn id(&self) -> &Self::Id; +} + +#[derive(Debug, Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] +struct ObjectId(char); + +#[derive(Debug, IdEqOrdHash)] +struct Object { + id: ObjectId, + #[allow(unused)] + data: i32, +} +#[derive(Debug, IdEqOrdHash)] +struct ObjectWithExplicitId { + #[id] + definitely_not_id: ObjectId, + #[allow(unused)] + data: i32, +} +#[derive(Debug, IdEqOrdHash)] +struct ObjectWithTransparentId { + #[id(transparent)] // delegate the id to `Object` type + definitely_not_id: Object, + #[allow(unused)] + data: i32, +} + +// some objects to play with in tests +const ID_A: ObjectId = ObjectId('A'); +const ID_B: ObjectId = ObjectId('B'); +const OBJECT_1A: Object = Object { id: ID_A, data: 1 }; +const OBJECT_1B: Object = Object { id: ID_B, data: 1 }; +const OBJECT_2A: Object = Object { id: ID_A, data: 2 }; +const EXPLICIT_OBJECT_1A: ObjectWithExplicitId = ObjectWithExplicitId { + definitely_not_id: ID_A, + data: 1, +}; +const EXPLICIT_OBJECT_1B: ObjectWithExplicitId = ObjectWithExplicitId { + definitely_not_id: ID_B, + data: 1, +}; +const EXPLICIT_OBJECT_2A: ObjectWithExplicitId = ObjectWithExplicitId { + definitely_not_id: ID_A, + data: 2, +}; +const TRANSPARENT_OBJECT_1A: ObjectWithTransparentId = ObjectWithTransparentId { + definitely_not_id: OBJECT_1A, + data: 1, +}; +const TRANSPARENT_OBJECT_1B: ObjectWithTransparentId = ObjectWithTransparentId { + definitely_not_id: OBJECT_1B, + data: 1, +}; +const TRANSPARENT_OBJECT_2A: ObjectWithTransparentId = ObjectWithTransparentId { + definitely_not_id: OBJECT_2A, + data: 2, +}; + +#[test] +fn id() { + assert_eq!(OBJECT_1A.id(), &ID_A); + assert_eq!(OBJECT_1B.id(), &ID_B); + assert_eq!(EXPLICIT_OBJECT_1A.id(), &ID_A); + assert_eq!(EXPLICIT_OBJECT_1B.id(), &ID_B); + assert_eq!(TRANSPARENT_OBJECT_1A.id(), &ID_A); + assert_eq!(TRANSPARENT_OBJECT_1B.id(), &ID_B); +} + +#[test] +fn id_eq() { + assert_eq!(OBJECT_1A, OBJECT_2A); + assert_ne!(OBJECT_1B, OBJECT_2A); + assert_eq!(EXPLICIT_OBJECT_1A, EXPLICIT_OBJECT_2A); + assert_ne!(EXPLICIT_OBJECT_1B, EXPLICIT_OBJECT_2A); + assert_eq!(TRANSPARENT_OBJECT_1A, TRANSPARENT_OBJECT_2A); + assert_ne!(TRANSPARENT_OBJECT_1B, TRANSPARENT_OBJECT_2A); +} + +#[test] +fn id_ord() { + assert!(OBJECT_1A < OBJECT_1B); + assert!(OBJECT_1B > OBJECT_1A); + assert!(EXPLICIT_OBJECT_1A < EXPLICIT_OBJECT_1B); + assert!(EXPLICIT_OBJECT_1B > EXPLICIT_OBJECT_1A); + assert!(TRANSPARENT_OBJECT_1A < TRANSPARENT_OBJECT_1B); + assert!(TRANSPARENT_OBJECT_1B > TRANSPARENT_OBJECT_1A); +} + +#[test] +fn id_hash() 
{ + let mut set = BTreeSet::new(); + set.insert(OBJECT_1A); + set.insert(OBJECT_2A); + assert_eq!(set.len(), 1); + assert!(set.contains(&OBJECT_1A)); + assert!(!set.contains(&OBJECT_1B)); + assert!(set.contains(&OBJECT_2A)); + set.insert(OBJECT_1B); + assert_eq!(set.len(), 2); + assert!(set.contains(&OBJECT_1A)); + assert!(set.contains(&OBJECT_1B)); + assert!(set.contains(&OBJECT_2A)); +} diff --git a/data_model/derive/tests/partial_tagged_serde.rs b/data_model/derive/tests/partial_tagged_serde.rs new file mode 100644 index 00000000000..7793367e790 --- /dev/null +++ b/data_model/derive/tests/partial_tagged_serde.rs @@ -0,0 +1,86 @@ +use std::fmt::Formatter; + +use iroha_data_model_derive::{PartiallyTaggedDeserialize, PartiallyTaggedSerialize}; +use serde::{de, Deserialize, Deserializer, Serialize, Serializer}; + +#[allow(variant_size_differences)] // it's a test, duh +#[derive(Debug, PartialEq, Eq, PartiallyTaggedDeserialize, PartiallyTaggedSerialize)] +enum Value { + Bool(bool), + #[serde(rename = "StringRenamed")] + String(String), + #[serde_partially_tagged(untagged)] + Numeric(NumericValue), +} + +// a simpler version of NumericValue than used in data_model +// this one is always i32, but is still serialized as a string literal +// NOTE: debug is actually required for `PartiallyTaggedDeserialize`! +#[derive(Debug, PartialEq, Eq)] +struct NumericValue(i32); + +impl Serialize for NumericValue { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + serializer.serialize_str(&self.0.to_string()) + } +} + +struct NumericValueVisitor; + +impl de::Visitor<'_> for NumericValueVisitor { + type Value = NumericValue; + + fn expecting(&self, formatter: &mut Formatter) -> std::fmt::Result { + formatter.write_str("a string literal containing a number") + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + let parsed = v.parse::().map_err(|e| E::custom(e))?; + + Ok(NumericValue(parsed)) + } +} + +impl<'de> Deserialize<'de> for NumericValue { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + deserializer.deserialize_str(NumericValueVisitor) + } +} + +#[test] +fn partially_tagged_serde() { + let values = [ + Value::Bool(true), + Value::String("I am string".to_owned()), + Value::Numeric(NumericValue(42)), + ]; + let serialized_values = [ + r#"{"Bool":true}"#, + r#"{"StringRenamed":"I am string"}"#, + r#""42""#, + ]; + + for (value, serialized_value) in values.iter().zip(serialized_values.iter()) { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|e| panic!("Failed to serialize `{value:?}`: {e:?}")); + assert_eq!( + serialized, *serialized_value, + "Serialized form of `{value:?}` does not match the expected value", + ); + let deserialized: Value = serde_json::from_str(serialized_value) + .unwrap_or_else(|e| panic!("Failed to deserialize `{serialized_value:?}`: {e:?}")); + assert_eq!( + *value, deserialized, + "Deserialized form of `{value:?}` does not match the expected value", + ); + } +} diff --git a/data_model/derive/tests/partial_tagged_serde_self.rs b/data_model/derive/tests/partial_tagged_serde_self.rs new file mode 100644 index 00000000000..1600b04453a --- /dev/null +++ b/data_model/derive/tests/partial_tagged_serde_self.rs @@ -0,0 +1,37 @@ +//! 
A test for `PartiallyTaggedSerialize` and `PartiallyTaggedDeserialize` which uses `Self` as a type + +use iroha_data_model_derive::{PartiallyTaggedDeserialize, PartiallyTaggedSerialize}; + +#[derive(Debug, PartialEq, Eq, PartiallyTaggedSerialize, PartiallyTaggedDeserialize)] +enum Expr { + Negate(Box), + #[serde_partially_tagged(untagged)] + Atom(T), +} + +#[test] +fn partially_tagged_serde() { + use Expr::*; + + let values = [ + Atom(42), + Negate(Box::new(Atom(42))), + Negate(Box::new(Negate(Box::new(Atom(42))))), + ]; + let serialized_values = [r#"42"#, r#"{"Negate":42}"#, r#"{"Negate":{"Negate":42}}"#]; + + for (value, serialized_value) in values.iter().zip(serialized_values.iter()) { + let serialized = serde_json::to_string(value) + .unwrap_or_else(|e| panic!("Failed to serialize `{value:?}`: {e:?}")); + assert_eq!( + serialized, *serialized_value, + "Serialized form of `{value:?}` does not match the expected value", + ); + let deserialized: Expr = serde_json::from_str(serialized_value) + .unwrap_or_else(|e| panic!("Failed to deserialize `{serialized_value:?}`: {e:?}")); + assert_eq!( + *value, deserialized, + "Deserialized form of `{value:?}` does not match the expected value" + ); + } +} diff --git a/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs new file mode 100644 index 00000000000..ad09416af20 --- /dev/null +++ b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.rs @@ -0,0 +1,9 @@ +use iroha_data_model_derive::HasOrigin; + +#[derive(HasOrigin)] +#[has_origin(origin = Object)] +#[has_origin(origin = Object)] +#[has_origin(origin = Object)] +enum MultipleAttributes {} + +fn main() {} diff --git a/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr new file mode 100644 index 00000000000..35511493350 --- /dev/null +++ b/data_model/derive/tests/ui_fail/has_origin_multiple_attributes.stderr @@ -0,0 +1,6 @@ +error: Only one #[has_origin] attribute is allowed! + --> tests/ui_fail/has_origin_multiple_attributes.rs:5:1 + | +5 | / #[has_origin(origin = Object)] +6 | | #[has_origin(origin = Object)] + | |______________________________^ diff --git a/data_model/derive/tests/ui_pass/filter.rs b/data_model/derive/tests/ui_pass/filter.rs new file mode 100644 index 00000000000..94dccc72e95 --- /dev/null +++ b/data_model/derive/tests/ui_pass/filter.rs @@ -0,0 +1,106 @@ +//! 
A smoke-test for the `derive(Filter)` + +use iroha_data_model::{ + prelude::{HasOrigin, Identifiable}, + IdBox, +}; +use iroha_data_model_derive::{Filter, IdEqOrdHash}; +use iroha_schema::IntoSchema; +use parity_scale_codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; + +// These are dummy types for the FilterDerive to work +// They would not work with `feature = transparent_api`, but are enough for the smoke test +mod prelude { + use iroha_schema::IntoSchema; + use parity_scale_codec::{Decode, Encode}; + use serde::{Deserialize, Serialize}; + + #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Encode, Decode, IntoSchema)] + pub struct FilterOpt(T); + + #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Encode, Decode, IntoSchema)] + pub struct OriginFilter(T); + + pub use super::LayerEvent; +} + +#[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Encode, + Decode, + IntoSchema, +)] +pub struct SubLayerEvent; + +#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Encode, Decode, IntoSchema)] +pub struct SubLayerFilter; + +#[derive( + Copy, + Clone, + IntoSchema, + Ord, + PartialOrd, + Eq, + PartialEq, + Serialize, + Deserialize, + Decode, + Encode, + Debug, + Hash, +)] +pub struct LayerId { + name: u32, +} + +impl HasOrigin for LayerEvent { + type Origin = Layer; + + fn origin_id(&self) -> &::Id { + todo!() + } +} + +#[derive(Debug, IdEqOrdHash)] +pub struct Layer { + id: LayerId, +} + +impl From for IdBox { + fn from(_: LayerId) -> Self { + unreachable!() + } +} + +/// The tested type +#[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + Encode, + Decode, + IntoSchema, + Filter, +)] +pub enum LayerEvent { + SubLayer(SubLayerEvent), + Created(LayerId), +} + +fn main() {} diff --git a/data_model/src/account.rs b/data_model/src/account.rs index e652b8f63c7..c66163920c0 100644 --- a/data_model/src/account.rs +++ b/data_model/src/account.rs @@ -1,6 +1,4 @@ //! Structures, traits and impls related to `Account`s. -#![allow(clippy::std_instead_of_alloc)] - #[cfg(not(feature = "std"))] use alloc::{ collections::{btree_map, btree_set}, @@ -12,9 +10,10 @@ use core::str::FromStr; #[cfg(feature = "std")] use std::collections::{btree_map, btree_set}; -use derive_more::{Constructor, DebugCustom, Display}; +use derive_more::{DebugCustom, Display}; use getset::Getters; use iroha_data_model_derive::{model, IdEqOrdHash}; +use iroha_primitives::{const_vec::ConstVec, must_use::MustUse}; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; @@ -27,10 +26,9 @@ use crate::{ AssetsMap, }, domain::prelude::*, - expression::{ContainsAny, ContextValue, EvaluatesTo}, metadata::Metadata, - role::{prelude::RoleId, RoleIds}, - HasMetadata, Identifiable, Name, ParseError, PublicKey, Registered, + name::Name, + HasMetadata, Identifiable, ParseError, PublicKey, Registered, }; /// API to work with collections of [`Id`] : [`Account`] mappings. @@ -44,18 +42,6 @@ pub type AccountsMap = btree_map::BTreeMap; // of space, over `Vec`. type Signatories = btree_set::BTreeSet; -/// The context value name for transaction signatories. -#[cfg(feature = "transparent_api")] -pub const TRANSACTION_SIGNATORIES_VALUE: &str = "transaction_signatories"; -#[cfg(not(feature = "transparent_api"))] -const TRANSACTION_SIGNATORIES_VALUE: &str = "transaction_signatories"; - -/// The context value name for account signatories. 
-#[cfg(feature = "transparent_api")] -pub const ACCOUNT_SIGNATORIES_VALUE: &str = "account_signatories"; -#[cfg(not(feature = "transparent_api"))] -const ACCOUNT_SIGNATORIES_VALUE: &str = "account_signatories"; - #[model] pub mod model { use super::*; @@ -78,7 +64,6 @@ pub mod model { PartialOrd, Ord, Hash, - Constructor, Getters, Decode, Encode, @@ -91,10 +76,10 @@ pub mod model { #[getset(get = "pub")] #[ffi_type] pub struct AccountId { - /// [`Account`]'s name. - pub name: Name, /// [`Account`]'s [`Domain`](`crate::domain::Domain`) id. pub domain_id: DomainId, + /// [`Account`]'s name. + pub name: Name, } /// Account entity is an authority which is used to execute `Iroha Special Instructions`. @@ -125,8 +110,6 @@ pub mod model { pub signature_check_condition: SignatureCheckCondition, /// Metadata of this account as a key-value store. pub metadata: Metadata, - /// Roles of this account, they are tags for sets of permissions stored in `World`. - pub roles: RoleIds, } /// Builder which should be submitted in a transaction to create a new [`Account`] @@ -154,18 +137,28 @@ pub mod model { Eq, PartialOrd, Ord, - Constructor, Decode, Encode, Deserialize, Serialize, IntoSchema, )] - #[serde(transparent)] - #[repr(transparent)] - // SAFETY: `SignatureCheckCondition` has no trap representation in `EvalueatesTo` - #[ffi_type(unsafe {robust})] - pub struct SignatureCheckCondition(pub EvaluatesTo<bool>); + #[ffi_type(opaque)] + #[allow(clippy::enum_variant_names)] + pub enum SignatureCheckCondition { + #[display(fmt = "AnyAccountSignatureOr({_0:?})")] + AnyAccountSignatureOr(ConstVec<PublicKey>), + #[display(fmt = "AllAccountSignaturesAnd({_0:?})")] + AllAccountSignaturesAnd(ConstVec<PublicKey>), + } +} +
+impl AccountId { + /// Construct [`Self`]. + // NOTE: not derived to preserve order of fields in which [`Self`] is parsed from string + pub fn new(name: Name, domain_id: DomainId) -> Self { + Self { domain_id, name } + } } impl Account { @@ -197,23 +190,11 @@ impl Account { self.assets.values() } - /// Get an iterator over [`role ids`](RoleId) of the `Account` - #[inline] - pub fn roles(&self) -> impl ExactSizeIterator<Item = &RoleId> { - self.roles.iter() - } - /// Return `true` if the `Account` contains the given signatory #[inline] pub fn contains_signatory(&self, signatory: &PublicKey) -> bool { self.signatories.contains(signatory) } - - /// Return `true` if `Account` contains the given role - #[inline] - pub fn contains_role(&self, role_id: &RoleId) -> bool { - self.roles.contains(role_id) - } } #[cfg(feature = "transparent_api")] @@ -229,22 +210,6 @@ impl Account { pub fn remove_asset(&mut self, asset_id: &AssetId) -> Option<Asset> { self.assets.remove(asset_id) } - - /// Add [`Role`](crate::role::Role) into the [`Account`]. - /// - /// If `Account` did not have this role present, `true` is returned. - /// If `Account` did have this role present, `false` is returned. - #[inline] - pub fn add_role(&mut self, role_id: RoleId) -> bool { - self.roles.insert(role_id) - } - - /// Remove a role from the `Account` and return whether the role was present in the `Account` - #[inline] - pub fn remove_role(&mut self, role_id: &RoleId) -> bool { - self.roles.remove(role_id) - } - /// Add [`signatory`](PublicKey) into the [`Account`]. /// /// If `Account` did not have this signatory present, `true` is returned. @@ -278,27 +243,6 @@ impl NewAccount { } } -/// Default signature condition check for accounts. -/// Returns true if any of the signatories have signed the transaction.
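// Where the expression-based default being removed here evaluated `ContainsAny` over
// context values, the new enum from this hunk carries the extra keys inline. A simplified
// mirror of the two checking modes, with `String` standing in for `PublicKey` and `Vec`
// for `ConstVec` (not the actual iroha types):
use std::collections::BTreeSet;

enum SignatureCheckCondition {
    AnyAccountSignatureOr(Vec<String>),
    AllAccountSignaturesAnd(Vec<String>),
}

impl SignatureCheckCondition {
    fn check(&self, account: &BTreeSet<String>, tx: &BTreeSet<String>) -> bool {
        match self {
            // "any": some account signatory or extra allowed key signed the transaction
            Self::AnyAccountSignatureOr(extra) => {
                account.iter().chain(extra).any(|key| tx.contains(key))
            }
            // "all": every account signatory and every extra required key signed it
            Self::AllAccountSignaturesAnd(extra) => {
                account.iter().chain(extra).all(|key| tx.contains(key))
            }
        }
    }
}

fn main() {
    let account: BTreeSet<_> = ["alice".to_string()].into();
    let signed: BTreeSet<_> = ["alice".to_string()].into();
    let cond = SignatureCheckCondition::AnyAccountSignatureOr(Vec::new());
    assert!(cond.check(&account, &signed));
    assert!(!cond.check(&account, &BTreeSet::new()));
}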
-impl Default for SignatureCheckCondition { - #[inline] - fn default() -> Self { - Self( - ContainsAny::new( - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str(TRANSACTION_SIGNATORIES_VALUE) - .expect("TRANSACTION_SIGNATORIES_VALUE should be valid."), - )), - EvaluatesTo::new_unchecked(ContextValue::new( - Name::from_str(ACCOUNT_SIGNATORIES_VALUE) - .expect("ACCOUNT_SIGNATORIES_VALUE should be valid."), - )), - ) - .into(), - ) - } -} - impl HasMetadata for NewAccount { fn metadata(&self) -> &Metadata { &self.metadata @@ -345,7 +289,183 @@ impl FromStr for AccountId { } } +impl Default for SignatureCheckCondition { + fn default() -> Self { + Self::AnyAccountSignatureOr(ConstVec::new_empty()) + } +} + +impl SignatureCheckCondition { + /// Shorthand to create a [`SignatureCheckCondition::AnyAccountSignatureOr`] variant without additional allowed signatures. + #[inline] + pub fn any_account_signature() -> Self { + Self::AnyAccountSignatureOr(ConstVec::new_empty()) + } + + /// Shorthand to create a [`SignatureCheckCondition::AllAccountSignaturesAnd`] variant without additional required signatures. + #[inline] + pub fn all_account_signatures() -> Self { + Self::AllAccountSignaturesAnd(ConstVec::new_empty()) + } + + /// Checks whether the transaction contains all the signatures required by the `SignatureCheckCondition`. + pub fn check( + &self, + account_signatories: &btree_set::BTreeSet, + transaction_signatories: &btree_set::BTreeSet, + ) -> MustUse { + let result = match &self { + SignatureCheckCondition::AnyAccountSignatureOr(additional_allowed_signatures) => { + account_signatories + .iter() + .chain(additional_allowed_signatures.as_ref()) + .any(|allowed| transaction_signatories.contains(allowed)) + } + SignatureCheckCondition::AllAccountSignaturesAnd(additional_required_signatures) => { + account_signatories + .iter() + .chain(additional_required_signatures.as_ref()) + .all(|required_signature| transaction_signatories.contains(required_signature)) + } + }; + + MustUse::new(result) + } +} + /// The prelude re-exports most commonly used traits, structs and macros from this crate. 
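For orientation, the new variant-based `SignatureCheckCondition` reduces to plain set logic. Below is a minimal, self-contained sketch of the same semantics, with strings standing in for `PublicKey`s; the names are illustrative, not the crate's API:

use std::collections::BTreeSet;

// Illustrative stand-in: strings play the role of `PublicKey`s.
enum Condition<'a> {
    AnyAccountSignatureOr(Vec<&'a str>),
    AllAccountSignaturesAnd(Vec<&'a str>),
}

fn check<'a>(
    cond: &Condition<'a>,
    account_signatories: &BTreeSet<&'a str>,
    tx_signatories: &BTreeSet<&'a str>,
) -> bool {
    match cond {
        // "Any": one matching signature among account + extra allowed keys suffices.
        Condition::AnyAccountSignatureOr(extra) => account_signatories
            .iter()
            .chain(extra)
            .any(|key| tx_signatories.contains(key)),
        // "All": every account signatory and every extra required key must have signed.
        Condition::AllAccountSignaturesAnd(extra) => account_signatories
            .iter()
            .chain(extra)
            .all(|key| tx_signatories.contains(key)),
    }
}

fn main() {
    let account: BTreeSet<&str> = ["alice", "bob"].into();
    let tx: BTreeSet<&str> = ["alice"].into();
    assert!(check(&Condition::AnyAccountSignatureOr(vec![]), &account, &tx));
    // "All" fails here because "bob" has not signed.
    assert!(!check(&Condition::AllAccountSignaturesAnd(vec![]), &account, &tx));
}

Note that `all` over an empty signatory set is vacuously true, which is exactly the edge case the test below flags.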
 pub mod prelude {
     pub use super::{Account, AccountId, SignatureCheckCondition};
 }
+
+#[cfg(test)]
+mod tests {
+    use core::cmp::Ordering;
+
+    use iroha_crypto::{KeyPair, PublicKey};
+
+    use super::{AccountId, SignatureCheckCondition};
+    use crate::{domain::DomainId, name::Name};
+
+    fn make_key() -> PublicKey {
+        KeyPair::generate().unwrap().public_key().clone()
+    }
+
+    fn check_signature_check_condition(
+        condition: &SignatureCheckCondition,
+        account_signatories: &[&PublicKey],
+        tx_signatories: &[&PublicKey],
+        result: bool,
+    ) {
+        let account_signatories = account_signatories.iter().copied().cloned().collect();
+        let tx_signatories = tx_signatories.iter().copied().cloned().collect();
+
+        assert_eq!(
+            condition.check(&account_signatories, &tx_signatories).0,
+            result
+        );
+    }
+
+    #[test]
+    fn signature_check_condition_default() {
+        let key1 = make_key();
+        let key2 = make_key();
+        let key3 = make_key();
+        let condition = SignatureCheckCondition::default();
+
+        check_signature_check_condition(&condition, &[], &[], false);
+        check_signature_check_condition(&condition, &[&key1], &[], false);
+        check_signature_check_condition(&condition, &[], &[&key1], false);
+        check_signature_check_condition(&condition, &[&key1], &[&key1], true);
+        check_signature_check_condition(&condition, &[&key1], &[&key2], false);
+        check_signature_check_condition(&condition, &[&key1, &key2, &key3], &[&key1], true);
+        check_signature_check_condition(&condition, &[&key1, &key2, &key3], &[&key2], true);
+        check_signature_check_condition(&condition, &[&key1, &key2, &key3], &[&key3], true);
+    }
+
+    #[test]
+    fn signature_check_condition_all() {
+        let key1 = make_key();
+        let key2 = make_key();
+        let key3 = make_key();
+        let condition = SignatureCheckCondition::all_account_signatures();
+
+        // technically, `\forall x \in \emptyset, check(x)` is true for any `check`, so this evaluates to true
+        // maybe not the logic we want?
+        check_signature_check_condition(&condition, &[], &[], true);
+        check_signature_check_condition(&condition, &[], &[&key1], true);
+
+        check_signature_check_condition(&condition, &[&key1], &[], false);
+        check_signature_check_condition(&condition, &[&key1], &[&key1], true);
+        check_signature_check_condition(&condition, &[&key1], &[&key2], false);
+        check_signature_check_condition(&condition, &[&key1, &key2, &key3], &[&key1], false);
+        check_signature_check_condition(&condition, &[&key1, &key2, &key3], &[&key2], false);
+        check_signature_check_condition(&condition, &[&key1, &key2, &key3], &[&key3], false);
+        check_signature_check_condition(&condition, &[&key1, &key2], &[&key1, &key2, &key3], true);
+        check_signature_check_condition(&condition, &[&key1, &key2], &[&key1, &key2], true);
+        check_signature_check_condition(&condition, &[&key1, &key2], &[&key2, &key3], false);
+    }
+
+    #[test]
+    fn signature_check_condition_any_or() {
+        let key1 = make_key();
+        let key2 = make_key();
+        let key3 = make_key();
+        let condition = SignatureCheckCondition::AnyAccountSignatureOr(vec![key3.clone()].into());
+
+        check_signature_check_condition(&condition, &[], &[], false);
+        check_signature_check_condition(&condition, &[], &[&key3], true);
+        check_signature_check_condition(&condition, &[], &[&key2], false);
+        check_signature_check_condition(&condition, &[], &[&key1, &key2], false);
+        check_signature_check_condition(&condition, &[&key2], &[&key2], true);
+        check_signature_check_condition(&condition, &[&key2, &key3], &[&key2], true);
+        check_signature_check_condition(&condition, &[&key1, &key2], &[&key2], true);
+    }
+
+    #[test]
+    fn signature_check_condition_all_and() {
+        let key1 = make_key();
+        let key2 = make_key();
+        let key3 = make_key();
+        let condition = SignatureCheckCondition::AllAccountSignaturesAnd(vec![key3.clone()].into());
+
+        check_signature_check_condition(&condition, &[], &[], false);
+        check_signature_check_condition(&condition, &[], &[&key3], true);
+        check_signature_check_condition(&condition, &[&key1], &[&key3], false);
+        check_signature_check_condition(&condition, &[&key1], &[&key1, &key3], true);
+        check_signature_check_condition(&condition, &[&key2], &[&key1, &key3], false);
+        check_signature_check_condition(&condition, &[&key2], &[&key1, &key2, &key3], true);
+    }
+
+    #[test]
+    fn cmp_account_id() {
+        let domain_id_a: DomainId = "a".parse().expect("failed to parse DomainId");
+        let domain_id_b: DomainId = "b".parse().expect("failed to parse DomainId");
+        let name_a: Name = "a".parse().expect("failed to parse Name");
+        let name_b: Name = "b".parse().expect("failed to parse Name");
+
+        let mut account_ids = Vec::new();
+        for name in [&name_a, &name_b] {
+            for domain_id in [&domain_id_a, &domain_id_b] {
+                account_ids.push(AccountId::new(name.clone(), domain_id.clone()));
+            }
+        }
+
+        for account_id_1 in &account_ids {
+            for account_id_2 in &account_ids {
+                match (
+                    account_id_1.domain_id.cmp(&account_id_2.domain_id),
+                    account_id_1.name.cmp(&account_id_2.name),
+                ) {
+                    // `DomainId` takes precedence in the comparison;
+                    // if the `DomainId`s are equal, then the comparison is based on the `Name`s
+                    (Ordering::Equal, ordering) | (ordering, _) => assert_eq!(
+                        account_id_1.cmp(account_id_2),
+                        ordering,
+                        "{account_id_1:?} and {account_id_2:?} are expected to be {ordering:?}"
+                    ),
+                }
+            }
+        }
+    }
+}
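The `cmp_account_id` test above pins down why the `AccountId` fields were reordered (and why `new` stays hand-written to preserve the `name@domain` parse order): a derived `Ord` on a struct compares fields in declaration order. A tiny sketch of that general rule, using simplified stand-in types:

// Derived `Ord` is lexicographic in field declaration order,
// so putting `domain` first makes it the primary sort key.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Id {
    domain: String, // compared first
    name: String,   // tie-breaker
}

fn main() {
    let a = Id { domain: "a".into(), name: "z".into() };
    let b = Id { domain: "b".into(), name: "a".into() };
    // "a" < "b" on `domain` decides, even though "z" > "a" on `name`.
    assert!(a < b);
}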
diff --git a/data_model/src/asset.rs b/data_model/src/asset.rs
index 112234a2d18..25c19006e56 100644
--- a/data_model/src/asset.rs
+++ b/data_model/src/asset.rs
@@ -1,7 +1,5 @@
 //! This module contains [`Asset`] structure, its implementation and related traits and
 //! instructions implementations.
-#![allow(clippy::std_instead_of_alloc)]
-
 #[cfg(not(feature = "std"))]
 use alloc::{collections::btree_map, format, string::String, vec::Vec};
 use core::{fmt, str::FromStr};
@@ -220,6 +218,8 @@ pub mod model {
         Clone,
         PartialEq,
         Eq,
+        PartialOrd,
+        Ord,
         Decode,
         Encode,
         Deserialize,
diff --git a/data_model/src/block.rs b/data_model/src/block.rs
index c08d59e8c32..0eb2cb5079c 100644
--- a/data_model/src/block.rs
+++ b/data_model/src/block.rs
@@ -6,347 +6,333 @@
 #[cfg(not(feature = "std"))]
 use alloc::{boxed::Box, format, string::String, vec::Vec};
-use core::{cmp::Ordering, fmt::Display};
+use core::{fmt::Display, time::Duration};

 use derive_more::Display;
 use getset::Getters;
-#[cfg(feature = "std")]
-use iroha_crypto::SignatureOf;
-use iroha_crypto::{Hash, HashOf, MerkleTree, SignaturesOf};
+#[cfg(all(feature = "std", feature = "transparent_api"))]
+use iroha_crypto::KeyPair;
+use iroha_crypto::{HashOf, MerkleTree, SignaturesOf};
+use iroha_data_model_derive::model;
+use iroha_macro::FromVariant;
+use iroha_primitives::unique_vec::UniqueVec;
 use iroha_schema::IntoSchema;
-use iroha_version::{declare_versioned_with_scale, version_with_scale};
+use iroha_version::{declare_versioned, version_with_scale};
 use parity_scale_codec::{Decode, Encode};
 use serde::{Deserialize, Serialize};

-pub use self::{
-    committed::{CommittedBlock, PartialBlockHash, VersionedCommittedBlock},
-    header::BlockHeader,
-};
-use crate::{events::prelude::*, model, peer, transaction::prelude::*};
+pub use self::model::*;
+use crate::{events::prelude::*, peer, transaction::prelude::*};

-mod header {
-    pub use self::model::*;
+#[model]
+pub mod model {
     use super::*;

-    #[model]
-    pub mod model {
-        use super::*;
-
-        /// Header of the block. The hash should be taken from its byte representation.
-        #[derive(
-            Debug,
-            Display,
-            Clone,
-            PartialEq,
-            Eq,
-            Getters,
-            Decode,
-            Encode,
-            Deserialize,
-            Serialize,
-            IntoSchema,
-        )]
-        #[cfg_attr(
-            feature = "std",
-            display(fmt = "Block №{height} (hash: {});", "HashOf::new(&self)")
-        )]
-        #[cfg_attr(not(feature = "std"), display(fmt = "Block №{height}"))]
-        #[getset(get = "pub")]
-        #[ffi_type]
-        pub struct BlockHeader {
-            /// Unix time (in milliseconds) of block forming by a peer.
-            pub timestamp: u128,
-            /// Estimation of consensus duration in milliseconds
-            pub consensus_estimation: u64,
-            /// A number of blocks in the chain up to the block.
-            pub height: u64,
-            /// Value of view change index used to resolve soft forks
-            pub view_change_index: u64,
-            /// Hash of a previous block in the chain.
-            /// Is an array of zeros for the first block.
-            pub previous_block_hash: Option>,
-            /// Hash of merkle tree root of the tree of valid transactions' hashes.
-            pub transactions_hash: Option>>,
-            /// Hash of merkle tree root of the tree of rejected transactions' hashes.
-            pub rejected_transactions_hash: Option>>,
-            /// Network topology when the block was committed.
-            // TODO: Derive with getset once FFI impl is fixed
-            #[getset(skip)]
-            pub committed_with_topology: Vec,
-        }
+    #[derive(
+        Debug,
+        Display,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        Getters,
+        Decode,
+        Encode,
+        Deserialize,
+        Serialize,
+        IntoSchema,
+    )]
+    #[cfg_attr(
+        feature = "std",
+        display(fmt = "Block №{height} (hash: {});", "HashOf::new(&self)")
+    )]
+    #[cfg_attr(not(feature = "std"), display(fmt = "Block №{height}"))]
+    #[getset(get = "pub")]
+    #[allow(missing_docs)]
+    #[ffi_type]
+    pub struct BlockHeader {
+        /// Number of blocks in the chain including this block.
+        pub height: u64,
+        /// Creation timestamp (unix time in milliseconds).
+        #[getset(skip)]
+        pub timestamp_ms: u64,
+        /// Hash of the previous block in the chain.
+        pub previous_block_hash: Option>,
+        /// Hash of merkle tree root of transactions' hashes.
+        pub transactions_hash: Option>>,
+        /// Value of view change index. Used to resolve soft forks.
+        pub view_change_index: u64,
+        /// Estimation of consensus duration (in milliseconds).
+        pub consensus_estimation_ms: u64,
     }

-    impl BlockHeader {
-        /// Serialize the header's data for hashing purposes.
-        pub fn payload(&self) -> Vec {
-            let mut data = Vec::new();
-            data.extend(&self.timestamp.to_le_bytes());
-            data.extend(&self.consensus_estimation.to_le_bytes());
-            data.extend(&self.height.to_le_bytes());
-            data.extend(&self.view_change_index.to_le_bytes());
-            if let Some(hash) = self.previous_block_hash.as_ref() {
-                data.extend(hash.as_ref());
-            }
-            if let Some(hash) = self.transactions_hash.as_ref() {
-                data.extend(hash.as_ref());
-            }
-            if let Some(hash) = self.rejected_transactions_hash.as_ref() {
-                data.extend(hash.as_ref());
-            }
-            for id in &self.committed_with_topology {
-                data.extend(id.payload());
-            }
-            data
-        }
+    #[derive(
+        Debug,
+        Display,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        Getters,
+        Decode,
+        Encode,
+        Deserialize,
+        Serialize,
+        IntoSchema,
+    )]
+    #[display(fmt = "({header})")]
+    #[getset(get = "pub")]
+    #[allow(missing_docs)]
+    #[ffi_type]
+    pub struct BlockPayload {
+        /// Block header
+        pub header: BlockHeader,
+        /// Topology of the network at the time of block commit.
+        #[getset(skip)] // FIXME: Because of FFI-related issues
+        pub commit_topology: UniqueVec,
+        /// Array of transactions which successfully passed validation and the consensus step.
+        #[getset(skip)] // FIXME: Because of FFI-related issues
+        pub transactions: Vec,
+        /// Event recommendations.
+        #[getset(skip)] // NOTE: Unused ATM
+        pub event_recommendations: Vec,
     }

-    impl PartialOrd for BlockHeader {
-        fn partial_cmp(&self, other: &Self) -> Option {
-            Some(self.cmp(other))
-        }
+    /// Signed block
+    #[version_with_scale(version = 1, versioned_alias = "SignedBlock")]
+    #[derive(
+        Debug,
+        Display,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        Getters,
+        Encode,
+        Serialize,
+        IntoSchema,
+    )]
+    #[cfg_attr(not(feature = "std"), display(fmt = "Signed block"))]
+    #[cfg_attr(feature = "std", display(fmt = "{}", "self.hash()"))]
+    #[getset(get = "pub")]
+    #[ffi_type]
+    pub struct SignedBlockV1 {
+        /// Signatures of peers which approved this block.
+ #[getset(skip)] + pub signatures: SignaturesOf, + /// Block payload + pub payload: BlockPayload, } +} - impl Ord for BlockHeader { - fn cmp(&self, other: &Self) -> Ordering { - self.timestamp.cmp(&other.timestamp) - } +#[cfg(any(feature = "ffi_export", feature = "ffi_import"))] +declare_versioned!(SignedBlock 1..2, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, FromVariant, iroha_ffi::FfiType, IntoSchema); +#[cfg(all(not(feature = "ffi_export"), not(feature = "ffi_import")))] +declare_versioned!(SignedBlock 1..2, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, FromVariant, IntoSchema); + +impl BlockPayload { + /// Calculate block payload [`Hash`](`iroha_crypto::HashOf`). + #[cfg(feature = "std")] + pub fn hash(&self) -> iroha_crypto::HashOf { + iroha_crypto::HashOf::new(self) } } -mod committed { - use iroha_macro::FromVariant; +impl BlockHeader { + /// Checks if it's a header of a genesis block. + #[inline] + pub const fn is_genesis(&self) -> bool { + self.height == 1 + } - pub use self::model::*; - use super::*; + /// Creation timestamp + pub fn timestamp(&self) -> Duration { + Duration::from_millis(self.timestamp_ms) + } - #[cfg(any(feature = "ffi_import", feature = "ffi_export"))] - declare_versioned_with_scale!(VersionedCommittedBlock 1..2, Debug, Clone, PartialEq, Eq, FromVariant, Deserialize, Serialize, iroha_ffi::FfiType, IntoSchema); - #[cfg(all(not(feature = "ffi_import"), not(feature = "ffi_export")))] - declare_versioned_with_scale!(VersionedCommittedBlock 1..2, Debug, Clone, PartialEq, Eq, FromVariant, Deserialize, Serialize, IntoSchema); + /// Consensus estimation + pub fn consensus_estimation(&self) -> Duration { + Duration::from_millis(self.consensus_estimation_ms) + } +} - #[model] - pub mod model { - use super::*; +impl SignedBlockV1 { + #[cfg(feature = "std")] + fn hash(&self) -> iroha_crypto::HashOf { + iroha_crypto::HashOf::from_untyped_unchecked(iroha_crypto::HashOf::new(self).into()) + } +} - /// The hash of a [`VersionedCommittedBlock`] used for signing in consensus. - /// The normal [`Hashof`] will change based on who - /// has signed the block. If you want to compare the contents of a block only - /// use this hash instead. - #[derive( - Debug, - Display, - Clone, - Copy, - PartialEq, - Eq, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "({internal})")] - #[repr(transparent)] - #[serde(transparent)] - #[ffi_type(unsafe {robust})] - pub struct PartialBlockHash { - /// The hash value. - pub internal: Hash, - } +impl SignedBlock { + /// Block payload + // FIXME: Leaking concrete type BlockPayload from Versioned container. Payload should be versioned + pub fn payload(&self) -> &BlockPayload { + let SignedBlock::V1(block) = self; + block.payload() + } - /// The `CommittedBlock` struct represents a block accepted by consensus - #[version_with_scale(version = 1, versioned_alias = "VersionedCommittedBlock")] - #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - Getters, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[display(fmt = "({header})")] - #[getset(get = "pub")] - #[ffi_type] - pub struct CommittedBlock { - /// Block header - pub header: BlockHeader, - // TODO: Derive with getset once FFI impl is fixed - #[getset(skip)] - pub transactions: Vec, - /// Event recommendations. 
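The `*_ms: u64` fields paired with `Duration` getters in the new `BlockHeader` follow a common pattern: keep the SCALE-encoded representation a plain integer and expose typed time at the API surface. A minimal sketch of the pattern (simplified struct, not the real one):

use core::time::Duration;

struct Header {
    timestamp_ms: u64,
    consensus_estimation_ms: u64,
}

impl Header {
    // Expose typed durations; the raw integer stays the wire representation.
    fn timestamp(&self) -> Duration {
        Duration::from_millis(self.timestamp_ms)
    }
    fn consensus_estimation(&self) -> Duration {
        Duration::from_millis(self.consensus_estimation_ms)
    }
}

fn main() {
    let h = Header { timestamp_ms: 1_500, consensus_estimation_ms: 4_000 };
    assert_eq!(h.timestamp().as_secs(), 1);
    assert_eq!(h.consensus_estimation(), Duration::from_millis(4_000));
}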
- // TODO: Derive with getset once FFI impl is fixed - #[getset(skip)] - pub event_recommendations: Vec, - /// Signatures of peers which approved this block - #[getset(skip)] - pub signatures: SignaturesOf, - } + /// Used to inject faulty payload for testing + #[cfg(feature = "transparent_api")] + pub fn payload_mut(&mut self) -> &mut BlockPayload { + let SignedBlock::V1(block) = self; + &mut block.payload } - impl VersionedCommittedBlock { - /// Checks if it's a header of a genesis block. - #[inline] - pub const fn is_genesis(&self) -> bool { - let VersionedCommittedBlock::V1(block) = self; - block.header.height == 1 - } + /// Signatures of peers which approved this block. + pub fn signatures(&self) -> &SignaturesOf { + let SignedBlock::V1(block) = self; + &block.signatures + } - /// Convert from `&VersionedCommittedBlock` to V1 reference - #[inline] - pub const fn as_v1(&self) -> &CommittedBlock { - match self { - Self::V1(v1) => v1, - } - } + /// Calculate block hash + #[cfg(feature = "std")] + pub fn hash(&self) -> HashOf { + iroha_crypto::HashOf::new(self) + } - /// Convert from `&mut VersionedCommittedBlock` to V1 mutable reference - #[inline] - pub fn as_mut_v1(&mut self) -> &mut CommittedBlock { - match self { - Self::V1(v1) => v1, - } - } + /// Add additional signatures to this block + /// + /// # Errors + /// + /// If given signature doesn't match block hash + #[cfg(feature = "std")] + #[cfg(feature = "transparent_api")] + pub fn sign(mut self, key_pair: KeyPair) -> Result { + iroha_crypto::SignatureOf::new(key_pair, self.payload()).map(|signature| { + let SignedBlock::V1(block) = &mut self; + block.signatures.insert(signature); + self + }) + } - /// Performs the conversion from `VersionedCommittedBlock` to V1 - #[inline] - pub fn into_v1(self) -> CommittedBlock { - match self { - Self::V1(v1) => v1, - } - } + /// Add additional signatures to this block + /// + /// # Errors + /// + /// If given signature doesn't match block hash + #[cfg(feature = "std")] + #[cfg(feature = "transparent_api")] + pub fn add_signature( + &mut self, + signature: iroha_crypto::SignatureOf, + ) -> Result<(), iroha_crypto::error::Error> { + signature.verify(self.payload())?; - /// Calculate the [`PartialBlockHash`] of this block. - #[cfg(feature = "std")] - #[inline] - pub fn partial_hash(&self) -> PartialBlockHash { - match self { - Self::V1(v1) => v1.partial_hash(), - } - } + let SignedBlock::V1(block) = self; + block.signatures.insert(signature); + + Ok(()) + } - /// Calculate the [`HashOf`] for this block. 
+ /// Add additional signatures to this block + #[cfg(feature = "std")] + #[cfg(feature = "transparent_api")] + pub fn replace_signatures( + &mut self, + signatures: iroha_crypto::SignaturesOf, + ) -> bool { + #[cfg(not(feature = "std"))] + use alloc::collections::BTreeSet; #[cfg(feature = "std")] - pub fn hash(&self) -> HashOf { - match self { - Self::V1(v1) => v1.hash().transmute(), + use std::collections::BTreeSet; + + let SignedBlock::V1(block) = self; + block.signatures = BTreeSet::new().into(); + + for signature in signatures { + if self.add_signature(signature).is_err() { + return false; } } - /// Return signatures that are verified with the `hash` of this block - #[cfg(feature = "std")] - #[inline] - pub fn signatures(&self) -> impl ExactSizeIterator> { - self.as_v1().signatures().map(SignatureOf::transmute_ref) - } + true } +} - impl Display for VersionedCommittedBlock { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - self.as_v1().fmt(f) - } - } +mod candidate { + use parity_scale_codec::Input; - impl PartialOrd for VersionedCommittedBlock { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } - } + use super::*; - impl Ord for VersionedCommittedBlock { - fn cmp(&self, other: &Self) -> Ordering { - self.as_v1().cmp(other.as_v1()) - } + #[derive(Decode, Deserialize)] + struct SignedBlockCandidate { + signatures: SignaturesOf, + payload: BlockPayload, } - impl CommittedBlock { - /// Checks if it's a header of a genesis block. - pub const fn is_genesis(&self) -> bool { - self.header.height == 1 - } + impl SignedBlockCandidate { + fn validate(self) -> Result { + #[cfg(feature = "std")] + self.validate_signatures()?; + #[cfg(feature = "std")] + self.validate_header()?; - /// Calculate the partial hash of the current block. - /// [`CommitedBlock`] should have the same partial hash as [`PendingBlock`]. - #[cfg(feature = "std")] - #[inline] - pub fn partial_hash(&self) -> PartialBlockHash { - PartialBlockHash { - internal: Hash::new(self.header.payload()), + if self.payload.transactions.is_empty() { + return Err("Block is empty"); } + + Ok(SignedBlockV1 { + payload: self.payload, + signatures: self.signatures, + }) } - /// Calculate the complete hash of the block that includes signatures. + #[cfg(feature = "std")] - #[inline] - pub fn hash(&self) -> HashOf { - let mut data = Vec::new(); - data.extend(self.header.payload()); - for s in self.signatures.iter() { - data.extend(s.key_payload()); - data.extend(s.signature_payload()); + fn validate_header(&self) -> Result<(), &'static str> { + let actual_txs_hash = self.payload.header().transactions_hash; + + let expected_txs_hash = self + .payload + .transactions + .iter() + .map(TransactionValue::hash) + .collect::>() + .hash(); + + if expected_txs_hash != actual_txs_hash { + return Err("Transactions' hash incorrect. Expected: {expected_txs_hash:?}, actual: {actual_txs_hash:?}"); } - HashOf::from_untyped_unchecked(Hash::new(&data)) - } + // TODO: Validate Event recommendations somehow? 
- /// Return signatures that are verified with the `hash` of this block - #[cfg(feature = "std")] - #[inline] - pub fn signatures(&self) -> impl ExactSizeIterator> { - self.signatures.iter() + Ok(()) } - } - impl PartialOrd for CommittedBlock { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) + #[cfg(feature = "std")] + fn validate_signatures(&self) -> Result<(), &'static str> { + self.signatures + .verify(&self.payload) + .map_err(|_| "Transaction contains invalid signatures") } } - impl Ord for CommittedBlock { - fn cmp(&self, other: &Self) -> Ordering { - self.header.cmp(&other.header) + impl Decode for SignedBlockV1 { + fn decode(input: &mut I) -> Result { + SignedBlockCandidate::decode(input)? + .validate() + .map_err(Into::into) } } + impl<'de> Deserialize<'de> for SignedBlockV1 { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + use serde::de::Error as _; - #[cfg(feature = "std")] - impl From<&CommittedBlock> for Vec { - fn from(block: &CommittedBlock) -> Self { - let tx = block.transactions.iter().map(|tx| { - let status = tx.error.as_ref().map_or_else( - || PipelineStatus::Committed, - |error| PipelineStatus::Rejected(error.clone().into()), - ); - - PipelineEvent { - entity_kind: PipelineEntityKind::Transaction, - status, - hash: tx.payload().hash().into(), - } - .into() - }); - let current_block = core::iter::once( - PipelineEvent { - entity_kind: PipelineEntityKind::Block, - status: PipelineStatus::Committed, - hash: block.hash().into(), - } - .into(), - ); - - tx.chain(current_block).collect() + SignedBlockCandidate::deserialize(deserializer)? + .validate() + .map_err(D::Error::custom) } } +} - #[cfg(feature = "std")] - impl From<&VersionedCommittedBlock> for Vec { - #[inline] - fn from(block: &VersionedCommittedBlock) -> Self { - block.as_v1().into() - } +impl Display for SignedBlock { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let SignedBlock::V1(block) = self; + block.fmt(f) } } @@ -355,39 +341,12 @@ pub mod stream { //! Blocks for streaming API. use derive_more::Constructor; - use iroha_macro::FromVariant; use iroha_schema::IntoSchema; - use iroha_version::prelude::*; use parity_scale_codec::{Decode, Encode}; pub use self::model::*; use super::*; - declare_versioned_with_scale!(VersionedBlockMessage 1..2, Debug, Clone, FromVariant, IntoSchema); - - impl VersionedBlockMessage { - /// Convert from `&VersionedBlockPublisherMessage` to V1 reference - pub const fn as_v1(&self) -> &BlockMessage { - match self { - Self::V1(v1) => v1, - } - } - - /// Convert from `&mut VersionedBlockPublisherMessage` to V1 mutable reference - pub fn as_mut_v1(&mut self) -> &mut BlockMessage { - match self { - Self::V1(v1) => v1, - } - } - - /// Performs the conversion from `VersionedBlockPublisherMessage` to V1 - pub fn into_v1(self) -> BlockMessage { - match self { - Self::V1(v1) => v1, - } - } - } - #[model] pub mod model { use core::num::NonZeroU64; @@ -395,56 +354,25 @@ pub mod stream { use super::*; /// Request sent to subscribe to blocks stream starting from the given height. - #[version_with_scale(version = 1, versioned_alias = "VersionedBlockSubscriptionRequest")] #[derive(Debug, Clone, Copy, Constructor, Decode, Encode, IntoSchema)] #[repr(transparent)] pub struct BlockSubscriptionRequest(pub NonZeroU64); - /// Message sent by the stream producer - /// Block sent by the peer. 
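The `SignedBlockCandidate` machinery above is the usual decode-then-validate guard: `Decode`/`Deserialize` are derived only for a private candidate type, and the public type can be obtained solely through `validate`. A stripped-down, serde-only sketch of that shape (hypothetical names; assumes `serde` with the derive feature and `serde_json` as dependencies):

use serde::Deserialize;

// Public type: can only be produced through `Candidate::validate`.
#[derive(Debug)]
struct Validated {
    payload: String,
}

// Private mirror with the same wire shape and derived deserialization.
#[derive(Deserialize)]
struct Candidate {
    payload: String,
}

impl Candidate {
    fn validate(self) -> Result<Validated, &'static str> {
        if self.payload.is_empty() {
            return Err("payload is empty");
        }
        Ok(Validated { payload: self.payload })
    }
}

impl<'de> Deserialize<'de> for Validated {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de::Error as _;
        // Deserialize the unchecked candidate, then run the invariant checks.
        Candidate::deserialize(deserializer)?
            .validate()
            .map_err(D::Error::custom)
    }
}

fn main() {
    let ok: Validated = serde_json::from_str(r#"{"payload":"abc"}"#).unwrap();
    assert_eq!(ok.payload, "abc");
    assert!(serde_json::from_str::<Validated>(r#"{"payload":""}"#).is_err());
}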
- #[version_with_scale(version = 1, versioned_alias = "VersionedBlockMessage")] + /// Message sent by the stream producer containing block. #[derive(Debug, Clone, Decode, Encode, IntoSchema)] #[repr(transparent)] - pub struct BlockMessage(pub VersionedCommittedBlock); + pub struct BlockMessage(pub SignedBlock); } - impl From for VersionedCommittedBlock { + impl From for SignedBlock { fn from(source: BlockMessage) -> Self { source.0 } } - declare_versioned_with_scale!(VersionedBlockSubscriptionRequest 1..2, Debug, Clone, FromVariant, IntoSchema); - - impl VersionedBlockSubscriptionRequest { - /// Convert from `&VersionedBlockSubscriberMessage` to V1 reference - pub const fn as_v1(&self) -> &BlockSubscriptionRequest { - match self { - Self::V1(v1) => v1, - } - } - - /// Convert from `&mut VersionedBlockSubscriberMessage` to V1 mutable reference - pub fn as_mut_v1(&mut self) -> &mut BlockSubscriptionRequest { - match self { - Self::V1(v1) => v1, - } - } - - /// Performs the conversion from `VersionedBlockSubscriberMessage` to V1 - pub fn into_v1(self) -> BlockSubscriptionRequest { - match self { - Self::V1(v1) => v1, - } - } - } - /// Exports common structs and enums from this module. pub mod prelude { - pub use super::{ - BlockMessage, BlockSubscriptionRequest, VersionedBlockMessage, - VersionedBlockSubscriptionRequest, - }; + pub use super::{BlockMessage, BlockSubscriptionRequest}; } } @@ -466,6 +394,8 @@ pub mod error { Copy, PartialEq, Eq, + PartialOrd, + Ord, iroha_macro::FromVariant, Decode, Encode, diff --git a/data_model/src/domain.rs b/data_model/src/domain.rs index 19e078c6847..850eb3304a6 100644 --- a/data_model/src/domain.rs +++ b/data_model/src/domain.rs @@ -1,7 +1,5 @@ //! This module contains [`Domain`](`crate::domain::Domain`) structure //! and related implementations and trait implementations. -#![allow(clippy::std_instead_of_alloc)] - #[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; @@ -85,6 +83,9 @@ pub mod model { pub logo: Option, /// [`Metadata`] of this `Domain` as a key-value store. pub metadata: Metadata, + /// The account that owns this domain. Usually the [`Account`] that registered it. + #[getset(get = "pub")] + pub owned_by: AccountId, } /// Builder which can be submitted in a transaction to create a new [`Domain`] diff --git a/data_model/src/evaluate.rs b/data_model/src/evaluate.rs index 3179515bc34..b566bf4e0b9 100644 --- a/data_model/src/evaluate.rs +++ b/data_model/src/evaluate.rs @@ -69,9 +69,6 @@ pub trait Evaluate { /// # Errors /// Concrete to each implementer. fn evaluate(&self, context: &C) -> Result; - - /// Number of underneath expressions. 
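With the recursive `len` accounting dropped (here and in the hunks that follow), `Evaluate` is left with a single responsibility: evaluating against a context. A minimal sketch of that remaining shape (simplified expression tree and `String` errors, not the crate's actual types):

use std::collections::HashMap;

type Context = HashMap<String, i64>;

enum Expr {
    Raw(i64),
    ContextValue(String),
    Add(Box<Expr>, Box<Expr>),
}

trait Evaluate {
    type Value;
    fn evaluate(&self, context: &Context) -> Result<Self::Value, String>;
}

impl Evaluate for Expr {
    type Value = i64;

    fn evaluate(&self, context: &Context) -> Result<i64, String> {
        match self {
            Expr::Raw(x) => Ok(*x),
            // Missing names surface as errors, analogous to `EvaluationError::Find`.
            Expr::ContextValue(name) => context
                .get(name)
                .copied()
                .ok_or_else(|| format!("value not found: {name}")),
            Expr::Add(l, r) => Ok(l.evaluate(context)? + r.evaluate(context)?),
        }
    }
}

fn main() {
    let ctx: Context = [("x".to_string(), 2)].into();
    let expr = Expr::Add(Box::new(Expr::Raw(40)), Box::new(Expr::ContextValue("x".into())));
    assert_eq!(expr.evaluate(&ctx), Ok(42));
}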
- fn len(&self) -> usize; } impl> Evaluate for EvaluatesTo @@ -85,10 +82,6 @@ where V::try_from(expr).map_err(|error| EvaluationError::Conversion(error.to_string())) } - - fn len(&self) -> usize { - self.expression.len() - } } impl Evaluate for Expression { @@ -132,33 +125,6 @@ impl Evaluate for Expression { Ok(result) } - - fn len(&self) -> usize { - use Expression::*; - - match self { - Add(add) => add.len(), - Subtract(subtract) => subtract.len(), - Greater(greater) => greater.len(), - Less(less) => less.len(), - Equal(equal) => equal.len(), - Not(not) => not.len(), - And(and) => and.len(), - Or(or) => or.len(), - If(if_expression) => if_expression.len(), - Raw(raw) => raw.len(), - Query(query) => query.len(), - Contains(contains) => contains.len(), - ContainsAll(contains_all) => contains_all.len(), - ContainsAny(contains_any) => contains_any.len(), - Where(where_expression) => where_expression.len(), - ContextValue(context_value) => context_value.len(), - Multiply(multiply) => multiply.len(), - Divide(divide) => divide.len(), - Mod(modulus) => modulus.len(), - RaiseTo(raise_to) => raise_to.len(), - } - } } impl Evaluate for ContextValue { @@ -170,10 +136,6 @@ impl Evaluate for ContextValue { .cloned() .ok_or_else(|| EvaluationError::Find(self.value_name.to_string())) } - - fn len(&self) -> usize { - 1 - } } mod numeric { @@ -207,10 +169,6 @@ mod numeric { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for Subtract { @@ -241,10 +199,6 @@ mod numeric { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for Multiply { @@ -275,10 +229,6 @@ mod numeric { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for RaiseTo { @@ -306,10 +256,6 @@ mod numeric { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for Divide { @@ -340,10 +286,6 @@ mod numeric { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for Mod { @@ -370,10 +312,6 @@ mod numeric { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } } @@ -399,10 +337,6 @@ mod logical { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for Less { @@ -424,22 +358,15 @@ mod logical { Ok(result) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } - impl Evaluate for Not { + impl Evaluate for Equal { type Value = bool; fn evaluate(&self, context: &C) -> Result { - let expression = self.expression.evaluate(context)?; - Ok(!expression) - } - - fn len(&self) -> usize { - self.expression.len() + 1 + let left = self.left.evaluate(context)?; + let right = self.right.evaluate(context)?; + Ok(left == right) } } @@ -451,10 +378,6 @@ mod logical { let right = self.right.evaluate(context)?; Ok(left && right) } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } impl Evaluate for Or { @@ -465,9 +388,14 @@ mod logical { let right = self.right.evaluate(context)?; Ok(left || right) } + } - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 + impl Evaluate for Not { + type Value = bool; + + fn evaluate(&self, context: &C) -> Result { + let expression = self.expression.evaluate(context)?; + Ok(!expression) } } @@ -479,10 +407,6 @@ mod logical { let element = self.element.evaluate(context)?; Ok(collection.contains(&element)) } - - fn len(&self) -> usize { - 
self.collection.len() + self.element.len() + 1 - } } impl Evaluate for ContainsAll { @@ -493,10 +417,6 @@ mod logical { let elements = self.elements.evaluate(context)?; Ok(elements.iter().all(|element| collection.contains(element))) } - - fn len(&self) -> usize { - self.collection.len() + self.elements.len() + 1 - } } impl Evaluate for ContainsAny { @@ -507,24 +427,6 @@ mod logical { let elements = self.elements.evaluate(context)?; Ok(elements.iter().any(|element| collection.contains(element))) } - - fn len(&self) -> usize { - self.collection.len() + self.elements.len() + 1 - } - } - - impl Evaluate for Equal { - type Value = bool; - - fn evaluate(&self, context: &C) -> Result { - let left = self.left.evaluate(context)?; - let right = self.right.evaluate(context)?; - Ok(left == right) - } - - fn len(&self) -> usize { - self.left.len() + self.right.len() + 1 - } } } @@ -539,11 +441,6 @@ impl Evaluate for If { self.otherwise.evaluate(context) } } - - fn len(&self) -> usize { - // TODO: This is wrong because we don't evaluate both branches - self.condition.len() + self.then.len() + self.otherwise.len() + 1 - } } impl Evaluate for Where { @@ -565,10 +462,6 @@ impl Evaluate for Where { combined_context.update(additional_context?); self.expression.evaluate(&combined_context) } - - fn len(&self) -> usize { - self.expression.len() + self.values.values().map(EvaluatesTo::len).sum::() + 1 - } } impl Evaluate for QueryBox { @@ -579,10 +472,6 @@ impl Evaluate for QueryBox { .query(self) .map_err(|err| EvaluationError::Validation(Box::new(err))) } - - fn len(&self) -> usize { - 1 - } } #[model] @@ -642,8 +531,6 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use core::{fmt::Debug, str::FromStr as _}; use iroha_crypto::KeyPair; diff --git a/data_model/src/events/data/events.rs b/data_model/src/events/data/events.rs index 7acfe505346..8bf07a8e49b 100644 --- a/data_model/src/events/data/events.rs +++ b/data_model/src/events/data/events.rs @@ -16,6 +16,8 @@ macro_rules! data_event { Clone, PartialEq, Eq, + PartialOrd, + Ord, Filter, HasOrigin, parity_scale_codec::Decode, @@ -37,7 +39,19 @@ pub mod model { /// Generic [`MetadataChanged`] struct. /// Contains the changed metadata (`(key, value)` pair), either inserted or removed, which is determined by the wrapping event. - #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] + #[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, + )] // TODO: Generics are not supported. 
Figure out what to do
     //#[getset(get = "pub")]
     #[ffi_type]
     pub struct MetadataChanged
@@ -60,12 +74,23 @@ pub mod model {
         Trigger(trigger::TriggerEvent),
         PermissionTokenSchemaUpdate(permission::PermissionTokenSchemaUpdateEvent),
         Configuration(config::ConfigurationEvent),
-        Validator(validator::ValidatorEvent),
+        Executor(executor::ExecutorEvent),
     }

     /// Event
     #[derive(
-        Debug, Clone, PartialEq, Eq, FromVariant, Decode, Encode, Deserialize, Serialize, IntoSchema,
+        Debug,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        FromVariant,
+        Decode,
+        Encode,
+        Deserialize,
+        Serialize,
+        IntoSchema,
     )]
     #[ffi_type]
     pub enum DataEvent {
@@ -87,8 +112,8 @@ pub mod model {
         PermissionToken(permission::PermissionTokenSchemaUpdateEvent),
         /// Configuration event
         Configuration(config::ConfigurationEvent),
-        /// Validator event
-        Validator(validator::ValidatorEvent),
+        /// Executor event
+        Executor(executor::ExecutorEvent),
     }
 }

@@ -145,7 +170,18 @@ mod asset {
     /// Depending on the wrapping event, [`Self`] represents the added or removed asset quantity.
     #[derive(
-        Debug, Clone, PartialEq, Eq, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema,
+        Debug,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        Getters,
+        Decode,
+        Encode,
+        Deserialize,
+        Serialize,
+        IntoSchema,
     )]
     #[getset(get = "pub")]
     #[ffi_type]
@@ -156,7 +192,18 @@
     /// [`Self`] represents updated total asset quantity.
     #[derive(
-        Debug, Clone, PartialEq, Eq, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema,
+        Debug,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        Getters,
+        Decode,
+        Encode,
+        Deserialize,
+        Serialize,
+        IntoSchema,
     )]
     #[getset(get = "pub")]
     #[ffi_type]
@@ -167,7 +214,18 @@
     /// [`Self`] represents updated total asset quantity.
     #[derive(
-        Debug, Clone, PartialEq, Eq, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema,
+        Debug,
+        Clone,
+        PartialEq,
+        Eq,
+        PartialOrd,
+        Ord,
+        Getters,
+        Decode,
+        Encode,
+        Deserialize,
+        Serialize,
+        IntoSchema,
     )]
     #[getset(get = "pub")]
     #[ffi_type]
@@ -257,7 +315,7 @@ mod permission {
     use super::*;

     /// Information about permission tokens update.
-    /// Only happens when registering new validator
+    /// Only happens when registering a new executor
     #[derive(
         Debug,
         Clone,
@@ -382,6 +440,7 @@ mod account {

 mod domain {
     //! This module contains `DomainEvent` and its impls

+    pub use self::model::*;
     use super::*;

     // type alias required by `Filter` macro
@@ -401,6 +460,35 @@
             MetadataInserted(DomainMetadataChanged),
             #[has_origin(metadata_changed => &metadata_changed.target_id)]
             MetadataRemoved(DomainMetadataChanged),
+            #[has_origin(owner_changed => &owner_changed.domain_id)]
+            OwnerChanged(DomainOwnerChanged),
+        }
+    }
+
+    #[model]
+    pub mod model {
+        use super::*;
+
+        /// Event indicating that the owner of the [`Domain`] has changed
+        #[derive(
+            Debug,
+            Clone,
+            PartialEq,
+            Eq,
+            PartialOrd,
+            Ord,
+            Getters,
+            Decode,
+            Encode,
+            Deserialize,
+            Serialize,
+            IntoSchema,
+        )]
+        #[getset(get = "pub")]
+        #[ffi_type]
+        pub struct DomainOwnerChanged {
+            pub domain_id: DomainId,
+            pub new_owner: AccountId,
+        }
     }
 }

@@ -414,7 +502,7 @@ mod trigger {
     use super::*;

     data_event!
{ - #[has_origin(origin = Trigger)] + #[has_origin(origin = Trigger)] pub enum TriggerEvent { Created(TriggerId), Deleted(TriggerId), @@ -466,7 +554,7 @@ mod config { } } -mod validator { +mod executor { use iroha_data_model_derive::model; pub use self::model::*; @@ -481,6 +569,8 @@ mod validator { Clone, PartialEq, Eq, + PartialOrd, + Ord, parity_scale_codec::Decode, parity_scale_codec::Encode, serde::Deserialize, @@ -491,19 +581,19 @@ mod validator { #[ffi_type] #[serde(untagged)] // Unaffected by #3330, as single unit variant #[repr(transparent)] - pub enum ValidatorEvent { + pub enum ExecutorEvent { Upgraded, } - /// Filter for [`ValidatorEvent`]. - pub enum ValidatorFilter { + /// Filter for [`ExecutorEvent`]. + pub enum ExecutorFilter { Upgraded, } } #[cfg(feature = "transparent_api")] - impl super::Filter for ValidatorFilter { - type Event = ValidatorEvent; + impl super::Filter for ExecutorFilter { + type Event = ExecutorEvent; fn matches(&self, event: &Self::Event) -> bool { match (self, event) { @@ -558,8 +648,8 @@ impl WorldEvent { WorldEvent::Configuration(config_event) => { events.push(DataEvent::Configuration(config_event)); } - WorldEvent::Validator(validator_event) => { - events.push(DataEvent::Validator(validator_event)); + WorldEvent::Executor(executor_event) => { + events.push(DataEvent::Executor(executor_event)); } } @@ -598,7 +688,7 @@ impl DataEvent { | Self::Configuration(_) | Self::Role(_) | Self::PermissionToken(_) - | Self::Validator(_) => None, + | Self::Executor(_) => None, } } } @@ -615,14 +705,14 @@ pub mod prelude { AssetEventFilter, AssetFilter, }, config::ConfigurationEvent, - domain::{DomainEvent, DomainEventFilter, DomainFilter}, + domain::{DomainEvent, DomainEventFilter, DomainFilter, DomainOwnerChanged}, + executor::{ExecutorEvent, ExecutorFilter}, peer::{PeerEvent, PeerEventFilter, PeerFilter}, permission::PermissionTokenSchemaUpdateEvent, role::{PermissionRemoved, RoleEvent, RoleEventFilter, RoleFilter}, trigger::{ TriggerEvent, TriggerEventFilter, TriggerFilter, TriggerNumberOfExecutionsChanged, }, - validator::{ValidatorEvent, ValidatorFilter}, DataEvent, HasOrigin, MetadataChanged, WorldEvent, }; } diff --git a/data_model/src/events/data/filters.rs b/data_model/src/events/data/filters.rs index 28ef6df113e..30d4029e018 100644 --- a/data_model/src/events/data/filters.rs +++ b/data_model/src/events/data/filters.rs @@ -212,7 +212,6 @@ mod tests { use crate::{ account::AccountsMap, asset::{AssetDefinitionsMap, AssetTotalQuantityMap, AssetsMap}, - role::RoleIds, }; #[test] @@ -221,6 +220,7 @@ mod tests { let domain_name = "wonderland".parse().expect("Valid"); let account_name = "alice".parse().expect("Valid"); let asset_name = "rose".parse().expect("Valid"); + let domain_owner_id = "genesis@genesis".parse().expect("Valid"); let domain_id = DomainId::new(domain_name); let domain = Domain { @@ -230,6 +230,7 @@ mod tests { asset_total_quantities: AssetTotalQuantityMap::default(), logo: None, metadata: Metadata::default(), + owned_by: domain_owner_id, }; let account_id = AccountId::new(account_name, domain_id.clone()); let account = Account { @@ -238,7 +239,6 @@ mod tests { signatories: BTreeSet::default(), signature_check_condition: SignatureCheckCondition::default(), metadata: Metadata::default(), - roles: RoleIds::default(), }; let asset_id = AssetId::new( AssetDefinitionId::new(asset_name, domain_id), diff --git a/data_model/src/events/execute_trigger.rs b/data_model/src/events/execute_trigger.rs index b9222c16387..9b05dde3163 100644 --- 
a/data_model/src/events/execute_trigger.rs +++ b/data_model/src/events/execute_trigger.rs @@ -18,7 +18,8 @@ pub mod model { Clone, PartialEq, Eq, - Hash, + PartialOrd, + Ord, Getters, Decode, Encode, @@ -43,7 +44,6 @@ pub mod model { Ord, PartialEq, Eq, - Hash, Constructor, Decode, Encode, diff --git a/data_model/src/events/mod.rs b/data_model/src/events/mod.rs index 93ef1b49aaa..37382ddcc8a 100644 --- a/data_model/src/events/mod.rs +++ b/data_model/src/events/mod.rs @@ -23,7 +23,18 @@ pub mod model { #[allow(missing_docs)] #[derive( - Debug, Clone, PartialEq, Eq, FromVariant, Decode, Encode, Deserialize, Serialize, IntoSchema, + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + FromVariant, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, )] #[ffi_type] pub enum Event { @@ -185,45 +196,18 @@ pub mod stream { pub use self::model::*; use super::*; - declare_versioned_with_scale!(VersionedEventMessage 1..2, Debug, Clone, FromVariant, IntoSchema); - - impl VersionedEventMessage { - #[allow(missing_docs)] - pub const fn as_v1(&self) -> &EventMessage { - match self { - Self::V1(v1) => v1, - } - } - - #[allow(missing_docs)] - pub fn as_mut_v1(&mut self) -> &mut EventMessage { - match self { - Self::V1(v1) => v1, - } - } - - #[allow(missing_docs)] - pub fn into_v1(self) -> EventMessage { - match self { - Self::V1(v1) => v1, - } - } - } - #[model] pub mod model { use super::*; /// Message sent by the stream producer. /// Event sent by the peer. - #[version_with_scale(version = 1, versioned_alias = "VersionedEventMessage")] #[derive(Debug, Clone, Decode, Encode, IntoSchema)] #[repr(transparent)] pub struct EventMessage(pub Event); /// Message sent by the stream consumer. /// Request sent by the client to subscribe to events. - #[version_with_scale(version = 1, versioned_alias = "VersionedEventSubscriptionRequest")] #[derive(Debug, Clone, Constructor, Decode, Encode, IntoSchema)] #[repr(transparent)] pub struct EventSubscriptionRequest(pub FilterBox); @@ -234,40 +218,12 @@ pub mod stream { source.0 } } - - declare_versioned_with_scale!(VersionedEventSubscriptionRequest 1..2, Debug, Clone, FromVariant, IntoSchema); - - impl VersionedEventSubscriptionRequest { - #[allow(missing_docs)] - pub const fn as_v1(&self) -> &EventSubscriptionRequest { - match self { - Self::V1(v1) => v1, - } - } - - #[allow(missing_docs)] - pub fn as_mut_v1(&mut self) -> &mut EventSubscriptionRequest { - match self { - Self::V1(v1) => v1, - } - } - - #[allow(missing_docs)] - pub fn into_v1(self) -> EventSubscriptionRequest { - match self { - Self::V1(v1) => v1, - } - } - } } /// Exports common structs and enums from this module. 
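These event modules all share one filter shape: a filter enum mirrors its event enum and a `matches` method pairs them, as in the `ExecutorFilter` impl above. A self-contained sketch of the pattern with toy types (not the crate's `Filter` trait, which sits behind `transparent_api`):

// Mirrors the event/filter pairing used for data events.
#[derive(Debug, Clone, PartialEq, Eq)]
enum Event {
    Created(u32),
    Deleted(u32),
}

#[derive(Debug, Clone, PartialEq, Eq)]
enum Filter {
    ByCreated,
    ByDeleted,
    ById(u32),
}

impl Filter {
    // Accept an event when it matches the filter's variant (and id, if given).
    fn matches(&self, event: &Event) -> bool {
        match (self, event) {
            (Filter::ByCreated, Event::Created(_)) => true,
            (Filter::ByDeleted, Event::Deleted(_)) => true,
            (Filter::ById(id), Event::Created(x) | Event::Deleted(x)) => id == x,
            _ => false,
        }
    }
}

fn main() {
    assert!(Filter::ByCreated.matches(&Event::Created(1)));
    assert!(!Filter::ByCreated.matches(&Event::Deleted(1)));
    assert!(Filter::ById(1).matches(&Event::Deleted(1)));
}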
pub mod prelude { #[cfg(feature = "http")] - pub use super::stream::{ - EventMessage, EventSubscriptionRequest, VersionedEventMessage, - VersionedEventSubscriptionRequest, - }; + pub use super::stream::{EventMessage, EventSubscriptionRequest}; #[cfg(feature = "transparent_api")] pub use super::Filter; pub use super::{ diff --git a/data_model/src/events/notification.rs b/data_model/src/events/notification.rs index 423d5f77826..8f98f3f0c73 100644 --- a/data_model/src/events/notification.rs +++ b/data_model/src/events/notification.rs @@ -21,7 +21,18 @@ pub mod model { /// Notification event for events that arise during block application process like trigger execution for example #[derive( - Debug, Clone, FromVariant, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + FromVariant, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, )] #[ffi_type] #[non_exhaustive] @@ -33,11 +44,11 @@ pub mod model { #[derive( Debug, Clone, - Getters, PartialEq, Eq, PartialOrd, Ord, + Getters, Constructor, Decode, Encode, @@ -56,12 +67,12 @@ pub mod model { #[derive( Debug, Clone, - FromVariant, - EnumDiscriminants, PartialEq, Eq, PartialOrd, Ord, + FromVariant, + EnumDiscriminants, Decode, Encode, Deserialize, @@ -113,12 +124,12 @@ pub mod model { #[derive( Debug, Clone, - Constructor, - Getters, PartialEq, Eq, PartialOrd, Ord, + Constructor, + Getters, Decode, Encode, Deserialize, diff --git a/data_model/src/events/pipeline.rs b/data_model/src/events/pipeline.rs index d375cba3a18..27dbb58ac59 100644 --- a/data_model/src/events/pipeline.rs +++ b/data_model/src/events/pipeline.rs @@ -40,6 +40,7 @@ pub mod model { /// If `Some::`, filter by the [`StatusKind`]. If `None`, accept all the [`StatusKind`]. pub(super) status_kind: Option, /// If `Some::`, filter by the [`struct@Hash`]. If `None`, accept all the [`struct@Hash`]. + // TODO: Can we make hash typed like HashOf? pub(super) hash: Option, } @@ -69,7 +70,18 @@ pub mod model { /// Strongly-typed [`Event`] that tells the receiver the kind and the hash of the changed entity as well as its [`Status`]. #[derive( - Debug, Clone, PartialEq, Eq, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema, + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Getters, + Decode, + Encode, + Deserialize, + Serialize, + IntoSchema, )] #[getset(get = "pub")] #[ffi_type] @@ -88,6 +100,8 @@ pub mod model { Clone, PartialEq, Eq, + PartialOrd, + Ord, FromVariant, EnumDiscriminants, Decode, @@ -117,6 +131,8 @@ pub mod model { Clone, PartialEq, Eq, + PartialOrd, + Ord, FromVariant, Decode, Encode, @@ -211,8 +227,6 @@ pub mod prelude { #[cfg(test)] #[cfg(feature = "transparent_api")] mod tests { - #![allow(clippy::restriction)] - #[cfg(not(feature = "std"))] use alloc::{string::ToString as _, vec, vec::Vec}; diff --git a/data_model/src/events/time.rs b/data_model/src/events/time.rs index a85ec84abfc..dc676145f78 100644 --- a/data_model/src/events/time.rs +++ b/data_model/src/events/time.rs @@ -1,6 +1,4 @@ //! 
Time event and filter
-#![allow(clippy::arithmetic_side_effects)]
-
 use core::{ops::Range, time::Duration};

 use derive_more::Constructor;
@@ -23,6 +21,8 @@ pub mod model {
         Copy,
         PartialEq,
         Eq,
+        PartialOrd,
+        Ord,
         Getters,
         Decode,
         Encode,
diff --git a/data_model/src/validator.rs b/data_model/src/executor.rs
similarity index 79%
rename from data_model/src/validator.rs
rename to data_model/src/executor.rs
index fc086f170ca..8207dcb6250 100644
--- a/data_model/src/validator.rs
+++ b/data_model/src/executor.rs
@@ -1,4 +1,4 @@
-//! Structures, traits and impls related to *runtime* `Validator`s.
+//! Structures, traits and impls related to *runtime* `Executor`s.

 #[cfg(not(feature = "std"))]
 use alloc::{format, string::String, vec::Vec};
@@ -17,10 +17,10 @@ use crate::transaction::WasmSmartContract;
 pub mod model {
     use super::*;

-    /// validator that checks if an operation satisfies some conditions.
+    /// Executor that checks if an operation satisfies some conditions.
     ///
     /// Can be used with things like [`Transaction`]s,
-    /// [`InstructionBox`]s, etc.
+    /// [`InstructionExpr`]s, etc.
     #[derive(
         Debug,
         Clone,
@@ -42,8 +42,8 @@ pub mod model {
     #[repr(transparent)]
     // TODO: Derive with getset once FFI impl is fixed
     //#[getset(get = "pub")]
-    pub struct Validator {
-        /// WASM code of the validator
+    pub struct Executor {
+        /// WASM code of the executor
         pub wasm: WasmSmartContract,
     }
@@ -51,16 +51,16 @@
     // implemented use: #[cfg(any(feature = "transparent_api", feature = "ffi_import"))]
 }

-/// Result type that every validator should return.
+/// Result type that every executor should return.
 pub type Result = core::result::Result;

 /// Migration error type.
 pub type MigrationError = String;

-/// Result type for a validator's `migrate()` entrypoint.
+/// Result type for an executor's `migrate()` entrypoint.
 pub type MigrationResult = Result<(), MigrationError>;

 pub mod prelude {
     //! The prelude re-exports most commonly used traits, structs and macros from this crate.
-    pub use super::Validator;
+    pub use super::Executor;
 }
diff --git a/data_model/src/ipfs.rs b/data_model/src/ipfs.rs
index 2af84ac2b23..13ff6cf0610 100644
--- a/data_model/src/ipfs.rs
+++ b/data_model/src/ipfs.rs
@@ -107,9 +107,7 @@ impl Decode for IpfsPath {
 #[cfg(test)]
 mod tests {
-    #![allow(clippy::restriction)]
-
-    use parity_scale_codec::DecodeAll;
+    use parity_scale_codec::DecodeAll as _;

     use super::*;
diff --git a/data_model/src/isi.rs b/data_model/src/isi.rs
index ea03aafe13a..c0b2fd41da1 100644
--- a/data_model/src/isi.rs
+++ b/data_model/src/isi.rs
@@ -19,7 +19,7 @@ use super::{expression::EvaluatesTo, prelude::*, IdBox, RegistrableBox, Value};
 use crate::{seal, Level, Registered};

 /// Marker trait designating instruction
-pub trait Instruction: Into + seal::Sealed {}
+pub trait Instruction: Into + seal::Sealed {}

 macro_rules!
isi { ($($meta:meta)* $item:item) => { @@ -79,67 +79,67 @@ pub mod model { )] #[ffi_type(opaque)] #[allow(missing_docs)] - pub enum InstructionBox { + pub enum InstructionExpr { #[debug(fmt = "{_0:?}")] - Register(RegisterBox), + Register(RegisterExpr), #[debug(fmt = "{_0:?}")] - Unregister(UnregisterBox), + Unregister(UnregisterExpr), #[debug(fmt = "{_0:?}")] - Mint(MintBox), + Mint(MintExpr), #[debug(fmt = "{_0:?}")] - Burn(BurnBox), + Burn(BurnExpr), #[debug(fmt = "{_0:?}")] - Transfer(TransferBox), + Transfer(TransferExpr), #[debug(fmt = "{_0:?}")] - If(Box), + If(Box), #[debug(fmt = "{_0:?}")] - Pair(Box), + Pair(Box), #[debug(fmt = "{_0:?}")] - Sequence(SequenceBox), + Sequence(SequenceExpr), #[debug(fmt = "{_0:?}")] - SetKeyValue(SetKeyValueBox), + SetKeyValue(SetKeyValueExpr), #[debug(fmt = "{_0:?}")] - RemoveKeyValue(RemoveKeyValueBox), + RemoveKeyValue(RemoveKeyValueExpr), #[debug(fmt = "{_0:?}")] - Grant(GrantBox), + Grant(GrantExpr), #[debug(fmt = "{_0:?}")] - Revoke(RevokeBox), + Revoke(RevokeExpr), #[debug(fmt = "{_0:?}")] - ExecuteTrigger(ExecuteTriggerBox), + ExecuteTrigger(ExecuteTriggerExpr), #[debug(fmt = "{_0:?}")] - SetParameter(SetParameterBox), + SetParameter(SetParameterExpr), #[debug(fmt = "{_0:?}")] - NewParameter(NewParameterBox), - Upgrade(UpgradeBox), + NewParameter(NewParameterExpr), + Upgrade(UpgradeExpr), /// `Log` variant. - Log(LogBox), + Log(LogExpr), #[debug(fmt = "{_0:?}")] - Fail(FailBox), - } - - impl Instruction for InstructionBox {} - - impl Instruction for SetKeyValueBox {} - impl Instruction for RemoveKeyValueBox {} - impl Instruction for RegisterBox {} - impl Instruction for UnregisterBox {} - impl Instruction for MintBox {} - impl Instruction for BurnBox {} - impl Instruction for TransferBox {} - impl Instruction for GrantBox {} - impl Instruction for RevokeBox {} - impl Instruction for SetParameterBox {} - impl Instruction for NewParameterBox {} - impl Instruction for UpgradeBox {} - impl Instruction for ExecuteTriggerBox {} - impl Instruction for FailBox {} - impl Instruction for LogBox {} + Fail(Fail), + } + + impl Instruction for InstructionExpr {} + + impl Instruction for SetKeyValueExpr {} + impl Instruction for RemoveKeyValueExpr {} + impl Instruction for RegisterExpr {} + impl Instruction for UnregisterExpr {} + impl Instruction for MintExpr {} + impl Instruction for BurnExpr {} + impl Instruction for TransferExpr {} + impl Instruction for GrantExpr {} + impl Instruction for RevokeExpr {} + impl Instruction for SetParameterExpr {} + impl Instruction for NewParameterExpr {} + impl Instruction for UpgradeExpr {} + impl Instruction for ExecuteTriggerExpr {} + impl Instruction for LogExpr {} + impl Instruction for Fail {} // Composite instructions - impl Instruction for SequenceBox {} - impl Instruction for Conditional {} - impl Instruction for Pair {} + impl Instruction for ConditionalExpr {} + impl Instruction for SequenceExpr {} + impl Instruction for PairExpr {} } mod transparent { @@ -147,7 +147,7 @@ mod transparent { // because they are never shared between client and server(http)/host(wasm) use super::*; - use crate::validator::Validator; + use crate::executor::Executor; /// Generic instruction to set key value at the object. #[derive(Debug, Clone)] @@ -178,14 +178,14 @@ mod transparent { /// Generic instruction for an unregistration of an object from the identifiable destination. #[derive(Debug, Clone)] - pub struct Unregister { + pub struct Unregister { /// [`Identifiable::Id`] of the object which should be unregistered. 
pub object_id: O::Id, } /// Generic instruction for a mint of an object to the identifiable destination. #[derive(Debug, Clone)] - pub struct Mint> { + pub struct Mint, D: Identifiable> { /// Object which should be minted. pub object: O, /// Destination object [`Identifiable::Id`]. @@ -194,7 +194,7 @@ mod transparent { /// Generic instruction for a burn of an object to the identifiable destination. #[derive(Debug, Clone)] - pub struct Burn> { + pub struct Burn, D: Identifiable> { /// Object which should be burned. pub object: O, /// Destination object [`Identifiable::Id`]. @@ -214,20 +214,20 @@ mod transparent { /// Generic instruction for granting permission to an entity. #[derive(Debug, Clone)] - pub struct Grant> { + pub struct Grant> { /// Object to grant. pub object: O, /// Entity to which to grant this token. - pub destination_id: D::Id, + pub destination_id: AccountId, } /// Generic instruction for revoking permission from an entity. #[derive(Debug, Clone)] - pub struct Revoke> { + pub struct Revoke> { /// Object to revoke. pub object: O, /// Entity which is being revoked this token from. - pub destination_id: D::Id, + pub destination_id: AccountId, } /// Generic instruction for setting a chain-wide config parameter. @@ -261,49 +261,49 @@ mod transparent { /// Generic instruction for logging messages #[derive(Debug, Clone)] pub struct Log { - /// Message to be logged - pub msg: String, /// Log level of the message pub level: Level, + /// Message to be logged + pub msg: String, } - impl From> for SetKeyValueBox { + impl From> for SetKeyValueExpr { fn from(source: SetKeyValue) -> Self { Self::new(source.object_id.into(), source.key, source.value) } } - impl From> for RemoveKeyValueBox { + impl From> for RemoveKeyValueExpr { fn from(source: RemoveKeyValue) -> Self { Self::new(source.object_id.into(), source.key) } } - impl From> for RegisterBox { + impl From> for RegisterExpr { fn from(source: Register) -> Self { Self::new(source.object.into()) } } - impl From> for UnregisterBox { + impl From> for UnregisterExpr { fn from(source: Unregister) -> Self { Self::new(source.object_id.into()) } } - impl> From> for MintBox { - fn from(source: Mint) -> Self { + impl, D: Identifiable> From> for MintExpr { + fn from(source: Mint) -> Self { Self::new(source.object, source.destination_id.into()) } } - impl> From> for BurnBox { - fn from(source: Burn) -> Self { + impl, D: Identifiable> From> for BurnExpr { + fn from(source: Burn) -> Self { Self::new(source.object, source.destination_id.into()) } } - impl, D: Identifiable> From> for TransferBox { + impl, D: Identifiable> From> for TransferExpr { fn from(source: Transfer) -> Self { Self::new( source.source_id.into(), @@ -313,43 +313,43 @@ mod transparent { } } - impl> From> for GrantBox { - fn from(source: Grant) -> Self { - Self::new(source.object, source.destination_id.into()) + impl> From> for GrantExpr { + fn from(source: Grant) -> Self { + Self::new(source.object, source.destination_id) } } - impl> From> for RevokeBox { - fn from(source: Revoke) -> Self { - Self::new(source.object, source.destination_id.into()) + impl> From> for RevokeExpr { + fn from(source: Revoke) -> Self { + Self::new(source.object, source.destination_id) } } - impl From for SetParameterBox { + impl From for SetParameterExpr { fn from(source: SetParameter) -> Self { Self::new(source.parameter) } } - impl From for NewParameterBox { + impl From for NewParameterExpr { fn from(source: NewParameter) -> Self { Self::new(source.parameter) } } - impl From> for UpgradeBox { - fn 
from(source: Upgrade) -> Self { + impl From> for UpgradeExpr { + fn from(source: Upgrade) -> Self { Self::new(source.object) } } - impl From for ExecuteTriggerBox { + impl From for ExecuteTriggerExpr { fn from(source: ExecuteTrigger) -> Self { Self::new(source.trigger_id) } } - impl From for LogBox { + impl From for LogExpr { fn from(source: Log) -> Self { Self::new(source.level, source.msg) } @@ -362,9 +362,9 @@ isi! { #[display(fmt = "SET `{parameter}`")] #[serde(transparent)] #[repr(transparent)] - // SAFETY: `SetParameterBox` has no trap representation in `EvaluatesTo` + // SAFETY: `SetParameterExpr` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] - pub struct SetParameterBox { + pub struct SetParameterExpr { /// The configuration parameter being changed. #[serde(flatten)] pub parameter: EvaluatesTo, @@ -377,9 +377,9 @@ isi! { #[display(fmt = "SET `{parameter}`")] #[serde(transparent)] #[repr(transparent)] - // SAFETY: `NewParameterBox` has no trap representation in `EvaluatesTo` + // SAFETY: `NewParameterExpr` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] - pub struct NewParameterBox { + pub struct NewParameterExpr { /// The configuration parameter being created. #[serde(flatten)] pub parameter: EvaluatesTo, @@ -391,7 +391,7 @@ isi! { #[derive(Display)] #[display(fmt = "SET `{key}` = `{value}` IN `{object_id}`")] #[ffi_type] - pub struct SetKeyValueBox { + pub struct SetKeyValueExpr { /// Where to set this key value. #[serde(flatten)] pub object_id: EvaluatesTo, @@ -407,7 +407,7 @@ isi! { #[derive(Display)] #[display(fmt = "REMOVE `{key}` from `{object_id}`")] #[ffi_type] - pub struct RemoveKeyValueBox { + pub struct RemoveKeyValueExpr { /// From where to remove this key value. #[serde(flatten)] pub object_id: EvaluatesTo, @@ -422,9 +422,9 @@ isi! { #[display(fmt = "REGISTER `{object}`")] #[serde(transparent)] #[repr(transparent)] - // SAFETY: `RegisterBox` has no trap representation in `EvaluatesTo` + // SAFETY: `RegisterExpr` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] - pub struct RegisterBox { + pub struct RegisterExpr { /// The object that should be registered, should be uniquely identifiable by its id. pub object: EvaluatesTo, } @@ -436,9 +436,9 @@ isi! { #[display(fmt = "UNREGISTER `{object_id}`")] #[serde(transparent)] #[repr(transparent)] - // SAFETY: `UnregisterBox` has no trap representation in `EvaluatesTo` + // SAFETY: `UnregisterExpr` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] - pub struct UnregisterBox { + pub struct UnregisterExpr { /// The id of the object that should be unregistered. pub object_id: EvaluatesTo, } @@ -449,7 +449,7 @@ isi! { #[derive(Display)] #[display(fmt = "MINT `{object}` TO `{destination_id}`")] #[ffi_type] - pub struct MintBox { + pub struct MintExpr { /// Object to mint. pub object: EvaluatesTo, /// Entity to mint to. @@ -462,7 +462,7 @@ isi! { #[derive(Display)] #[display(fmt = "BURN `{object}` FROM `{destination_id}`")] #[ffi_type] - pub struct BurnBox { + pub struct BurnExpr { /// Object to burn. pub object: EvaluatesTo, /// Entity to burn from. @@ -475,7 +475,7 @@ isi! { #[derive(Display)] #[display(fmt = "TRANSFER `{object}` FROM `{source_id}` TO `{destination_id}`")] #[ffi_type] - pub struct TransferBox { + pub struct TransferExpr { /// Entity to transfer from. pub source_id: EvaluatesTo, /// Object to transfer. @@ -490,11 +490,11 @@ isi! 
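The `isi!` definitions above pair each transparent, statically typed instruction with a type-erased `*Expr` form, connected by a `From` impl. A self-contained sketch of that lowering pattern follows; every type here is a simplified stand-in, not the crate's real `Mint`/`MintExpr`/`Value`:

    // Typed form: the object and destination types are known statically.
    #[derive(Debug, Clone)]
    struct Mint<O, D> {
        object: O,
        destination_id: D,
    }

    // Type-erased "expression" form, as produced by the `isi!` macro.
    #[derive(Debug, Clone)]
    struct MintExpr {
        object: Value,
        destination_id: Value,
    }

    // Stand-in for the crate's `Value` enum.
    #[derive(Debug, Clone)]
    struct Value(String);

    impl From<&str> for Value {
        fn from(s: &str) -> Self {
            Value(s.to_owned())
        }
    }

    // Mirrors the `From<Mint<O, D>> for MintExpr` conversions in the hunks
    // above: erase the static types by lowering both fields into `Value`.
    impl<O: Into<Value>, D: Into<Value>> From<Mint<O, D>> for MintExpr {
        fn from(source: Mint<O, D>) -> Self {
            MintExpr {
                object: source.object.into(),
                destination_id: source.destination_id.into(),
            }
        }
    }

    fn main() {
        let typed = Mint { object: "42", destination_id: "alice@wonderland" };
        let erased: MintExpr = typed.into();
        println!("{erased:?}");
    }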
{ #[derive(Display)] #[display(fmt = "(`{left_instruction}`, `{right_instruction}`)")] #[ffi_type] - pub struct Pair { + pub struct PairExpr { /// Left instruction - pub left_instruction: InstructionBox, + pub left_instruction: InstructionExpr, /// Right instruction - pub right_instruction: InstructionBox, + pub right_instruction: InstructionExpr, } } @@ -502,15 +502,15 @@ isi! { /// Composite instruction for a sequence of instructions. #[serde(transparent)] #[repr(transparent)] - // SAFETY: `SequenceBox` has no trap representation in `Vec` + // SAFETY: `SequenceExpr` has no trap representation in `Vec` #[ffi_type(unsafe {robust})] - pub struct SequenceBox { + pub struct SequenceExpr { /// Sequence of Iroha Special Instructions to execute. - pub instructions: Vec, + pub instructions: Vec, } } -impl core::fmt::Display for SequenceBox { +impl core::fmt::Display for SequenceExpr { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "SEQUENCE [")?; let mut first = true; @@ -529,17 +529,17 @@ impl core::fmt::Display for SequenceBox { isi! { /// Composite instruction for a conditional execution of other instructions. #[ffi_type] - pub struct Conditional { + pub struct ConditionalExpr { /// Condition to be checked. pub condition: EvaluatesTo, /// Instruction to be executed if condition pass. - pub then: InstructionBox, + pub then: InstructionExpr, /// Optional instruction to be executed if condition fail. - pub otherwise: Option, + pub otherwise: Option, } } -impl core::fmt::Display for Conditional { +impl core::fmt::Display for ConditionalExpr { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!(f, "IF `{}` THEN `{}`", self.condition, self.then)?; if let Some(otherwise) = &self.otherwise { @@ -558,7 +558,7 @@ isi! { #[repr(transparent)] // SAFETY: `Fail` has no trap representation in `String` #[ffi_type(unsafe {robust})] - pub struct FailBox { + pub struct Fail { /// Message to submit. pub message: String, } @@ -569,11 +569,11 @@ isi! { #[derive(Display)] #[display(fmt = "GRANT `{object}` TO `{destination_id}`")] #[ffi_type] - pub struct GrantBox { + pub struct GrantExpr { /// Object to grant. pub object: EvaluatesTo, - /// Entity to which to grant this token. - pub destination_id: EvaluatesTo, + /// Account to which to grant this object. + pub destination_id: EvaluatesTo, } } @@ -582,11 +582,11 @@ isi! { #[derive(Display)] #[display(fmt = "REVOKE `{object}` FROM `{destination_id}`")] #[ffi_type] - pub struct RevokeBox { - /// Object to grant. + pub struct RevokeExpr { + /// Object to revoke. pub object: EvaluatesTo, - /// Entity to which to grant this token. - pub destination_id: EvaluatesTo, + /// Account from which to revoke this object. + pub destination_id: EvaluatesTo, } } @@ -596,9 +596,9 @@ isi! { #[derive(Display)] #[display(fmt = "EXECUTE `{trigger_id}`")] #[serde(transparent)] #[repr(transparent)] - // SAFETY: `ExecuteTriggerBox` has no trap representation in `TriggerId` + // SAFETY: `ExecuteTriggerExpr` has no trap representation in `TriggerId` #[ffi_type(unsafe {robust})] - pub struct ExecuteTriggerBox { + pub struct ExecuteTriggerExpr { /// Id of a trigger to execute pub trigger_id: EvaluatesTo, } @@ -610,9 +610,9 @@ isi!
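Under the renamed composite types, sequencing works as before, only with `Expr` names. A sketch of composing a `SequenceExpr`; `Fail::new` and `SequenceExpr::new` are taken from the hunks above, while the `.into()` conversions into `InstructionExpr` are assumed from the enum's variants rather than verified against the full crate:

    use iroha_data_model::prelude::*;

    // Sketch: two Fail instructions executed in order.
    fn fail_twice() -> InstructionExpr {
        let first: InstructionExpr = Fail::new("first").into();
        let second: InstructionExpr = Fail::new("second").into();
        SequenceExpr::new([first, second]).into()
    }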
{ #[display(fmt = "UPGRADE `{object}`")] #[serde(transparent)] #[repr(transparent)] - // SAFETY: `UpgradeBox` has no trap representation in `EvaluatesTo` + // SAFETY: `UpgradeExpr` has no trap representation in `EvaluatesTo` #[ffi_type(unsafe {robust})] - pub struct UpgradeBox { + pub struct UpgradeExpr { /// The object to upgrade. pub object: EvaluatesTo, } @@ -623,7 +623,7 @@ isi! { #[derive(Display)] #[display(fmt = "LOG({level}): {msg}")] #[ffi_type] - pub struct LogBox { + pub struct LogExpr { /// Message log level #[serde(flatten)] pub level: EvaluatesTo, @@ -632,8 +632,8 @@ isi! { } } -impl ExecuteTriggerBox { - /// Construct [`ExecuteTriggerBox`] +impl ExecuteTriggerExpr { + /// Construct [`ExecuteTriggerExpr`] pub fn new(trigger_id: I) -> Self where I: Into>, @@ -644,9 +644,9 @@ impl ExecuteTriggerBox { } } -impl RevokeBox { +impl RevokeExpr { /// Generic constructor. - pub fn new>, I: Into>>( + pub fn new>, I: Into>>( object: P, destination_id: I, ) -> Self { @@ -657,9 +657,9 @@ impl RevokeBox { } } -impl GrantBox { +impl GrantExpr { /// Constructor. - pub fn new>, I: Into>>( + pub fn new>, I: Into>>( object: P, destination_id: I, ) -> Self { @@ -670,8 +670,8 @@ impl GrantBox { } } -impl SetKeyValueBox { - /// Construct [`SetKeyValueBox`]. +impl SetKeyValueExpr { + /// Construct [`SetKeyValueExpr`]. pub fn new< I: Into>, K: Into>, @@ -689,8 +689,8 @@ impl SetKeyValueBox { } } -impl RemoveKeyValueBox { - /// Construct [`RemoveKeyValueBox`]. +impl RemoveKeyValueExpr { + /// Construct [`RemoveKeyValueExpr`]. pub fn new>, K: Into>>( object_id: I, key: K, @@ -702,7 +702,7 @@ impl RemoveKeyValueBox { } } -impl RegisterBox { +impl RegisterExpr { /// Construct [`Register`]. pub fn new>>(object: O) -> Self { Self { @@ -711,7 +711,7 @@ impl RegisterBox { } } -impl UnregisterBox { +impl UnregisterExpr { /// Construct [`Unregister`]. pub fn new>>(object_id: O) -> Self { Self { @@ -720,7 +720,7 @@ impl UnregisterBox { } } -impl MintBox { +impl MintExpr { /// Construct [`Mint`]. pub fn new>, D: Into>>( object: O, @@ -733,7 +733,7 @@ impl MintBox { } } -impl BurnBox { +impl BurnExpr { /// Construct [`Burn`]. pub fn new>, D: Into>>( object: O, @@ -746,7 +746,7 @@ impl BurnBox { } } -impl TransferBox { +impl TransferExpr { /// Construct [`Transfer`]. pub fn new< S: Into>, @@ -765,31 +765,34 @@ impl TransferBox { } } -impl Pair { +impl PairExpr { /// Construct [`Pair`]. - pub fn new, RI: Into>( + pub fn new, RI: Into>( left_instruction: LI, right_instruction: RI, ) -> Self { - Pair { + PairExpr { left_instruction: left_instruction.into(), right_instruction: right_instruction.into(), } } } -impl SequenceBox { - /// Construct [`SequenceBox`]. - pub fn new(instructions: impl IntoIterator) -> Self { +impl SequenceExpr { + /// Construct [`SequenceExpr`]. + pub fn new(instructions: impl IntoIterator) -> Self { Self { instructions: instructions.into_iter().collect(), } } } -impl Conditional { +impl ConditionalExpr { /// Construct [`If`]. - pub fn new>, T: Into>(condition: C, then: T) -> Self { + pub fn new>, T: Into>( + condition: C, + then: T, + ) -> Self { Self { condition: condition.into(), then: then.into(), @@ -799,8 +802,8 @@ impl Conditional { /// [`If`] constructor with `Otherwise` instruction. pub fn with_otherwise< C: Into>, - T: Into, - O: Into, + T: Into, + O: Into, >( condition: C, then: T, @@ -814,7 +817,7 @@ impl Conditional { } } -impl FailBox { +impl Fail { /// Construct [`Fail`]. 
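Note the narrowed constructors just above: `GrantExpr::new` and `RevokeExpr::new` now take anything convertible to an `AccountId` expression as the destination, rather than an arbitrary `IdBox`. A sketch of the resulting call shape, assuming `AccountId: Into<EvaluatesTo<AccountId>>` in line with the other `EvaluatesTo` conversions in this file:

    use iroha_data_model::prelude::*;

    // Sketch: the grant object stays an arbitrary Value expression, but the
    // destination must now be an account.
    fn grant_to_account(
        object: impl Into<EvaluatesTo<Value>>,
        account: AccountId,
    ) -> GrantExpr {
        GrantExpr::new(object, account)
    }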
pub fn new(message: &str) -> Self { Self { @@ -823,8 +826,8 @@ impl FailBox { } } -impl SetParameterBox { - /// Construct [`SetParameterBox`]. +impl SetParameterExpr { + /// Construct [`SetParameterExpr`]. pub fn new>>(parameter: P) -> Self { Self { parameter: parameter.into(), @@ -832,8 +835,8 @@ impl SetParameterBox { } } -impl NewParameterBox { - /// Construct [`NewParameterBox`]. +impl NewParameterExpr { + /// Construct [`NewParameterExpr`]. pub fn new>>(parameter: P) -> Self { Self { parameter: parameter.into(), @@ -841,8 +844,8 @@ impl NewParameterBox { } } -impl UpgradeBox { - /// Construct [`UpgradeBox`]. +impl UpgradeExpr { + /// Construct [`UpgradeExpr`]. pub fn new>>(object: O) -> Self { Self { object: object.into(), @@ -850,8 +853,8 @@ impl UpgradeBox { } } -impl LogBox { - /// Construct [`LogBox`] +impl LogExpr { + /// Construct [`LogExpr`] pub fn new>, M: Into>>( level: L, msg: M, @@ -1212,10 +1215,10 @@ pub mod error { /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{ - Burn, BurnBox, Conditional, ExecuteTrigger, ExecuteTriggerBox, FailBox, Grant, GrantBox, - InstructionBox, Log, LogBox, Mint, MintBox, NewParameter, NewParameterBox, Pair, Register, - RegisterBox, RemoveKeyValue, RemoveKeyValueBox, Revoke, RevokeBox, SequenceBox, - SetKeyValue, SetKeyValueBox, SetParameter, SetParameterBox, Transfer, TransferBox, - Unregister, UnregisterBox, Upgrade, UpgradeBox, + Burn, BurnExpr, ConditionalExpr, ExecuteTrigger, ExecuteTriggerExpr, Fail, Grant, + GrantExpr, InstructionExpr, Log, LogExpr, Mint, MintExpr, NewParameter, NewParameterExpr, + PairExpr, Register, RegisterExpr, RemoveKeyValue, RemoveKeyValueExpr, Revoke, RevokeExpr, + SequenceExpr, SetKeyValue, SetKeyValueExpr, SetParameter, SetParameterExpr, Transfer, + TransferExpr, Unregister, UnregisterExpr, Upgrade, UpgradeExpr, }; } diff --git a/data_model/src/lib.rs b/data_model/src/lib.rs index 6e3f2bdaee0..8bb1f0d1a98 100644 --- a/data_model/src/lib.rs +++ b/data_model/src/lib.rs @@ -1,15 +1,10 @@ //! Iroha Data Model contains structures for Domains, Peers, Accounts and Assets with simple, //! non-specific functions like serialization. -#![allow( - clippy::module_name_repetitions, - clippy::unwrap_in_result, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects, - clippy::trait_duplication_in_bounds, - clippy::extra_unused_lifetimes, // Thanks to `EnumKind` not knowing how to write a derive macro. 
- clippy::items_after_test_module, // Clippy bug -)] +// Clippy bug +#![allow(clippy::items_after_test_module)] +// in no_std some code gets cfg-ed out, so we silence the warnings +#![cfg_attr(not(feature = "std"), allow(unused, unused_tuple_struct_fields))] #![cfg_attr(not(feature = "std"), no_std)] #[cfg(not(feature = "std"))] @@ -30,11 +25,10 @@ use core::{ str::FromStr, }; -use block::VersionedCommittedBlock; +use block::SignedBlock; #[cfg(not(target_arch = "aarch64"))] use derive_more::Into; -use derive_more::{AsRef, DebugCustom, Deref, Display, From, FromStr}; -use evaluate::Evaluate; +use derive_more::{AsRef, Constructor, DebugCustom, Deref, Display, From, FromStr}; use events::TriggeringFilterBox; use getset::Getters; use iroha_crypto::{HashOf, PublicKey}; @@ -47,9 +41,10 @@ use iroha_primitives::{ small::{Array as SmallArray, SmallVec}, }; use iroha_schema::IntoSchema; +use iroha_version::{declare_versioned_with_scale, version_with_scale}; pub use numeric::model::NumericValue; use parity_scale_codec::{Decode, Encode}; -use prelude::{Executable, TransactionQueryOutput, VersionedSignedTransaction}; +use prelude::{Executable, SignedTransaction, TransactionQueryOutput}; use serde::{Deserialize, Serialize}; use serde_with::{DeserializeFromStr, SerializeDisplay}; use strum::FromRepr; @@ -63,6 +58,7 @@ pub mod block; pub mod domain; pub mod evaluate; pub mod events; +pub mod executor; pub mod expression; pub mod ipfs; pub mod isi; @@ -75,11 +71,10 @@ pub mod permission; pub mod predicate; pub mod query; pub mod role; +pub mod smart_contract; pub mod transaction; pub mod trigger; -pub mod validator; pub mod visit; -pub mod wasm; mod seal { use crate::{isi::prelude::*, query::prelude::*}; @@ -94,28 +89,28 @@ mod seal { impl_sealed! { // Boxed instructions - InstructionBox, - SetKeyValueBox, - RemoveKeyValueBox, - RegisterBox, - UnregisterBox, - MintBox, - BurnBox, - TransferBox, - GrantBox, - RevokeBox, - SetParameterBox, - NewParameterBox, - UpgradeBox, - ExecuteTriggerBox, - LogBox, + InstructionExpr, + SetKeyValueExpr, + RemoveKeyValueExpr, + RegisterExpr, + UnregisterExpr, + MintExpr, + BurnExpr, + TransferExpr, + GrantExpr, + RevokeExpr, + SetParameterExpr, + NewParameterExpr, + UpgradeExpr, + ExecuteTriggerExpr, + LogExpr, // Composite instructions - SequenceBox, - Conditional, - Pair, + SequenceExpr, + ConditionalExpr, + PairExpr, - FailBox, + Fail, // Boxed queries QueryBox, @@ -136,7 +131,6 @@ mod seal { FindAssetsByDomainIdAndAssetDefinitionId, FindAssetQuantityById, FindTotalAssetQuantityByAssetDefinitionId, - IsAssetDefinitionOwner, FindAssetKeyValueByIdAndKey, FindAssetDefinitionKeyValueByIdAndKey, FindAllDomains, @@ -203,7 +197,6 @@ pub struct EnumTryAsError { } // Manual implementation because this allow annotation does not affect `Display` derive -#[allow(clippy::use_debug)] impl fmt::Display for EnumTryAsError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> fmt::Result { write!( @@ -457,24 +450,24 @@ pub mod parameter { } /// Create sequence isi for setting parameters - pub fn into_set_parameters(self) -> isi::SequenceBox { - isi::SequenceBox { + pub fn into_set_parameters(self) -> isi::SequenceExpr { + isi::SequenceExpr { instructions: self .parameters .into_iter() - .map(isi::SetParameterBox::new) + .map(isi::SetParameterExpr::new) .map(Into::into) .collect(), } } /// Create sequence isi for creating parameters - pub fn into_create_parameters(self) -> isi::SequenceBox { - isi::SequenceBox { + pub fn into_create_parameters(self) -> isi::SequenceExpr { + 
isi::SequenceExpr { instructions: self .parameters .into_iter() - .map(isi::NewParameterBox::new) + .map(isi::NewParameterExpr::new) .map(Into::into) .collect(), } @@ -638,7 +631,7 @@ pub mod model { Asset(::With), /// [`Trigger`](`trigger::Trigger`) variant. #[display(fmt = "Trigger {_0}")] - Trigger( as Registered>::With), + Trigger( as Registered>::With), /// [`Role`](`role::Role`) variant. #[display(fmt = "Role {_0}")] Role(::With), @@ -680,40 +673,14 @@ pub mod model { AssetDefinition(asset::AssetDefinition), /// [`Asset`](`asset::Asset`) variant. Asset(asset::Asset), - /// [`TriggerBox`] variant. - Trigger(TriggerBox), + /// [`Trigger`](`trigger::Trigger`) variant. + Trigger(trigger::Trigger), /// [`Role`](`role::Role`) variant. Role(role::Role), /// [`Parameter`](`parameter::Parameter`) variant. Parameter(parameter::Parameter), } - /// Sized container for triggers with different executables. - #[derive( - Debug, - Display, - Clone, - PartialEq, - Eq, - PartialOrd, - Ord, - FromVariant, - Decode, - Encode, - Deserialize, - Serialize, - IntoSchema, - )] - #[ffi_type] - pub enum TriggerBox { - /// Un-optimized [`Trigger`](`trigger::Trigger`) submitted from client to Iroha. - #[display(fmt = "{_0}")] - Raw(trigger::Trigger), - /// Optimized [`Trigger`](`trigger::Trigger`) returned from Iroha to client. - #[display(fmt = "{_0} (optimised)")] - Optimized(trigger::Trigger), - } - /// Sized container for all possible upgradable entities. #[derive( Debug, @@ -730,14 +697,14 @@ pub mod model { Serialize, IntoSchema, )] - // SAFETY: `UpgradableBox` has no trap representations in `validator::Validator` + // SAFETY: `UpgradableBox` has no trap representations in `executor::Executor` #[ffi_type(unsafe {robust})] #[serde(untagged)] // Unaffected by #3330, because stores binary data with no `u128` #[repr(transparent)] pub enum UpgradableBox { - /// [`Validator`](`validator::Validator`) variant. - #[display(fmt = "Validator")] - Validator(validator::Validator), + /// [`Executor`](`executor::Executor`) variant. + #[display(fmt = "Executor")] + Executor(executor::Executor), } /// Sized container for all possible values. @@ -780,14 +747,14 @@ pub mod model { PermissionToken(permission::PermissionToken), PermissionTokenSchema(permission::PermissionTokenSchema), Hash(HashValue), - Block(VersionedCommittedBlockWrapper), + Block(SignedBlockWrapper), BlockHeader(block::BlockHeader), Ipv4Addr(iroha_primitives::addr::Ipv4Addr), Ipv6Addr(iroha_primitives::addr::Ipv6Addr), #[serde_partially_tagged(untagged)] #[debug(fmt = "{_0:?}")] Numeric(NumericValue), - Validator(validator::Validator), + Executor(executor::Executor), LogLevel(Level), } @@ -810,12 +777,12 @@ pub mod model { #[ffi_type] pub enum HashValue { /// Transaction hash - Transaction(HashOf), + Transaction(HashOf), /// Block hash - Block(HashOf), + Block(HashOf), } - /// Cross-platform wrapper for [`VersionedCommittedBlock`]. + /// Cross-platform wrapper for [`SignedBlock`]. #[cfg(not(target_arch = "aarch64"))] #[derive( Debug, @@ -834,12 +801,12 @@ pub mod model { Serialize, IntoSchema, )] - // SAFETY: VersionedCommittedBlockWrapper has no trap representations in VersionedCommittedBlock + // SAFETY: SignedBlockWrapper has no trap representations in SignedBlock #[schema(transparent)] #[ffi_type(unsafe {robust})] #[serde(transparent)] #[repr(transparent)] - pub struct VersionedCommittedBlockWrapper(VersionedCommittedBlock); + pub struct SignedBlockWrapper(SignedBlock); /// Cross-platform wrapper for `BlockValue`. 
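With `UpgradableBox` reduced to a single `Executor` variant, the upgrade path is now executor-only. A sketch of building the corresponding instruction; the `Executor -> UpgradableBox -> EvaluatesTo<UpgradableBox>` conversion chain is assumed from the `FromVariant` derive, and since this diff does not show how an `Executor` is built from WASM bytes, it is taken as a parameter:

    use iroha_data_model::{executor::Executor, prelude::*};

    // Sketch: wrap a new runtime executor in the renamed Upgrade instruction.
    fn upgrade_executor(executor: Executor) -> UpgradeExpr {
        UpgradeExpr::new(executor)
    }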
#[cfg(target_arch = "aarch64")] @@ -863,11 +830,11 @@ pub mod model { #[as_ref(forward)] #[deref(forward)] #[from(forward)] - // SAFETY: VersionedCommittedBlockWrapper has no trap representations in Box + // SAFETY: SignedBlockWrapper has no trap representations in Box #[ffi_type(unsafe {robust})] #[serde(transparent)] #[repr(transparent)] - pub struct VersionedCommittedBlockWrapper(pub(super) Box); + pub struct SignedBlockWrapper(pub(super) Box); /// Limits of length of the identifiers (e.g. in [`domain::Domain`], [`account::Account`], [`asset::AssetDefinition`]) in number of chars #[derive( @@ -901,10 +868,10 @@ pub mod model { /// # Note /// /// Keep in mind that *Validation* is not the right term - /// (because *Runtime Validator* actually does execution too) and other names + /// (because *Runtime Executor* actually does execution too) and other names /// (like *Verification* or *Execution*) are being discussed. /// - /// TODO: Move to `validator` module + /// TODO: Move to `executor` module #[derive( Debug, displaydoc::Display, @@ -941,11 +908,11 @@ pub mod model { /// For example it's a very big WASM binary. /// /// It's different from [`TransactionRejectionReason::LimitCheck`] because it depends on - /// validator. + /// executor. TooComplex, /// Internal error occurred, please contact the support or check the logs if you are the node owner /// - /// Usually means a bug inside **Runtime Validator** or **Iroha** implementation. + /// Usually means a bug inside **Runtime Executor** or **Iroha** implementation. InternalError( /// Contained error message if its used internally. Empty for external users. /// Never serialized to not to expose internal errors to the end user. @@ -990,16 +957,20 @@ pub mod model { /// Error ERROR, } -} - -impl Identifiable for TriggerBox { - type Id = trigger::TriggerId; - fn id(&self) -> &Self::Id { - match self { - TriggerBox::Raw(trigger) => trigger.id(), - TriggerBox::Optimized(trigger) => trigger.id(), - } + /// Batched response of a query sent to torii + #[derive( + Debug, Clone, Constructor, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema, + )] + #[version_with_scale(version = 1, versioned_alias = "BatchedResponse")] + #[getset(get = "pub")] + #[must_use] + pub struct BatchedResponseV1 { + /// Current batch + pub batch: T, + /// Index of the next element in the result set. Client will use this value + /// in the next request to continue fetching results of the original query + pub cursor: crate::query::cursor::ForwardCursor, } } @@ -1055,8 +1026,8 @@ macro_rules! val_vec { } #[cfg(target_arch = "aarch64")] -impl From for VersionedCommittedBlock { - fn from(block_value: VersionedCommittedBlockWrapper) -> Self { +impl From for SignedBlock { + fn from(block_value: SignedBlockWrapper) -> Self { *block_value.0 } } @@ -1068,7 +1039,6 @@ impl fmt::Display for Value { Value::Bool(v) => fmt::Display::fmt(&v, f), Value::String(v) => fmt::Display::fmt(&v, f), Value::Name(v) => fmt::Display::fmt(&v, f), - #[allow(clippy::use_debug)] Value::Vec(v) => { // TODO: Remove so we can derive. 
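`BatchedResponseV1` replaces the old `http::BatchedResponse`; the versioned wrapper and its tuple conversion (the `From` impl appears a little further down in this diff) are easy to model standalone. A self-contained sketch with `ForwardCursor` simplified to an option:

    // Stand-in: the real ForwardCursor carries a query id and an offset.
    #[derive(Debug, Clone)]
    struct ForwardCursor(Option<u64>);

    #[derive(Debug, Clone)]
    struct BatchedResponseV1<T> {
        batch: T,
        cursor: ForwardCursor,
    }

    // Single-variant versioned wrapper, as declared by
    // `declare_versioned_with_scale!(BatchedResponse 1..2, ...)`.
    #[derive(Debug, Clone)]
    enum BatchedResponse<T> {
        V1(BatchedResponseV1<T>),
    }

    // Mirrors the `From<BatchedResponse<T>> for (T, ForwardCursor)` impl
    // shown later in this diff.
    impl<T> From<BatchedResponse<T>> for (T, ForwardCursor) {
        fn from(source: BatchedResponse<T>) -> Self {
            let BatchedResponse::V1(batch) = source;
            (batch.batch, batch.cursor)
        }
    }

    fn main() {
        let response = BatchedResponse::V1(BatchedResponseV1 {
            batch: vec![1, 2, 3],
            cursor: ForwardCursor(Some(3)),
        });
        let (batch, cursor): (Vec<i32>, ForwardCursor) = response.into();
        println!("batch = {batch:?}, next = {cursor:?}");
    }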
let list_of_display: Vec<_> = v.iter().map(ToString::to_string).collect(); @@ -1094,7 +1064,7 @@ impl fmt::Display for Value { Value::MetadataLimits(v) => fmt::Display::fmt(&v, f), Value::TransactionLimits(v) => fmt::Display::fmt(&v, f), Value::LengthLimits(v) => fmt::Display::fmt(&v, f), - Value::Validator(v) => write!(f, "Validator({} bytes)", v.wasm.as_ref().len()), + Value::Executor(v) => write!(f, "Executor({} bytes)", v.wasm.as_ref().len()), Value::LogLevel(v) => fmt::Display::fmt(&v, f), } } @@ -1125,17 +1095,17 @@ impl Value { | TransactionLimits(_) | LengthLimits(_) | Numeric(_) - | Validator(_) - | LogLevel(_) => 1_usize, + | Executor(_) + | LogLevel(_) + | SignatureCheckCondition(_) => 1_usize, Vec(v) => v.iter().map(Self::len).sum::() + 1_usize, LimitedMetadata(data) => data.nested_len() + 1_usize, - SignatureCheckCondition(s) => Evaluate::len(&s.0), } } } -impl From for Value { - fn from(block_value: VersionedCommittedBlock) -> Self { +impl From for Value { + fn from(block_value: SignedBlock) -> Self { Value::Block(block_value.into()) } } @@ -1321,14 +1291,14 @@ from_and_try_from_value_identifiable!( Account(account::Account), AssetDefinition(asset::AssetDefinition), Asset(asset::Asset), - Trigger(TriggerBox), + Trigger(trigger::Trigger), Role(role::Role), Parameter(parameter::Parameter), ); from_and_try_from_and_try_as_value_hash! { - Transaction(HashOf), - Block(HashOf), + Transaction(HashOf), + Block(HashOf), } from_and_try_from_and_try_as_value_numeric! { @@ -1374,13 +1344,10 @@ impl TryFrom for RegistrableBox { } NewRole(role) => Ok(RegistrableBox::Role(role)), Asset(asset) => Ok(RegistrableBox::Asset(asset)), - Trigger(TriggerBox::Raw(trigger)) => Ok(RegistrableBox::Trigger(trigger)), - Domain(_) - | Account(_) - | AssetDefinition(_) - | Role(_) - | Parameter(_) - | Trigger(TriggerBox::Optimized(_)) => Err(Self::Error::default()), + Trigger(trigger) => Ok(RegistrableBox::Trigger(trigger)), + Domain(_) | Account(_) | AssetDefinition(_) | Role(_) | Parameter(_) => { + Err(Self::Error::default()) + } } } } @@ -1398,7 +1365,7 @@ impl From for IdentifiableBox { } Role(role) => IdentifiableBox::NewRole(role), Asset(asset) => IdentifiableBox::Asset(asset), - Trigger(trigger) => IdentifiableBox::Trigger(TriggerBox::Raw(trigger)), + Trigger(trigger) => IdentifiableBox::Trigger(trigger), } } } @@ -1428,7 +1395,7 @@ where } } -impl TryFrom for VersionedCommittedBlock { +impl TryFrom for SignedBlock { type Error = ErrorTryFromEnum; fn try_from(value: Value) -> Result { @@ -1468,49 +1435,12 @@ impl TryFrom for Value { } } -impl From> for Value { - fn from(trigger: trigger::Trigger) -> Self { - Value::Identifiable(IdentifiableBox::Trigger(TriggerBox::Raw(trigger))) - } -} - -impl From> for Value { - fn from(trigger: trigger::Trigger) -> Self { - Value::Identifiable(IdentifiableBox::Trigger(TriggerBox::Optimized(trigger))) - } -} - -impl TryFrom for trigger::Trigger { - type Error = ErrorTryFromEnum; - - fn try_from(value: Value) -> Result { - if let Value::Identifiable(IdentifiableBox::Trigger(TriggerBox::Raw(trigger))) = value { - return Ok(trigger); - } - - Err(Self::Error::default()) - } -} - -impl TryFrom for trigger::Trigger { - type Error = ErrorTryFromEnum; - - fn try_from(value: Value) -> Result { - if let Value::Identifiable(IdentifiableBox::Trigger(TriggerBox::Optimized(trigger))) = value - { - return Ok(trigger); - } - - Err(Self::Error::default()) - } -} - impl TryFrom for UpgradableBox { type Error = ErrorTryFromEnum; fn try_from(value: Value) -> Result { match value { - 
Value::Validator(validator) => Ok(Self::Validator(validator)), + Value::Executor(executor) => Ok(Self::Executor(executor)), _ => Err(Self::Error::default()), } } @@ -1730,7 +1660,7 @@ where assert!( eq(&a().not().not(), &a()), "Double negation elimination doesn't hold for {typ}", - ) + ); } for a in &values { @@ -1837,61 +1767,17 @@ pub trait PredicateTrait { pub fn current_time() -> core::time::Duration { use std::time::SystemTime; - #[allow(clippy::expect_used)] SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .expect("Failed to get the current system time") } -#[cfg(feature = "http")] -pub mod http { - //! Structures related to HTTP communication - - use iroha_data_model_derive::model; - use iroha_schema::IntoSchema; - use iroha_version::declare_versioned_with_scale; - - pub use self::model::*; - use crate::prelude::QueryOutput; - - declare_versioned_with_scale!(VersionedBatchedResponse 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); - - #[model] - pub mod model { - use getset::Getters; - use iroha_version::version_with_scale; - use parity_scale_codec::{Decode, Encode}; - use serde::{Deserialize, Serialize}; - - use super::*; - - /// Batched response of a query sent to torii - #[derive(Debug, Clone, Getters, Decode, Encode, Deserialize, Serialize, IntoSchema)] - #[version_with_scale(version = 1, versioned_alias = "VersionedBatchedResponse")] - #[getset(get = "pub")] - #[must_use] - pub struct BatchedResponse { - /// Current batch - pub batch: T, - /// Index of the next element in the result set. Client will use this value - /// in the next request to continue fetching results of the original query - pub cursor: crate::query::cursor::ForwardCursor, - } - } +declare_versioned_with_scale!(BatchedResponse 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); - impl From> for QueryOutput { - #[inline] - fn from(source: BatchedResponse) -> Self { - source.batch - } - } - - impl From> for (T, crate::query::cursor::ForwardCursor) { - fn from(source: BatchedResponse) -> Self { - let BatchedResponse { batch, cursor } = source; - - (batch, cursor) - } +impl From> for (T, crate::query::cursor::ForwardCursor) { + fn from(source: BatchedResponse) -> Self { + let BatchedResponse::V1(batch) = source; + (batch.batch, batch.cursor) } } @@ -1944,11 +1830,11 @@ pub mod prelude { pub use super::current_time; pub use super::{ account::prelude::*, asset::prelude::*, domain::prelude::*, evaluate::prelude::*, - events::prelude::*, expression::prelude::*, isi::prelude::*, metadata::prelude::*, - name::prelude::*, parameter::prelude::*, peer::prelude::*, permission::prelude::*, - query::prelude::*, role::prelude::*, transaction::prelude::*, trigger::prelude::*, - validator::prelude::*, EnumTryAsError, HasMetadata, IdBox, Identifiable, IdentifiableBox, - LengthLimits, NumericValue, PredicateTrait, RegistrableBox, ToValue, TriggerBox, TryAsMut, - TryAsRef, TryToValue, UpgradableBox, ValidationFail, Value, + events::prelude::*, executor::prelude::*, expression::prelude::*, isi::prelude::*, + metadata::prelude::*, name::prelude::*, parameter::prelude::*, peer::prelude::*, + permission::prelude::*, query::prelude::*, role::prelude::*, transaction::prelude::*, + trigger::prelude::*, EnumTryAsError, HasMetadata, IdBox, Identifiable, IdentifiableBox, + LengthLimits, NumericValue, PredicateTrait, RegistrableBox, ToValue, TryAsMut, TryAsRef, + TryToValue, UpgradableBox, ValidationFail, Value, }; } diff --git a/data_model/src/metadata.rs b/data_model/src/metadata.rs index 8b3bd78d4a8..563c8a143ff 
100644 --- a/data_model/src/metadata.rs +++ b/data_model/src/metadata.rs @@ -306,8 +306,6 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - #[cfg(not(feature = "std"))] use alloc::{borrow::ToOwned as _, vec}; use core::str::FromStr as _; diff --git a/data_model/src/name.rs b/data_model/src/name.rs index fd586bb26a5..9de18a5a7c3 100644 --- a/data_model/src/name.rs +++ b/data_model/src/name.rs @@ -106,7 +106,8 @@ impl FromStr for Name { type Err = ParseError; fn from_str(candidate: &str) -> Result { - Self::validate_str(candidate).map(|_| Self(ConstString::from(candidate))) + Self::validate_str(candidate)?; + Ok(Self(ConstString::from(candidate))) } } @@ -114,7 +115,8 @@ impl TryFrom for Name { type Error = ParseError; fn try_from(candidate: String) -> Result { - Self::validate_str(&candidate).map(|_| Self(ConstString::from(candidate))) + Self::validate_str(&candidate)?; + Ok(Self(ConstString::from(candidate))) } } @@ -125,17 +127,17 @@ impl<'de> Deserialize<'de> for Name { { use serde::de::Error as _; - let name = ConstString::deserialize(deserializer)?; - Self::validate_str(&name) - .map(|_| Self(name)) - .map_err(D::Error::custom) + let candidate = ConstString::deserialize(deserializer)?; + Self::validate_str(&candidate).map_err(D::Error::custom)?; + + Ok(Self(candidate)) } } impl Decode for Name { fn decode(input: &mut I) -> Result { let name = ConstString::decode(input)?; Self::validate_str(&name) - .map(|_| Self(name)) + .map(|()| Self(name)) .map_err(|error| error.reason.into()) } } @@ -147,8 +149,6 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - use parity_scale_codec::DecodeAll; use super::*; diff --git a/data_model/src/numeric.rs b/data_model/src/numeric.rs index e48323d3b0c..0bf04ac6e93 100644 --- a/data_model/src/numeric.rs +++ b/data_model/src/numeric.rs @@ -136,7 +136,7 @@ impl serde::Serialize for NumericValue { where S: Serializer, { - serializer.serialize_str(&format!("{:?}", self)) + serializer.serialize_str(&format!("{self:?}")) } } diff --git a/data_model/src/permission.rs b/data_model/src/permission.rs index 2e1fcd025b5..ce3fd423e28 100644 --- a/data_model/src/permission.rs +++ b/data_model/src/permission.rs @@ -46,7 +46,7 @@ pub mod model { pub payload: StringWithJson, } - /// Description of tokens defined in the validator + /// Description of tokens defined in the executor #[derive( Debug, Display, diff --git a/data_model/src/predicate.rs b/data_model/src/predicate.rs index b619681f9a1..119625f6293 100644 --- a/data_model/src/predicate.rs +++ b/data_model/src/predicate.rs @@ -23,13 +23,13 @@ mod nontrivial { /// Extend the sequence with elements of another non-empty sequence #[inline] pub fn extend(&mut self, other: Self) { - self.0.extend(other.0) + self.0.extend(other.0); } /// Append `value` to the end of the sequence #[inline] pub fn push(&mut self, value: T) { - self.0.push(value) + self.0.push(value); } /// Apply the provided function to every element of the sequence @@ -91,11 +91,11 @@ macro_rules! nontrivial { // references (e.g. &Value). pub enum GenericPredicateBox
{ /// Logically `&&` the results of applying the predicates. - And(NonTrivial>), + And(NonTrivial), /// Logically `||` the results of applying the predicats. - Or(NonTrivial>), + Or(NonTrivial), /// Negate the result of applying the predicate. - Not(Box>), + Not(Box), /// The raw predicate that must be applied. #[serde_partially_tagged(untagged)] Raw(P), @@ -230,8 +230,6 @@ impl Default for PredicateBox { #[cfg(test)] pub mod test { - #![allow(clippy::print_stdout, clippy::use_debug)] - use super::{value, PredicateBox}; use crate::{PredicateSymbol, PredicateTrait as _, ToValue}; @@ -841,7 +839,6 @@ pub mod numerical { } #[test] - #[allow(clippy::panic_in_result_fn)] // ? for syntax simplicity. fn semi_interval_semantics_fixed() -> Result<(), fixed::FixedPointOperationError> { let pred = SemiRange::Fixed((Fixed::try_from(1_f64)?, Fixed::try_from(100_f64)?).into()); @@ -903,7 +900,6 @@ pub mod numerical { } #[test] - #[allow(clippy::panic_in_result_fn)] // ? for syntax simplicity. fn interval_semantics_fixed() -> Result<(), fixed::FixedPointOperationError> { let pred = Range::Fixed((Fixed::try_from(1_f64)?, Fixed::try_from(100_f64)?).into()); @@ -992,7 +988,7 @@ pub mod value { Display(string::StringPredicate), /// Apply predicate to the numerical value. Numerical(numerical::SemiRange), - /// Timestamp (currently for [`VersionedCommittedBlock`] only). + /// Timestamp (currently for [`SignedBlock`] only). TimeStamp(numerical::SemiInterval), /// IpAddress enumerable by `u32` Ipv4Addr(ip_addr::Ipv4Predicate), @@ -1058,7 +1054,9 @@ pub mod value { ValuePredicate::Numerical(pred) => pred.applies(input), ValuePredicate::Display(pred) => pred.applies(&input.to_string()), ValuePredicate::TimeStamp(pred) => match input { - Value::Block(block) => pred.applies(block.as_v1().header.timestamp), + Value::Block(block) => { + pred.applies(block.payload().header.timestamp().as_millis()) + } _ => false, }, ValuePredicate::Ipv4Addr(pred) => match input { @@ -1156,7 +1154,6 @@ pub mod value { } #[cfg(test)] - #[allow(clippy::print_stdout, clippy::use_debug)] mod test { use peer::Peer; use prelude::Metadata; @@ -1333,7 +1330,7 @@ pub mod ip_addr { self.0 .iter() .copied() - .zip(input.into_iter()) + .zip(input) .all(|(myself, other)| myself.applies(other)) } } @@ -1369,7 +1366,6 @@ pub mod ip_addr { #[cfg(test)] mod test { - #![allow(clippy::restriction)] use super::*; #[test] diff --git a/data_model/src/query/cursor.rs b/data_model/src/query/cursor.rs index 1c3f4a24542..e18f707f3ad 100644 --- a/data_model/src/query/cursor.rs +++ b/data_model/src/query/cursor.rs @@ -1,5 +1,12 @@ //! Structures and traits related to server-side cursor. +#[cfg(not(feature = "std"))] +use alloc::{ + format, + string::{String, ToString as _}, + vec, + vec::Vec, +}; use core::num::{NonZeroU64, NonZeroUsize}; use getset::Getters; @@ -9,6 +16,7 @@ use parity_scale_codec::{Decode, Encode, Input}; use serde::Serialize; pub use self::model::*; +use super::QueryId; const QUERY_ID: &str = "query_id"; const CURSOR: &str = "cursor"; @@ -23,10 +31,17 @@ pub mod model { pub struct ForwardCursor { /// Unique ID of query. When provided in a query the query will look up if there /// is was already a query with a matching ID and resume returning result batches - pub query_id: Option, + pub query_id: Option, /// Pointer to the next element in the result set pub cursor: Option, } + + impl ForwardCursor { + /// Create a new cursor. 
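The `query_id`/`cursor` pair above is what lets a client resume a server-side result set. A self-contained sketch of the resume loop a client would run; the `fetch` function is a hypothetical transport stub, not a crate API:

    use core::num::NonZeroU64;

    type QueryId = String;

    #[derive(Debug, Clone, Default)]
    struct ForwardCursor {
        query_id: Option<QueryId>,
        cursor: Option<NonZeroU64>,
    }

    // Hypothetical transport stub: the first call returns a batch plus a
    // cursor; the second drains the result set and returns no cursor.
    fn fetch(cursor: &ForwardCursor) -> (Vec<u32>, ForwardCursor) {
        if cursor.query_id.is_none() {
            let next = ForwardCursor {
                query_id: Some("q1".to_owned()),
                cursor: NonZeroU64::new(2),
            };
            (vec![1, 2], next)
        } else {
            (vec![3], ForwardCursor::default())
        }
    }

    fn main() {
        let mut cursor = ForwardCursor::default();
        let mut all = Vec::new();
        loop {
            let (batch, next) = fetch(&cursor);
            all.extend(batch);
            if next.cursor.is_none() {
                break; // server reports the result set is exhausted
            }
            cursor = next;
        }
        assert_eq!(all, vec![1, 2, 3]);
    }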
+ pub const fn new(query_id: Option, cursor: Option) -> Self { + Self { query_id, cursor } + } + } } mod candidate { @@ -36,7 +51,7 @@ mod candidate { #[derive(Decode, Deserialize)] struct ForwardCursorCandidate { - query_id: Option, + query_id: Option, cursor: Option, } @@ -78,7 +93,7 @@ mod candidate { } } -impl From for Vec<(&'static str, String)> { +impl From for Vec<(&'static str, QueryId)> { fn from(cursor: ForwardCursor) -> Self { match (cursor.query_id, cursor.cursor) { (Some(query_id), Some(cursor)) => { diff --git a/data_model/src/query/mod.rs b/data_model/src/query/mod.rs index 512c39e845c..d5df0b85016 100644 --- a/data_model/src/query/mod.rs +++ b/data_model/src/query/mod.rs @@ -3,22 +3,25 @@ #![allow(clippy::missing_inline_in_public_items, unused_imports)] #[cfg(not(feature = "std"))] -use alloc::{boxed::Box, format, string::String, vec::Vec}; -use core::cmp::Ordering; +use alloc::{ + boxed::Box, + format, + string::{String, ToString as _}, + vec, + vec::Vec, +}; +use core::{cmp::Ordering, num::NonZeroU32}; -#[cfg(feature = "http")] pub use cursor::ForwardCursor; -use derive_more::Display; +use derive_more::{Constructor, Display}; use iroha_crypto::{PublicKey, SignatureOf}; use iroha_data_model_derive::model; use iroha_macro::FromVariant; use iroha_schema::IntoSchema; use iroha_version::prelude::*; -#[cfg(feature = "http")] pub use pagination::Pagination; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; -#[cfg(feature = "http")] pub use sorting::Sorting; pub use self::model::*; @@ -28,19 +31,52 @@ use self::{ }; use crate::{ account::Account, - block::VersionedCommittedBlock, + block::SignedBlock, seal, - transaction::{TransactionPayload, TransactionValue, VersionedSignedTransaction}, + transaction::{SignedTransaction, TransactionPayload, TransactionValue}, Identifiable, Value, }; -#[cfg(feature = "http")] pub mod cursor; -#[cfg(feature = "http")] pub mod pagination; -#[cfg(feature = "http")] pub mod sorting; +const FETCH_SIZE: &str = "fetch_size"; + +/// Default value for `fetch_size` parameter in queries. +// SAFETY: `10` is greater than `0` +#[allow(unsafe_code)] +pub const DEFAULT_FETCH_SIZE: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(10) }; + +/// Max value for `fetch_size` parameter in queries. +// SAFETY: `10_000` is greater than `0` +#[allow(unsafe_code)] +pub const MAX_FETCH_SIZE: NonZeroU32 = unsafe { NonZeroU32::new_unchecked(10_000) }; + +/// Structure for query fetch size parameter encoding/decoding +#[derive( + Debug, Default, Clone, Copy, PartialEq, Eq, Constructor, Decode, Encode, Deserialize, Serialize, +)] +pub struct FetchSize { + /// Inner value of a fetch size. + /// + /// If not specified then [`DEFAULT_FETCH_SIZE`] is used. + pub fetch_size: Option, +} + +impl FetchSize { + /// Converts self to iterator of tuples to be used in queries. + /// + /// The length of the output iterator is not constant and has either 0 or 1 value. + pub fn into_query_parameters( + self, + ) -> impl IntoIterator + Clone { + self.fetch_size + .map(|fetch_size| (FETCH_SIZE, fetch_size)) + .into_iter() + } +} + macro_rules! queries { ($($($meta:meta)* $item:item)+) => { pub use self::model::*; @@ -59,6 +95,9 @@ macro_rules! queries { }; } +/// Unique id of a query. 
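The two fetch-size constants above reach for `unsafe { NonZeroU32::new_unchecked(...) }` with SAFETY comments. For comparison, a self-contained sketch of the `unsafe`-free alternative; `NonZeroU32::new` is a `const fn` and const panics have been stable since Rust 1.57, so this should compile on the pinned 2023 nightly (stated as an assumption, since the diff chose the unchecked form):

    use core::num::NonZeroU32;

    // Same value as DEFAULT_FETCH_SIZE, proven non-zero by a const match
    // instead of an unsafe new_unchecked call.
    const DEFAULT_FETCH_SIZE: NonZeroU32 = match NonZeroU32::new(10) {
        Some(value) => value,
        None => unreachable!(), // 10 != 0, so this arm is dead
    };

    fn main() {
        assert_eq!(DEFAULT_FETCH_SIZE.get(), 10);
    }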
+pub type QueryId = String; + /// Trait for typesafe query output pub trait Query: Into + seal::Sealed { /// Output type of query @@ -71,7 +110,7 @@ pub mod model { use iroha_crypto::HashOf; use super::*; - use crate::permission::PermissionTokenId; + use crate::{block::SignedBlock, permission::PermissionTokenId}; /// Sized container for all possible Queries. #[allow(clippy::enum_variant_names)] @@ -110,7 +149,6 @@ pub mod model { FindAssetsByDomainIdAndAssetDefinitionId(FindAssetsByDomainIdAndAssetDefinitionId), FindAssetQuantityById(FindAssetQuantityById), FindTotalAssetQuantityByAssetDefinitionId(FindTotalAssetQuantityByAssetDefinitionId), - IsAssetDefinitionOwner(IsAssetDefinitionOwner), FindAssetKeyValueByIdAndKey(FindAssetKeyValueByIdAndKey), FindAssetDefinitionKeyValueByIdAndKey(FindAssetDefinitionKeyValueByIdAndKey), FindAllDomains(FindAllDomains), @@ -143,10 +181,10 @@ pub mod model { #[getset(get = "pub")] #[ffi_type] pub struct TransactionQueryOutput { + /// The hash of the block to which `tx` belongs to + pub block_hash: HashOf, /// Transaction pub transaction: TransactionValue, - /// The hash of the block to which `tx` belongs to - pub block_hash: HashOf, } /// Type returned from [`Metadata`] queries @@ -165,6 +203,28 @@ pub mod model { )] #[ffi_type] pub struct MetadataValue(pub Value); + + /// Request type clients (like http clients or wasm) can send to a query endpoint. + #[derive(Debug, Clone, Encode, Decode, Serialize, Deserialize)] + pub enum QueryRequest { + /// Query it-self. + /// Basically used for one-time queries or to get a cursor for big queries. + Query(QueryWithParameters), + /// Cursor over previously sent [`Query`](QueryRequest::Query). + Cursor(ForwardCursor), + } + + /// Query with parameters client can specify. + #[derive( + Clone, Debug, PartialEq, Eq, Constructor, Getters, Encode, Decode, Serialize, Deserialize, + )] + #[getset(get = "pub")] + pub struct QueryWithParameters { + pub query: Q, + pub sorting: Sorting, + pub pagination: Pagination, + pub fetch_size: FetchSize, + } } impl From for Value { @@ -203,9 +263,29 @@ impl PartialOrd for TransactionQueryOutput { impl Ord for TransactionQueryOutput { #[inline] fn cmp(&self, other: &Self) -> Ordering { - self.payload() - .creation_time_ms - .cmp(&other.payload().creation_time_ms) + let tx1 = self.transaction.payload(); + let tx2 = other.transaction.payload(); + + tx1.creation_time().cmp(&tx2.creation_time()) + } +} + +impl core::fmt::Display for QueryRequest { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + match self { + Self::Query(query) => write!(f, "{query}"), + Self::Cursor(cursor) => write!(f, "{cursor:?}"), + } + } +} + +impl core::fmt::Display for QueryWithParameters { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct("QueryWithParameters") + .field("query", &self.query.to_string()) + .field("sorting", &self.sorting) + .field("pagination", &self.pagination) + .finish() } } @@ -671,16 +751,6 @@ pub mod asset { pub key: EvaluatesTo, } - /// [`IsAssetDefinitionOwner`] Iroha Query checks if provided account is the asset definition owner. - #[derive(Display)] - #[display(fmt = "Check if `{account_id}` is creator of `{asset_definition_id}` asset")] - #[ffi_type] - pub struct IsAssetDefinitionOwner { - /// `Id` of an [`AssetDefinition`] to check. - pub asset_definition_id: EvaluatesTo, - /// `Id` of a possible owner [`Account`]. 
- pub account_id: EvaluatesTo, - } } impl Query for FindAllAssets { type Output = Vec; @@ -734,10 +804,6 @@ pub mod asset { type Output = MetadataValue; } - impl Query for IsAssetDefinitionOwner { - type Output = bool; - } - impl FindAssetById { /// Construct [`FindAssetById`]. pub fn new(id: impl Into>) -> Self { @@ -836,19 +902,6 @@ pub mod asset { } } - impl IsAssetDefinitionOwner { - /// Construct [`IsAssetDefinitionOwner`]. - pub fn new( - asset_definition_id: impl Into>, - account_id: impl Into>, - ) -> Self { - Self { - asset_definition_id: asset_definition_id.into(), - account_id: account_id.into(), - } - } - } - /// The prelude re-exports most commonly used traits, structs and macros from this crate. pub mod prelude { pub use super::{ @@ -856,7 +909,7 @@ pub mod asset { FindAssetDefinitionKeyValueByIdAndKey, FindAssetKeyValueByIdAndKey, FindAssetQuantityById, FindAssetsByAccountId, FindAssetsByAssetDefinitionId, FindAssetsByDomainId, FindAssetsByDomainIdAndAssetDefinitionId, FindAssetsByName, - FindTotalAssetQuantityByAssetDefinitionId, IsAssetDefinitionOwner, + FindTotalAssetQuantityByAssetDefinitionId, }; } } @@ -997,8 +1050,8 @@ pub mod trigger { domain::prelude::*, events::TriggeringFilterBox, expression::EvaluatesTo, - prelude::InstructionBox, - trigger::{OptimizedExecutable, Trigger, TriggerId}, + prelude::InstructionExpr, + trigger::{Trigger, TriggerId}, Executable, Identifiable, Name, Value, }; @@ -1052,7 +1105,7 @@ pub mod trigger { } impl Query for FindTriggerById { - type Output = Trigger; + type Output = Trigger; } impl Query for FindTriggerKeyValueByIdAndKey { @@ -1060,7 +1113,7 @@ pub mod trigger { } impl Query for FindTriggersByDomainId { - type Output = Vec>; + type Output = Vec>; } impl FindTriggerById { @@ -1115,7 +1168,7 @@ pub mod transaction { use super::{Query, TransactionQueryOutput}; use crate::{ account::AccountId, expression::EvaluatesTo, prelude::Account, - transaction::VersionedSignedTransaction, + transaction::SignedTransaction, }; queries! { @@ -1142,11 +1195,11 @@ pub mod transaction { #[derive(Display)] #[display(fmt = "Find transaction with `{hash}` hash")] #[repr(transparent)] - // SAFETY: `FindTransactionByHash` has no trap representation in `EvaluatesTo>` + // SAFETY: `FindTransactionByHash` has no trap representation in `EvaluatesTo>` #[ffi_type(unsafe {robust})] pub struct FindTransactionByHash { /// Transaction hash. - pub hash: EvaluatesTo>, + pub hash: EvaluatesTo>, } } @@ -1163,7 +1216,7 @@ pub mod transaction { } impl FindTransactionsByAccountId { - ///Construct [`FindTransactionsByAccountId`]. + /// Construct [`FindTransactionsByAccountId`]. pub fn new(account_id: impl Into>) -> Self { Self { account_id: account_id.into(), @@ -1173,7 +1226,7 @@ pub mod transaction { impl FindTransactionByHash { /// Construct [`FindTransactionByHash`]. - pub fn new(hash: impl Into>>) -> Self { + pub fn new(hash: impl Into>>) -> Self { Self { hash: hash.into() } } } @@ -1197,7 +1250,7 @@ pub mod block { use super::Query; use crate::{ - block::{BlockHeader, VersionedCommittedBlock}, + block::{BlockHeader, SignedBlock}, prelude::EvaluatesTo, }; @@ -1220,16 +1273,16 @@ pub mod block { #[derive(Display)] #[display(fmt = "Find block header with `{hash}` hash")] #[repr(transparent)] - // SAFETY: `FindBlockHeaderByHash` has no trap representation in `EvaluatesTo>` + // SAFETY: `FindBlockHeaderByHash` has no trap representation in `EvaluatesTo>` #[ffi_type(unsafe {robust})] pub struct FindBlockHeaderByHash { /// Block hash. 
- pub hash: EvaluatesTo>, + pub hash: EvaluatesTo>, } } impl Query for FindAllBlocks { - type Output = Vec; + type Output = Vec; } impl Query for FindAllBlockHeaders { @@ -1242,7 +1295,7 @@ pub mod block { impl FindBlockHeaderByHash { /// Construct [`FindBlockHeaderByHash`]. - pub fn new(hash: impl Into>>) -> Self { + pub fn new(hash: impl Into>>) -> Self { Self { hash: hash.into() } } } @@ -1264,11 +1317,7 @@ pub mod http { use super::*; use crate::{account::AccountId, predicate::PredicateBox}; - // TODO: Could we make a variant of `Value` that holds only query results? - /// Type representing Result of executing a query - pub type QueryOutput = Value; - - declare_versioned_with_scale!(VersionedSignedQuery 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); + declare_versioned_with_scale!(SignedQuery 1..2, Debug, Clone, iroha_macro::FromVariant, IntoSchema); #[model] pub mod model { @@ -1300,13 +1349,36 @@ pub mod http { /// I/O ready structure to send queries. #[derive(Debug, Clone, Encode, Serialize, IntoSchema)] - #[version_with_scale(version = 1, versioned_alias = "VersionedSignedQuery")] - pub struct SignedQuery { + #[version_with_scale(version = 1, versioned_alias = "SignedQuery")] + pub struct SignedQueryV1 { /// Signature of the client who sends this query. pub signature: SignatureOf, /// Payload pub payload: QueryPayload, } + + /// End type of a query http clients can send to an endpoint. + #[derive(Debug, Clone, Decode, Encode)] + pub struct ClientQueryRequest(pub QueryRequest); + } + + impl ClientQueryRequest { + /// Construct a new request containing query. + pub fn query( + query: SignedQuery, + sorting: Sorting, + pagination: Pagination, + fetch_size: FetchSize, + ) -> Self { + Self(QueryRequest::Query(QueryWithParameters::new( + query, sorting, pagination, fetch_size, + ))) + } + + /// Construct a new request containing cursor. + pub fn cursor(cursor: ForwardCursor) -> Self { + Self(QueryRequest::Cursor(cursor)) + } } mod candidate { @@ -1321,20 +1393,20 @@ pub mod http { } impl SignedQueryCandidate { - fn validate(self) -> Result { + fn validate(self) -> Result { #[cfg(feature = "std")] if self.signature.verify(&self.payload).is_err() { return Err("Query signature not valid"); } - Ok(SignedQuery { + Ok(SignedQueryV1 { payload: self.payload, signature: self.signature, }) } } - impl Decode for SignedQuery { + impl Decode for SignedQueryV1 { fn decode(input: &mut I) -> Result { SignedQueryCandidate::decode(input)? 
.validate() @@ -1342,7 +1414,7 @@ pub mod http { } } - impl<'de> Deserialize<'de> for SignedQuery { + impl<'de> Deserialize<'de> for SignedQueryV1 { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, @@ -1357,25 +1429,25 @@ pub mod http { } #[cfg(feature = "transparent_api")] - impl VersionedSignedQuery { + impl SignedQuery { /// Return query signature pub fn signature(&self) -> &SignatureOf { - let VersionedSignedQuery::V1(query) = self; + let SignedQuery::V1(query) = self; &query.signature } /// Return query payload pub fn query(&self) -> &QueryBox { - let VersionedSignedQuery::V1(query) = self; + let SignedQuery::V1(query) = self; &query.payload.query } /// Return query authority pub fn authority(&self) -> &AccountId { - let VersionedSignedQuery::V1(query) = self; + let SignedQuery::V1(query) = self; &query.payload.authority } /// Return query filter pub fn filter(&self) -> &PredicateBox { - let VersionedSignedQuery::V1(query) = self; + let SignedQuery::V1(query) = self; &query.payload.filter } } @@ -1407,9 +1479,9 @@ pub mod http { pub fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result { + ) -> Result { SignatureOf::new(key_pair, &self.payload) - .map(|signature| SignedQuery { + .map(|signature| SignedQueryV1 { payload: self.payload, signature, }) @@ -1420,7 +1492,7 @@ pub mod http { pub mod prelude { //! The prelude re-exports most commonly used traits, structs and macros from this crate. - pub use super::{QueryBuilder, SignedQuery, VersionedSignedQuery}; + pub use super::{QueryBuilder, SignedQuery, SignedQueryV1}; } } @@ -1436,7 +1508,7 @@ pub mod error { pub use self::model::*; use super::*; - use crate::{block::VersionedCommittedBlock, permission, prelude::*, validator}; + use crate::{block::SignedBlock, executor, permission, prelude::*}; #[model] pub mod model { @@ -1481,8 +1553,10 @@ pub mod error { #[skip_try_from] String, ), - /// Unauthorized query: account not provided - Unauthorized, + /// Unknown query cursor + UnknownCursor, + /// fetch_size could not be greater than {MAX_FETCH_SIZE:?} + FetchSizeTooBig, } /// Type assertion error @@ -1515,9 +1589,9 @@ pub mod error { /// Failed to find metadata key: `{0}` MetadataKey(Name), /// Block with hash `{0}` not found - Block(HashOf), + Block(HashOf), /// Transaction with hash `{0}` not found - Transaction(HashOf), + Transaction(HashOf), /// Peer with id `{0}` not found Peer(PeerId), /// Trigger with id `{0}` not found @@ -1542,6 +1616,6 @@ pub mod prelude { pub use super::{ account::prelude::*, asset::prelude::*, block::prelude::*, domain::prelude::*, peer::prelude::*, permission::prelude::*, role::prelude::*, transaction::*, - trigger::prelude::*, QueryBox, TransactionQueryOutput, + trigger::prelude::*, FetchSize, QueryBox, QueryId, TransactionQueryOutput, }; } diff --git a/data_model/src/query/pagination.rs b/data_model/src/query/pagination.rs index 71a12d95f8c..a1a6c4ad1b2 100644 --- a/data_model/src/query/pagination.rs +++ b/data_model/src/query/pagination.rs @@ -17,17 +17,14 @@ use derive_more::{Constructor, Display}; use iroha_data_model_derive::model; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; -use warp::{ - http::StatusCode, - reply::{self, Response}, - Reply, -}; const PAGINATION_START: &str = "start"; const PAGINATION_LIMIT: &str = "limit"; /// Structure for pagination requests -#[derive(Debug, Display, Clone, Copy, Default, Decode, Encode, Deserialize, Serialize)] +#[derive( + Debug, Display, Clone, Copy, PartialEq, Eq, Default, Decode, Encode, 
Deserialize, Serialize, +)] #[display( fmt = "{}--{}", "start.map(NonZeroU64::get).unwrap_or(0)", @@ -41,16 +38,22 @@ pub struct Pagination { pub start: Option, } -impl From for Vec<(&'static str, NonZeroU64)> { - fn from(pagination: Pagination) -> Self { - match (pagination.start, pagination.limit) { +impl Pagination { + /// Converts self to iterator of tuples to be used in queries + /// + /// The length of the output iterator is not constant and it's in (0..3) + pub fn into_query_parameters( + self, + ) -> impl IntoIterator + Clone { + let result_vec = match (self.start, self.limit) { (Some(start), Some(limit)) => { vec![(PAGINATION_LIMIT, limit.into()), (PAGINATION_START, start)] } (Some(start), None) => vec![(PAGINATION_START, start)], (None, Some(limit)) => vec![(PAGINATION_LIMIT, limit.into())], (None, None) => Vec::new(), - } + }; + result_vec.into_iter() } } diff --git a/data_model/src/query/sorting.rs b/data_model/src/query/sorting.rs index 9ccbc83c610..d6bd3a1230f 100644 --- a/data_model/src/query/sorting.rs +++ b/data_model/src/query/sorting.rs @@ -4,6 +4,7 @@ use alloc::{ format, string::{String, ToString as _}, + vec, vec::Vec, }; @@ -21,7 +22,7 @@ pub mod model { use super::*; /// Struct for sorting requests - #[derive(Debug, Clone, Default, Decode, Encode, Deserialize, Serialize)] + #[derive(Debug, Clone, Default, PartialEq, Eq, Decode, Encode, Deserialize, Serialize)] pub struct Sorting { /// Sort query result using [`Name`] of the key in [`Asset`]'s metadata. pub sort_by_metadata_key: Option, @@ -37,13 +38,14 @@ impl Sorting { } } -impl From for Vec<(&'static str, Name)> { - fn from(sorting: Sorting) -> Self { - if let Some(key) = sorting.sort_by_metadata_key { - return vec![(SORT_BY_KEY, key)]; - } - - Vec::new() +impl Sorting { + /// Converts self to iterator of tuples to be used in queries + /// + /// The length of the output iterator is not constant and has either 0 or 1 value + pub fn into_query_parameters(self) -> impl IntoIterator + Clone { + self.sort_by_metadata_key + .map(|key| (SORT_BY_KEY, key)) + .into_iter() } } diff --git a/data_model/src/role.rs b/data_model/src/role.rs index 68f23499373..eedf399030c 100644 --- a/data_model/src/role.rs +++ b/data_model/src/role.rs @@ -1,9 +1,7 @@ //! Structures, traits and impls related to `Role`s. #[cfg(not(feature = "std"))] -use alloc::{collections::btree_set, format, string::String, vec::Vec}; -#[cfg(feature = "std")] -use std::collections::btree_set; +use alloc::{format, string::String, vec::Vec}; use derive_more::{Constructor, Display, FromStr}; use getset::Getters; @@ -18,9 +16,6 @@ use crate::{ Identifiable, Name, Registered, }; -/// Collection of [`RoleId`](Id)s -pub type RoleIds = btree_set::BTreeSet; - #[model] pub mod model { use super::*; diff --git a/data_model/src/smart_contract.rs b/data_model/src/smart_contract.rs new file mode 100644 index 00000000000..0800fbc27df --- /dev/null +++ b/data_model/src/smart_contract.rs @@ -0,0 +1,106 @@ +//! This module contains data and structures related only to smart contract execution + +use parity_scale_codec::{Decode, Encode}; + +pub use self::model::*; +use crate::{ + prelude::FetchSize, + query::{ + cursor::ForwardCursor, sorting::Sorting, Pagination, QueryBox, QueryRequest, + QueryWithParameters, + }, +}; + +pub mod payloads { + //! 
Payloads with function arguments for different entrypoints + + use parity_scale_codec::{Decode, Encode}; + + use crate::prelude::*; + + /// Payload for smart contract entrypoint + #[derive(Debug, Clone, Encode, Decode)] + pub struct SmartContract { + /// Smart contract owner who submitted transaction with it + pub owner: AccountId, + } + + /// Payload for trigger entrypoint + #[derive(Debug, Clone, Encode, Decode)] + pub struct Trigger { + /// Trigger owner who registered the trigger + pub owner: AccountId, + /// Event which triggered the execution + pub event: Event, + } + + /// Payload for migrate entrypoint + #[derive(Debug, Clone, Copy, Encode, Decode)] + pub struct Migrate { + /// Height of the latest block in the blockchain + pub block_height: u64, + } + + /// Generic payload for `validate_*()` entrypoints of executor. + #[derive(Debug, Clone, Encode, Decode)] + pub struct Validate { + /// Authority which executed the operation to be validated + pub authority: AccountId, + /// Height of the latest block in the blockchain + pub block_height: u64, + /// Operation to be validated + pub to_validate: T, + } +} + +#[crate::model] +pub mod model { + use super::*; + + /// Request type for `execute_query()` function. + #[derive(Debug, derive_more::Display, Clone, Decode, Encode)] + pub struct SmartContractQueryRequest(pub QueryRequest); +} + +impl SmartContractQueryRequest { + /// Construct a new request containing query. + pub fn query( + query: QueryBox, + sorting: Sorting, + pagination: Pagination, + fetch_size: FetchSize, + ) -> Self { + Self(QueryRequest::Query(QueryWithParameters::new( + query, sorting, pagination, fetch_size, + ))) + } + + /// Construct a new request containing cursor. + pub fn cursor(cursor: ForwardCursor) -> Self { + Self(QueryRequest::Cursor(cursor)) + } + + /// Unwrap [`Self`] if it was previously constructed with [`query()`](Self::query). + /// + /// # Panics + /// + /// Panics if [`Self`] was constructed with [`cursor()`](Self::cursor). + pub fn unwrap_query(self) -> (QueryBox, Sorting, Pagination) { + match self.0 { + QueryRequest::Query(query) => (query.query, query.sorting, query.pagination), + QueryRequest::Cursor(_) => panic!("Expected query, got cursor"), + } + } + + /// Unwrap [`Self`] if it was previously constructed with [`cursor()`](Self::cursor). + /// + /// # Panics + /// + /// Panics if [`Self`] was constructed with [`query()`](Self::query). + pub fn unwrap_cursor(self) -> ForwardCursor { + match self.0 { + QueryRequest::Query(_) => panic!("Expected cursor, got query"), + QueryRequest::Cursor(cursor) => cursor, + } + } +} diff --git a/data_model/src/transaction.rs b/data_model/src/transaction.rs index d73f4f4c0e5..a7742873c7a 100644 --- a/data_model/src/transaction.rs +++ b/data_model/src/transaction.rs @@ -1,5 +1,4 @@ //! [`Transaction`] structures and related implementations. -#![allow(clippy::std_instead_of_core)] #[cfg(not(feature = "std"))] use alloc::{boxed::Box, format, string::String, vec::Vec}; use core::{ @@ -23,7 +22,7 @@ use serde::{Deserialize, Serialize}; pub use self::model::*; use crate::{ account::AccountId, - isi::{Instruction, InstructionBox}, + isi::{Instruction, InstructionExpr}, metadata::UnlimitedMetadata, name::Name, Value, @@ -52,7 +51,7 @@ pub mod model { pub enum Executable { /// Ordered set of instructions. 
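Putting the new request type together: a sketch of building a one-shot query request and unpacking it host-side, using only the constructors defined in this file; the import paths and the `Default` impls for `Sorting`, `Pagination`, and `FetchSize` are as introduced elsewhere in this diff:

    use iroha_data_model::{
        prelude::*,
        query::{Pagination, Sorting},
        smart_contract::SmartContractQueryRequest,
    };

    // Sketch: wrap a query with default parameters, then take it apart.
    // unwrap_query panics if the request held a cursor instead, per its docs.
    fn roundtrip(query: QueryBox) -> QueryBox {
        let request = SmartContractQueryRequest::query(
            query,
            Sorting::default(),
            Pagination::default(),
            FetchSize::default(),
        );
        let (query, _sorting, _pagination) = request.unwrap_query();
        query
    }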
#[debug(fmt = "{_0:?}")] - Instructions(Vec), + Instructions(Vec), /// WebAssembly smartcontract Wasm(WasmSmartContract), } @@ -154,7 +153,7 @@ pub mod model { /// After a transaction is signed and before it can be processed any further, /// the transaction must be accepted by the `Iroha` peer. /// The peer verifies the signatures and checks the limits. - #[version(version = 1, versioned_alias = "VersionedSignedTransaction")] + #[version(version = 1, versioned_alias = "SignedTransaction")] #[derive( Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, Encode, Serialize, IntoSchema, )] @@ -162,7 +161,7 @@ pub mod model { #[cfg_attr(feature = "std", display(fmt = "{}", "self.hash()"))] #[ffi_type] // TODO: All fields in this struct should be private - pub struct SignedTransaction { + pub struct SignedTransactionV1 { /// [`iroha_crypto::SignatureOf`]<[`TransactionPayload`]>. pub signatures: SignaturesOf, /// [`Transaction`] payload. @@ -174,7 +173,7 @@ pub mod model { #[ffi_type] pub struct TransactionValue { /// Committed transaction - pub value: VersionedSignedTransaction, + pub value: SignedTransaction, /// Reason of rejection pub error: Option, } @@ -253,22 +252,37 @@ impl TransactionPayload { } #[cfg(any(feature = "ffi_export", feature = "ffi_import"))] -declare_versioned!(VersionedSignedTransaction 1..2, Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, FromVariant, iroha_ffi::FfiType, IntoSchema); +declare_versioned!(SignedTransaction 1..2, Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, FromVariant, iroha_ffi::FfiType, IntoSchema); #[cfg(all(not(feature = "ffi_export"), not(feature = "ffi_import")))] -declare_versioned!(VersionedSignedTransaction 1..2, Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, FromVariant, IntoSchema); +declare_versioned!(SignedTransaction 1..2, Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, FromVariant, IntoSchema); -impl VersionedSignedTransaction { +impl SignedTransaction { /// Return transaction payload - // TODO: Leaking concrete type TransactionPayload from Versioned container. Payload should be versioned + // FIXME: Leaking concrete type TransactionPayload from Versioned container. Payload should be versioned pub fn payload(&self) -> &TransactionPayload { - let VersionedSignedTransaction::V1(tx) = self; - tx.payload() + let SignedTransaction::V1(tx) = self; + &tx.payload + } + + /// Used to inject faulty payload for testing + #[cfg(feature = "transparent_api")] + pub fn payload_mut(&mut self) -> &mut TransactionPayload { + let SignedTransaction::V1(tx) = self; + &mut tx.payload + } + + /// Used to inject faulty payload for testing + #[cfg(debug_assertions)] + #[cfg(not(feature = "transparent_api"))] + pub fn payload_mut(&mut self) -> &mut TransactionPayload { + let SignedTransaction::V1(tx) = self; + &mut tx.payload } /// Return transaction signatures pub fn signatures(&self) -> &SignaturesOf { - let VersionedSignedTransaction::V1(tx) = self; - tx.signatures() + let SignedTransaction::V1(tx) = self; + &tx.signatures } /// Calculate transaction [`Hash`](`iroha_crypto::HashOf`). 
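The hunks above invert the old naming convention: the versioned enum that downstream code actually handles is now the short `SignedTransaction`, while the concrete payload-plus-signatures struct carries the `V1` suffix. A minimal sketch of a call site after the rename, assuming (as the `From` impl further down in this file suggests) that the payload fields stay publicly readable:

```rust
use iroha_data_model::prelude::*;

// Hypothetical helper, not part of this changeset: read a transaction's
// authority through the renamed types.
fn authority_of(tx: &SignedTransaction) -> &AccountId {
    // `SignedTransaction` is the versioned enum with a single `V1` variant
    // wrapping `SignedTransactionV1`; `payload()` hides that detail.
    &tx.payload().authority
}
```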
@@ -286,12 +300,12 @@ impl VersionedSignedTransaction { pub fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result { - let VersionedSignedTransaction::V1(mut tx) = self; + ) -> Result { + let SignedTransaction::V1(mut tx) = self; let signature = iroha_crypto::SignatureOf::new(key_pair, &tx.payload)?; tx.signatures.insert(signature); - Ok(SignedTransaction { + Ok(SignedTransactionV1 { payload: tx.payload, signatures: tx.signatures, } @@ -299,10 +313,6 @@ impl VersionedSignedTransaction { } /// Add additional signatures to this transaction - /// - /// # Errors - /// - /// - if signature verification fails #[cfg(feature = "std")] #[cfg(feature = "transparent_api")] pub fn merge_signatures(&mut self, other: Self) -> bool { @@ -310,8 +320,8 @@ impl VersionedSignedTransaction { return false; } - let VersionedSignedTransaction::V1(tx1) = self; - let VersionedSignedTransaction::V1(tx2) = other; + let SignedTransaction::V1(tx1) = self; + let SignedTransaction::V1(tx2) = other; tx1.signatures.extend(tx2.signatures); true @@ -319,36 +329,38 @@ impl VersionedSignedTransaction { } #[cfg(feature = "transparent_api")] -impl From for (AccountId, Executable) { - fn from(source: VersionedSignedTransaction) -> Self { - let VersionedSignedTransaction::V1(tx) = source; +impl From for (AccountId, Executable) { + fn from(source: SignedTransaction) -> Self { + let SignedTransaction::V1(tx) = source; (tx.payload.authority, tx.payload.instructions) } } -impl SignedTransaction { +impl SignedTransactionV1 { #[cfg(feature = "std")] - fn hash(&self) -> iroha_crypto::HashOf { - // TODO: Redundant clone. How to construct a versioned reference? - // or should we return HashOf - iroha_crypto::HashOf::new(&self.clone().into()) - } - - fn payload(&self) -> &TransactionPayload { - &self.payload - } - - fn signatures(&self) -> &SignaturesOf { - &self.signatures + fn hash(&self) -> iroha_crypto::HashOf { + iroha_crypto::HashOf::from_untyped_unchecked(iroha_crypto::HashOf::new(self).into()) } } impl TransactionValue { - /// Used to return payload of the transaction + /// Calculate transaction [`Hash`](`iroha_crypto::HashOf`). + #[cfg(feature = "std")] + pub fn hash(&self) -> iroha_crypto::HashOf { + self.value.hash() + } + + /// [`Transaction`] payload. #[inline] pub fn payload(&self) -> &TransactionPayload { self.value.payload() } + + /// [`iroha_crypto::SignatureOf`]<[`TransactionPayload`]>. 
+ #[inline] + pub fn signatures(&self) -> &SignaturesOf { + self.value.signatures() + } } impl PartialOrd for TransactionValue { @@ -380,24 +392,24 @@ mod candidate { impl SignedTransactionCandidate { #[cfg(feature = "std")] - fn validate(self) -> Result { + fn validate(self) -> Result { self.validate_signatures()?; self.validate_instructions() } #[cfg(not(feature = "std"))] - fn validate(self) -> Result { + fn validate(self) -> Result { self.validate_instructions() } - fn validate_instructions(self) -> Result { + fn validate_instructions(self) -> Result { if let Executable::Instructions(instructions) = &self.payload.instructions { if instructions.is_empty() { return Err("Transaction is empty"); } } - Ok(SignedTransaction { + Ok(SignedTransactionV1 { payload: self.payload, signatures: self.signatures, }) @@ -406,19 +418,19 @@ mod candidate { #[cfg(feature = "std")] fn validate_signatures(&self) -> Result<(), &'static str> { self.signatures - .verify_hash(self.payload.hash()) + .verify(&self.payload) .map_err(|_| "Transaction contains invalid signatures") } } - impl Decode for SignedTransaction { + impl Decode for SignedTransactionV1 { fn decode(input: &mut I) -> Result { SignedTransactionCandidate::decode(input)? .validate() .map_err(Into::into) } } - impl<'de> Deserialize<'de> for SignedTransaction { + impl<'de> Deserialize<'de> for SignedTransactionV1 { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de>, @@ -445,10 +457,8 @@ mod base64 { /// Serialize bytes using `base64` pub fn serialize(bytes: &[u8], serializer: S) -> Result { - serializer.collect_str(&base64::display::Base64Display::with_config( - bytes, - base64::STANDARD, - )) + let engine = base64::engine::general_purpose::STANDARD; + serializer.collect_str(&base64::display::Base64Display::new(bytes, &engine)) } /// Deserialize bytes using `base64` @@ -463,7 +473,8 @@ mod base64 { } fn visit_str(self, v: &str) -> Result { - base64::decode(v).map_err(serde::de::Error::custom) + let engine = base64::engine::general_purpose::STANDARD; + base64::engine::Engine::decode(&engine, v).map_err(serde::de::Error::custom) } } deserializer.deserialize_str(Visitor) @@ -522,7 +533,7 @@ pub mod error { pub struct InstructionExecutionFail { /// Instruction for which execution failed #[getset(get = "pub")] - pub instruction: InstructionBox, + pub instruction: InstructionExpr, /// Error which happened during execution pub reason: String, } @@ -593,8 +604,6 @@ pub mod error { InstructionExecution(#[cfg_attr(feature = "std", source)] InstructionExecutionFail), /// Failure in WebAssembly execution WasmExecution(#[cfg_attr(feature = "std", source)] WasmExecutionFail), - /// The genesis account can only sign transactions in the genesis block - UnexpectedGenesisAccountSignature, /// Transaction rejected due to being expired Expired, } @@ -602,7 +611,7 @@ pub mod error { impl Display for InstructionExecutionFail { fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { - use InstructionBox::*; + use InstructionExpr::*; let kind = match self.instruction { Burn(_) => "burn", Fail(_) => "fail", @@ -682,7 +691,7 @@ mod http { creation_time_ms, nonce: None, time_to_live_ms: None, - instructions: Vec::::new().into(), + instructions: Vec::::new().into(), metadata: UnlimitedMetadata::new(), }, } @@ -698,7 +707,7 @@ mod http { self.payload.instructions = instructions .into_iter() .map(Into::into) - .collect::>() + .collect::>() .into(); self } @@ -747,10 +756,10 @@ mod http { pub fn sign( self, key_pair: iroha_crypto::KeyPair, - ) -> Result { + ) 
-> Result { let signatures = SignaturesOf::new(key_pair, &self.payload)?; - Ok(SignedTransaction { + Ok(SignedTransactionV1 { payload: self.payload, signatures, } @@ -764,15 +773,13 @@ pub mod prelude { #[cfg(feature = "http")] pub use super::http::TransactionBuilder; pub use super::{ - error::prelude::*, Executable, TransactionPayload, TransactionValue, - VersionedSignedTransaction, WasmSmartContract, + error::prelude::*, Executable, SignedTransaction, TransactionPayload, TransactionValue, + WasmSmartContract, }; } #[cfg(test)] mod tests { - #![allow(clippy::pedantic, clippy::restriction)] - use super::*; #[test] diff --git a/data_model/src/trigger.rs b/data_model/src/trigger.rs index fd13104501f..0127ce3bf77 100644 --- a/data_model/src/trigger.rs +++ b/data_model/src/trigger.rs @@ -7,7 +7,7 @@ use core::{cmp, str::FromStr}; use derive_more::{Constructor, Display}; use getset::Getters; use iroha_data_model_derive::{model, IdEqOrdHash}; -use iroha_macro::{ffi_impl_opaque, FromVariant}; +use iroha_macro::ffi_impl_opaque; use iroha_schema::IntoSchema; use parity_scale_codec::{Decode, Encode}; use serde::{Deserialize, Serialize}; @@ -15,8 +15,8 @@ use serde_with::{DeserializeFromStr, SerializeDisplay}; pub use self::model::*; use crate::{ - domain::DomainId, events::prelude::*, metadata::Metadata, prelude::InstructionBox, - transaction::Executable, Identifiable, Name, ParseError, Registered, + domain::DomainId, events::prelude::*, metadata::Metadata, transaction::Executable, + Identifiable, Name, ParseError, Registered, }; #[model] @@ -43,10 +43,10 @@ pub mod model { #[getset(get = "pub")] #[ffi_type] pub struct TriggerId { - /// Name given to trigger by its creator. - pub name: Name, /// DomainId of domain of the trigger. pub domain_id: Option, + /// Name given to trigger by its creator. + pub name: Name, } /// Type which is used for registering a `Trigger`. @@ -64,62 +64,38 @@ pub mod model { )] #[display(fmt = "@@{id}")] #[ffi_type] - pub struct Trigger { + pub struct Trigger { /// [`Id`] of the [`Trigger`]. pub id: TriggerId, /// Action to be performed when the trigger matches. - pub action: action::Action, - } - - /// Internal representation of Wasm blob provided by preloading it with `wasmtime` crate. - #[derive(Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema)] - pub struct WasmInternalRepr { - /// Serialized with `wasmtime::Module::serialize` - pub serialized: Vec, - /// Hash of original WASM blob on blockchain - pub blob_hash: iroha_crypto::HashOf, - } - - /// Same as [`Executable`] but instead of [`Wasm`](Executable::Wasm) contains - /// [`WasmInternalRepr`] with serialized optimized representation - /// from `wasmtime` library. - #[derive( - Debug, Clone, PartialEq, Eq, FromVariant, Decode, Encode, Deserialize, Serialize, IntoSchema, - )] - // TODO: Made opaque temporarily - #[ffi_type(opaque)] - pub enum OptimizedExecutable { - /// WASM serialized with `wasmtime`. - WasmInternalRepr(WasmInternalRepr), - /// Vector of [`instructions`](InstructionBox). - Instructions(Vec), + pub action: action::Action, } } #[ffi_impl_opaque] -impl Trigger { +impl Trigger { /// [`Id`] of the [`Trigger`]. pub fn id(&self) -> &TriggerId { &self.id } /// Action to be performed when the trigger matches. - pub fn action(&self) -> &action::Action { + pub fn action(&self) -> &action::Action { &self.action } } -impl Registered for Trigger { +impl Registered for Trigger { type With = Self; } macro_rules! impl_try_from_box { ($($variant:ident => $filter_type:ty),+ $(,)?) 
=> { $( - impl TryFrom> for Trigger<$filter_type, E> { + impl TryFrom> for Trigger<$filter_type> { type Error = &'static str; - fn try_from(boxed: Trigger) -> Result { + fn try_from(boxed: Trigger) -> Result { if let TriggeringFilterBox::$variant(concrete_filter) = boxed.action.filter { let action = action::Action::new( boxed.action.executable, @@ -210,9 +186,9 @@ pub mod action { Debug, Clone, PartialEq, Eq, Decode, Encode, Deserialize, Serialize, IntoSchema, )] #[ffi_type] - pub struct Action { + pub struct Action { /// The executable linked to this action - pub executable: E, + pub executable: Executable, /// The repeating scheme of the action. It's kept as part of the /// action and not inside the [`Trigger`] type, so that further /// sanity checking can be done. @@ -239,16 +215,16 @@ pub mod action { } #[cfg(feature = "transparent_api")] - impl crate::HasMetadata for Action { + impl crate::HasMetadata for Action { fn metadata(&self) -> &crate::metadata::Metadata { &self.metadata } } #[ffi_impl_opaque] - impl Action { + impl Action { /// The executable linked to this action - pub fn executable(&self) -> &OptimizedExecutable { + pub fn executable(&self) -> &Executable { &self.executable } /// The repeating scheme of the action. It's kept as part of the @@ -267,10 +243,10 @@ pub mod action { } } - impl Action { + impl Action { /// Construct an action given `executable`, `repeats`, `authority` and `filter`. pub fn new( - executable: impl Into, + executable: impl Into, repeats: impl Into, authority: AccountId, filter: F, @@ -293,7 +269,7 @@ pub mod action { } } - impl PartialOrd for Action { + impl PartialOrd for Action { fn partial_cmp(&self, other: &Self) -> Option { // Exclude the executable. When debugging and replacing // the trigger, its position in Hash and Tree maps should @@ -306,94 +282,13 @@ pub mod action { } } - #[allow(clippy::expect_used)] - impl Ord for Action { + impl Ord for Action { fn cmp(&self, other: &Self) -> cmp::Ordering { self.partial_cmp(other) .expect("`PartialCmp::partial_cmp()` for `Action` should never return `None`") } } - /// Trait for common methods for all [`Action`]'s - #[cfg(feature = "transparent_api")] - pub trait ActionTrait { - /// Type of action executable - type Executable; - - /// Get action executable - fn executable(&self) -> &Self::Executable; - - /// Get action repeats enum - fn repeats(&self) -> &Repeats; - - /// Set action repeats - fn set_repeats(&mut self, repeats: Repeats); - - /// Get action technical account - fn authority(&self) -> &AccountId; - - /// Get action metadata - fn metadata(&self) -> &Metadata; - - /// Check if action is mintable. 
- fn mintable(&self) -> bool; - - /// Convert action to a boxed representation - fn into_boxed(self) -> Action; - - /// Same as [`into_boxed()`](ActionTrait::into_boxed) but clones `self` - fn clone_and_box(&self) -> Action; - } - - #[cfg(feature = "transparent_api")] - impl + Clone, E: Clone> ActionTrait for Action { - type Executable = E; - - fn executable(&self) -> &Self::Executable { - &self.executable - } - - fn repeats(&self) -> &Repeats { - &self.repeats - } - - fn set_repeats(&mut self, repeats: Repeats) { - self.repeats = repeats; - } - - fn authority(&self) -> &AccountId { - &self.authority - } - - fn metadata(&self) -> &Metadata { - &self.metadata - } - - fn mintable(&self) -> bool { - self.filter.mintable() - } - - fn into_boxed(self) -> Action { - Action:: { - executable: self.executable, - repeats: self.repeats, - authority: self.authority, - filter: self.filter.into(), - metadata: self.metadata, - } - } - - fn clone_and_box(&self) -> Action { - Action:: { - executable: self.executable.clone(), - repeats: self.repeats.clone(), - authority: self.authority.clone(), - filter: self.filter.clone().into(), - metadata: self.metadata.clone(), - } - } - } - impl PartialOrd for Repeats { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) @@ -426,8 +321,6 @@ pub mod action { pub mod prelude { //! Re-exports of commonly used types. - #[cfg(feature = "transparent_api")] - pub use super::action::ActionTrait; pub use super::{action::prelude::*, Trigger, TriggerId}; } @@ -438,26 +331,20 @@ mod tests { #[test] fn trigger_with_filterbox_can_be_unboxed() { /// Should fail to compile if a new variant will be added to `TriggeringFilterBox` - #[allow(dead_code, clippy::unwrap_used)] - fn compile_time_check(boxed: Trigger) { + #[allow(dead_code)] + fn compile_time_check(boxed: Trigger) { match &boxed.action.filter { - TriggeringFilterBox::Data(_) => { - Trigger::::try_from(boxed) - .map(|_| ()) - .unwrap() - } - TriggeringFilterBox::Pipeline(_) => { - Trigger::::try_from(boxed) - .map(|_| ()) - .unwrap() - } - TriggeringFilterBox::Time(_) => { - Trigger::::try_from(boxed) - .map(|_| ()) - .unwrap() - } + TriggeringFilterBox::Data(_) => Trigger::::try_from(boxed) + .map(|_| ()) + .unwrap(), + TriggeringFilterBox::Pipeline(_) => Trigger::::try_from(boxed) + .map(|_| ()) + .unwrap(), + TriggeringFilterBox::Time(_) => Trigger::::try_from(boxed) + .map(|_| ()) + .unwrap(), TriggeringFilterBox::ExecuteTrigger(_) => { - Trigger::::try_from(boxed) + Trigger::::try_from(boxed) .map(|_| ()) .unwrap() } diff --git a/data_model/src/visit.rs b/data_model/src/visit.rs index 1f33124d6fa..62a33f328c8 100644 --- a/data_model/src/visit.rs +++ b/data_model/src/visit.rs @@ -38,27 +38,27 @@ pub trait Visit: ExpressionEvaluator { visit_unsupported(T), // Visit SignedTransaction - visit_transaction(&VersionedSignedTransaction), - visit_instruction(&InstructionBox), + visit_transaction(&SignedTransaction), + visit_instruction(&InstructionExpr), visit_expression(&EvaluatesTo), visit_wasm(&WasmSmartContract), visit_query(&QueryBox), - // Visit InstructionBox - visit_burn(&BurnBox), - visit_fail(&FailBox), - visit_grant(&GrantBox), - visit_if(&Conditional), - visit_mint(&MintBox), - visit_pair(&Pair), - visit_register(&RegisterBox), - visit_remove_key_value(&RemoveKeyValueBox), - visit_revoke(&RevokeBox), - visit_sequence(&SequenceBox), - visit_set_key_value(&SetKeyValueBox), - visit_transfer(&TransferBox), - visit_unregister(&UnregisterBox), - visit_upgrade(&UpgradeBox), + // Visit InstructionExpr + 
visit_burn(&BurnExpr), + visit_fail(&Fail), + visit_grant(&GrantExpr), + visit_if(&ConditionalExpr), + visit_mint(&MintExpr), + visit_pair(&PairExpr), + visit_register(&RegisterExpr), + visit_remove_key_value(&RemoveKeyValueExpr), + visit_revoke(&RevokeExpr), + visit_sequence(&SequenceExpr), + visit_set_key_value(&SetKeyValueExpr), + visit_transfer(&TransferExpr), + visit_unregister(&UnregisterExpr), + visit_upgrade(&UpgradeExpr), visit_execute_trigger(ExecuteTrigger), visit_new_parameter(NewParameter), @@ -106,18 +106,17 @@ pub trait Visit: ExpressionEvaluator { visit_find_trigger_by_id(&FindTriggerById), visit_find_trigger_key_value_by_id_and_key(&FindTriggerKeyValueByIdAndKey), visit_find_triggers_by_domain_id(&FindTriggersByDomainId), - visit_is_asset_definition_owner(&IsAssetDefinitionOwner), - // Visit RegisterBox + // Visit RegisterExpr visit_register_peer(Register), visit_register_domain(Register), visit_register_account(Register), visit_register_asset_definition(Register), visit_register_asset(Register), visit_register_role(Register), - visit_register_trigger(Register>), + visit_register_trigger(Register>), - // Visit UnregisterBox + // Visit UnregisterExpr visit_unregister_peer(Unregister), visit_unregister_domain(Unregister), visit_unregister_account(Unregister), @@ -125,44 +124,46 @@ pub trait Visit: ExpressionEvaluator { visit_unregister_asset(Unregister), // TODO: Need to allow role creator to unregister it somehow visit_unregister_role(Unregister), - visit_unregister_trigger(Unregister>), + visit_unregister_trigger(Unregister>), - // Visit MintBox - visit_mint_asset(Mint), - visit_mint_account_public_key(Mint), - visit_mint_account_signature_check_condition(Mint), - visit_mint_trigger_repetitions(Mint, u32>), + // Visit MintExpr + visit_mint_asset(Mint), + visit_mint_account_public_key(Mint), + visit_mint_account_signature_check_condition(Mint), + visit_mint_trigger_repetitions(Mint>), - // Visit BurnBox - visit_burn_account_public_key(Burn), - visit_burn_asset(Burn), + // Visit BurnExpr + visit_burn_account_public_key(Burn), + visit_burn_asset(Burn), + visit_burn_trigger_repetitions(Burn>), - // Visit TransferBox - visit_transfer_asset_definition(Transfer), + // Visit TransferExpr + visit_transfer_asset_definition(Transfer), visit_transfer_asset(Transfer), + visit_transfer_domain(Transfer), - // Visit SetKeyValueBox + // Visit SetKeyValueExpr visit_set_domain_key_value(SetKeyValue), visit_set_account_key_value(SetKeyValue), visit_set_asset_definition_key_value(SetKeyValue), visit_set_asset_key_value(SetKeyValue), - // Visit RemoveKeyValueBox + // Visit RemoveKeyValueExpr visit_remove_domain_key_value(RemoveKeyValue), visit_remove_account_key_value(RemoveKeyValue), visit_remove_asset_definition_key_value(RemoveKeyValue), visit_remove_asset_key_value(RemoveKeyValue), - // Visit GrantBox - visit_grant_account_permission(Grant), - visit_grant_account_role(Grant), + // Visit GrantExpr + visit_grant_account_permission(Grant), + visit_grant_account_role(Grant), - // Visit RevokeBox - visit_revoke_account_permission(Revoke), - visit_revoke_account_role(Revoke), + // Visit RevokeExpr + visit_revoke_account_permission(Revoke), + visit_revoke_account_role(Revoke), - // Visit UpgradeBox - visit_upgrade_validator(Upgrade), + // Visit UpgradeExpr + visit_upgrade_executor(Upgrade), } } @@ -177,7 +178,7 @@ fn visit_unsupported( pub fn visit_transaction( visitor: &mut V, authority: &AccountId, - transaction: &VersionedSignedTransaction, + transaction: &SignedTransaction, ) { match 
transaction.payload().instructions() { Executable::Wasm(wasm) => visitor.visit_wasm(authority, wasm), @@ -240,7 +241,6 @@ pub fn visit_query(visitor: &mut V, authority: &AccountId, qu visit_find_trigger_by_id(FindTriggerById), visit_find_trigger_key_value_by_id_and_key(FindTriggerKeyValueByIdAndKey), visit_find_triggers_by_domain_id(FindTriggersByDomainId), - visit_is_asset_definition_owner(IsAssetDefinitionOwner), } } @@ -251,7 +251,7 @@ pub fn visit_wasm( ) { } -/// Default validation for [`InstructionBox`]. +/// Default validation for [`InstructionExpr`]. /// /// # Warning /// @@ -259,29 +259,29 @@ pub fn visit_wasm( pub fn visit_instruction( visitor: &mut V, authority: &AccountId, - isi: &InstructionBox, + isi: &InstructionExpr, ) { macro_rules! isi_visitors { ( $($visitor:ident($isi:ident)),+ $(,)? ) => { match isi { - InstructionBox::NewParameter(isi) => { + InstructionExpr::NewParameter(isi) => { let parameter = evaluate_expr!(visitor, authority, ::parameter()); visitor.visit_new_parameter(authority, NewParameter{parameter}); } - InstructionBox::SetParameter(isi) => { + InstructionExpr::SetParameter(isi) => { let parameter = evaluate_expr!(visitor, authority, ::parameter()); visitor.visit_set_parameter(authority, SetParameter{parameter}); } - InstructionBox::ExecuteTrigger(isi) => { + InstructionExpr::ExecuteTrigger(isi) => { let trigger_id = evaluate_expr!(visitor, authority, ::trigger_id()); visitor.visit_execute_trigger(authority, ExecuteTrigger{trigger_id}); } - InstructionBox::Log(isi) => { - let msg = evaluate_expr!(visitor, authority, ::msg()); - let level = evaluate_expr!(visitor, authority, ::level()); + InstructionExpr::Log(isi) => { + let msg = evaluate_expr!(visitor, authority, ::msg()); + let level = evaluate_expr!(visitor, authority, ::level()); visitor.visit_log(authority, Log { msg, level }); } $( - InstructionBox::$isi(isi) => { + InstructionExpr::$isi(isi) => { visitor.$visitor(authority, isi); } )+ } @@ -341,19 +341,19 @@ pub fn visit_expression( Expression::If(expr) => { visitor.visit_expression(authority, expr.condition()); visitor.visit_expression(authority, expr.then()); - visitor.visit_expression(authority, expr.otherwise()) + visitor.visit_expression(authority, expr.otherwise()); } Expression::Contains(expr) => { visitor.visit_expression(authority, expr.collection()); - visitor.visit_expression(authority, expr.element()) + visitor.visit_expression(authority, expr.element()); } Expression::ContainsAll(expr) => { visitor.visit_expression(authority, expr.collection()); - visitor.visit_expression(authority, expr.elements()) + visitor.visit_expression(authority, expr.elements()); } Expression::ContainsAny(expr) => { visitor.visit_expression(authority, expr.collection()); - visitor.visit_expression(authority, expr.elements()) + visitor.visit_expression(authority, expr.elements()); } Expression::Where(expr) => visitor.visit_expression(authority, expr.expression()), Expression::Query(query) => visitor.visit_query(authority, query), @@ -364,7 +364,7 @@ pub fn visit_expression( pub fn visit_register( visitor: &mut V, authority: &AccountId, - isi: &RegisterBox, + isi: &RegisterExpr, ) { macro_rules! match_all { ( $( $visitor:ident($object:ident) ),+ $(,)? ) => { @@ -390,7 +390,7 @@ pub fn visit_register( pub fn visit_unregister( visitor: &mut V, authority: &AccountId, - isi: &UnregisterBox, + isi: &UnregisterExpr, ) { macro_rules! match_all { ( $( $visitor:ident($id:ident) ),+ $(,)? 
) => { @@ -413,7 +413,7 @@ pub fn visit_unregister( } } -pub fn visit_mint(visitor: &mut V, authority: &AccountId, isi: &MintBox) { +pub fn visit_mint(visitor: &mut V, authority: &AccountId, isi: &MintExpr) { let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); let object = evaluate_expr!(visitor, authority, ::object()); @@ -453,7 +453,7 @@ pub fn visit_mint(visitor: &mut V, authority: &AccountId, isi } } -pub fn visit_burn(visitor: &mut V, authority: &AccountId, isi: &BurnBox) { +pub fn visit_burn(visitor: &mut V, authority: &AccountId, isi: &BurnExpr) { let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); let object = evaluate_expr!(visitor, authority, ::object()); @@ -473,6 +473,14 @@ pub fn visit_burn(visitor: &mut V, authority: &AccountId, isi destination_id, }, ), + (IdBox::TriggerId(destination_id), Value::Numeric(NumericValue::U32(object))) => visitor + .visit_burn_trigger_repetitions( + authority, + Burn { + object, + destination_id, + }, + ), _ => visitor.visit_unsupported(authority, isi), } } @@ -480,19 +488,40 @@ pub fn visit_burn(visitor: &mut V, authority: &AccountId, isi pub fn visit_transfer( visitor: &mut V, authority: &AccountId, - isi: &TransferBox, + isi: &TransferExpr, ) { let object = evaluate_expr!(visitor, authority, ::object()); + let source_id = evaluate_expr!(visitor, authority, ::source_id()); + let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); - let (IdBox::AssetId(source_id), IdBox::AccountId(destination_id)) = ( - evaluate_expr!(visitor, authority, ::source_id()), - evaluate_expr!(visitor, authority, ::destination_id()), - ) else { - return visitor.visit_unsupported(authority, isi); - }; - - match object { - Value::Numeric(object) => visitor.visit_transfer_asset( + match (source_id, object, destination_id) { + (IdBox::AssetId(source_id), Value::Numeric(object), IdBox::AccountId(destination_id)) => { + visitor.visit_transfer_asset( + authority, + Transfer { + source_id, + object, + destination_id, + }, + ); + } + ( + IdBox::AccountId(source_id), + Value::Id(IdBox::AssetDefinitionId(object)), + IdBox::AccountId(destination_id), + ) => visitor.visit_transfer_asset_definition( + authority, + Transfer { + source_id, + object, + destination_id, + }, + ), + ( + IdBox::AccountId(source_id), + Value::Id(IdBox::DomainId(object)), + IdBox::AccountId(destination_id), + ) => visitor.visit_transfer_domain( authority, Transfer { source_id, @@ -507,7 +536,7 @@ pub fn visit_transfer( pub fn visit_set_key_value( visitor: &mut V, authority: &AccountId, - isi: &SetKeyValueBox, + isi: &SetKeyValueExpr, ) { let object_id = evaluate_expr!(visitor, authority, ::object_id()); let key = evaluate_expr!(visitor, authority, ::key()); @@ -553,91 +582,87 @@ pub fn visit_set_key_value( pub fn visit_remove_key_value( visitor: &mut V, authority: &AccountId, - isi: &RemoveKeyValueBox, + isi: &RemoveKeyValueExpr, ) { let object_id = evaluate_expr!(visitor, authority, ::object_id()); let key = evaluate_expr!(visitor, authority, ::key()); match object_id { IdBox::AssetId(object_id) => { - visitor.visit_remove_asset_key_value(authority, RemoveKeyValue { object_id, key }) + visitor.visit_remove_asset_key_value(authority, RemoveKeyValue { object_id, key }); } IdBox::AssetDefinitionId(object_id) => visitor .visit_remove_asset_definition_key_value(authority, RemoveKeyValue { object_id, key }), IdBox::AccountId(object_id) => { - visitor.visit_remove_account_key_value(authority, RemoveKeyValue { object_id, key }) + 
visitor.visit_remove_account_key_value(authority, RemoveKeyValue { object_id, key }); } IdBox::DomainId(object_id) => { - visitor.visit_remove_domain_key_value(authority, RemoveKeyValue { object_id, key }) + visitor.visit_remove_domain_key_value(authority, RemoveKeyValue { object_id, key }); } _ => visitor.visit_unsupported(authority, isi), } } -pub fn visit_grant(visitor: &mut V, authority: &AccountId, isi: &GrantBox) { +pub fn visit_grant(visitor: &mut V, authority: &AccountId, isi: &GrantExpr) { let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); let object = evaluate_expr!(visitor, authority, ::object()); - match (object, destination_id) { - (Value::PermissionToken(object), IdBox::AccountId(destination_id)) => visitor - .visit_grant_account_permission( - authority, - Grant { - object, - destination_id, - }, - ), - (Value::Id(IdBox::RoleId(object)), IdBox::AccountId(destination_id)) => visitor - .visit_grant_account_role( - authority, - Grant { - object, - destination_id, - }, - ), + match object { + Value::PermissionToken(object) => visitor.visit_grant_account_permission( + authority, + Grant { + object, + destination_id, + }, + ), + Value::Id(IdBox::RoleId(object)) => visitor.visit_grant_account_role( + authority, + Grant { + object, + destination_id, + }, + ), _ => visitor.visit_unsupported(authority, isi), } } -pub fn visit_revoke(visitor: &mut V, authority: &AccountId, isi: &RevokeBox) { +pub fn visit_revoke(visitor: &mut V, authority: &AccountId, isi: &RevokeExpr) { let destination_id = evaluate_expr!(visitor, authority, ::destination_id()); let object = evaluate_expr!(visitor, authority, ::object()); - match (object, destination_id) { - (Value::PermissionToken(object), IdBox::AccountId(destination_id)) => visitor - .visit_revoke_account_permission( - authority, - Revoke { - object, - destination_id, - }, - ), - (Value::Id(IdBox::RoleId(object)), IdBox::AccountId(destination_id)) => visitor - .visit_revoke_account_role( - authority, - Revoke { - object, - destination_id, - }, - ), + match object { + Value::PermissionToken(object) => visitor.visit_revoke_account_permission( + authority, + Revoke { + object, + destination_id, + }, + ), + Value::Id(IdBox::RoleId(object)) => visitor.visit_revoke_account_role( + authority, + Revoke { + object, + destination_id, + }, + ), _ => visitor.visit_unsupported(authority, isi), } } -pub fn visit_upgrade(visitor: &mut V, authority: &AccountId, isi: &UpgradeBox) { +pub fn visit_upgrade(visitor: &mut V, authority: &AccountId, isi: &UpgradeExpr) { let object = evaluate_expr!(visitor, authority, ::object()); match object { - UpgradableBox::Validator(object) => { - visitor.visit_upgrade_validator(authority, Upgrade { object }) + UpgradableBox::Executor(object) => { + visitor.visit_upgrade_executor(authority, Upgrade { object }); } } } -pub fn visit_if(visitor: &mut V, authority: &AccountId, isi: &Conditional) { - let condition = evaluate_expr!(visitor, authority, ::condition()); +pub fn visit_if(visitor: &mut V, authority: &AccountId, isi: &ConditionalExpr) { + let condition = evaluate_expr!(visitor, authority, ::condition()); - // TODO: Should visit both by default or not? It will affect Validator behavior + // TODO: Should visit both by default or not? It will affect Executor behavior // because only one branch needs to be executed. 
IMO both should be validated if condition { visitor.visit_instruction(authority, isi.then()); @@ -646,7 +671,7 @@ pub fn visit_if(visitor: &mut V, authority: &AccountId, isi: } } -pub fn visit_pair(visitor: &mut V, authority: &AccountId, isi: &Pair) { +pub fn visit_pair(visitor: &mut V, authority: &AccountId, isi: &PairExpr) { visitor.visit_instruction(authority, isi.left_instruction()); visitor.visit_instruction(authority, isi.right_instruction()); } @@ -654,7 +679,7 @@ pub fn visit_pair(visitor: &mut V, authority: &AccountId, isi pub fn visit_sequence( visitor: &mut V, authority: &AccountId, - isi: &SequenceBox, + isi: &SequenceExpr, ) { for instruction in isi.instructions() { visitor.visit_instruction(authority, instruction); @@ -673,43 +698,45 @@ leaf_visitors! { // Instruction visitors visit_register_account(Register), visit_unregister_account(Unregister), - visit_mint_account_public_key(Mint), - visit_burn_account_public_key(Burn), - visit_mint_account_signature_check_condition(Mint), + visit_mint_account_public_key(Mint), + visit_burn_account_public_key(Burn), + visit_mint_account_signature_check_condition(Mint), visit_set_account_key_value(SetKeyValue), visit_remove_account_key_value(RemoveKeyValue), visit_register_asset(Register), visit_unregister_asset(Unregister), - visit_mint_asset(Mint), - visit_burn_asset(Burn), + visit_mint_asset(Mint), + visit_burn_asset(Burn), visit_transfer_asset(Transfer), visit_set_asset_key_value(SetKeyValue), visit_remove_asset_key_value(RemoveKeyValue), visit_register_asset_definition(Register), visit_unregister_asset_definition(Unregister), - visit_transfer_asset_definition(Transfer), + visit_transfer_asset_definition(Transfer), visit_set_asset_definition_key_value(SetKeyValue), visit_remove_asset_definition_key_value(RemoveKeyValue), visit_register_domain(Register), visit_unregister_domain(Unregister), + visit_transfer_domain(Transfer), visit_set_domain_key_value(SetKeyValue), visit_remove_domain_key_value(RemoveKeyValue), visit_register_peer(Register), visit_unregister_peer(Unregister), - visit_grant_account_permission(Grant), - visit_revoke_account_permission(Revoke), + visit_grant_account_permission(Grant), + visit_revoke_account_permission(Revoke), visit_register_role(Register), visit_unregister_role(Unregister), - visit_grant_account_role(Grant), - visit_revoke_account_role(Revoke), - visit_register_trigger(Register>), - visit_unregister_trigger(Unregister>), - visit_mint_trigger_repetitions(Mint, u32>), - visit_upgrade_validator(Upgrade), + visit_grant_account_role(Grant), + visit_revoke_account_role(Revoke), + visit_register_trigger(Register>), + visit_unregister_trigger(Unregister>), + visit_mint_trigger_repetitions(Mint>), + visit_burn_trigger_repetitions(Burn>), + visit_upgrade_executor(Upgrade), visit_new_parameter(NewParameter), visit_set_parameter(SetParameter), visit_execute_trigger(ExecuteTrigger), - visit_fail(&FailBox), + visit_fail(&Fail), visit_log(Log), // Query visitors @@ -753,5 +780,4 @@ leaf_visitors! { visit_find_trigger_by_id(&FindTriggerById), visit_find_trigger_key_value_by_id_and_key(&FindTriggerKeyValueByIdAndKey), visit_find_triggers_by_domain_id(&FindTriggersByDomainId), - visit_is_asset_definition_owner(&IsAssetDefinitionOwner), } diff --git a/data_model/src/wasm.rs b/data_model/src/wasm.rs deleted file mode 100644 index 4d22a3cc218..00000000000 --- a/data_model/src/wasm.rs +++ /dev/null @@ -1,120 +0,0 @@ -//! This module contains data and structures related only to WASM execution - -pub mod export { - //! 
Data which is exported from WASM to Iroha - - /// Name of the exported memory - pub const WASM_MEMORY: &str = "memory"; - - pub mod fn_names { - //! Names of the functions which are exported from Iroha to WASM - - /// Exported function to allocate memory - pub const WASM_ALLOC: &str = "_iroha_wasm_alloc"; - /// Exported function to deallocate memory - pub const WASM_DEALLOC: &str = "_iroha_wasm_dealloc"; - /// Name of the exported entry for smart contract execution - pub const SMART_CONTRACT_MAIN: &str = "_iroha_smart_contract_main"; - /// Name of the exported entry for trigger execution - pub const TRIGGER_MAIN: &str = "_iroha_trigger_main"; - /// Name of the exported entry for validator to validate transaction - pub const VALIDATOR_VALIDATE_TRANSACTION: &str = "_iroha_validator_validate_transaction"; - /// Name of the exported entry for validator to validate instruction - pub const VALIDATOR_VALIDATE_INSTRUCTION: &str = "_iroha_validator_validate_instruction"; - /// Name of the exported entry for validator to validate query - pub const VALIDATOR_VALIDATE_QUERY: &str = "_iroha_validator_validate_query"; - /// Name of the exported entry for validator to perform migration - pub const VALIDATOR_MIGRATE: &str = "_iroha_validator_migrate"; - } -} - -pub mod import { - //! Data which is imported from Iroha to WASM - - /// Name of the linked wasm module - pub const MODULE: &str = "iroha"; - - pub mod fn_names { - //! Names of the functions which are imported from Iroha to WASM - - /// Name of the imported function to execute instructions - pub const EXECUTE_ISI: &str = "execute_instruction"; - /// Name of the imported function to execute queries - pub const EXECUTE_QUERY: &str = "execute_query"; - /// Name of the imported function to get payload for smart contract - /// [`main()`](super::super::export::fn_names::SMART_CONTRACT_MAIN) entrypoint - pub const GET_SMART_CONTRACT_PAYLOAD: &str = "get_smart_contract_payload"; - /// Name of the imported function to get payload for trigger - /// [`main()`](super::super::export::fn_names::TRIGGER_MAIN) entrypoint - pub const GET_TRIGGER_PAYLOAD: &str = "get_trigger_payload"; - /// Name of the imported function to get payload for - /// [`migrate()`](super::super::export::fn_names::VALIDATOR_MIGRATE) entrypoint - pub const GET_MIGRATE_PAYLOAD: &str = "get_migrate_payload"; - /// Name of the imported function to get payload for - /// [`validate_transaction()`](super::super::export::fn_names::VALIDATOR_VALIDATE_TRANSACTION) entrypoint - pub const GET_VALIDATE_TRANSACTION_PAYLOAD: &str = "get_validate_transaction_payload"; - /// Name of the imported function to get payload for - /// [`validate_instruction()`](super::super::export::fn_names::VALIDATOR_VALIDATE_INSTRUCTION) entrypoint - pub const GET_VALIDATE_INSTRUCTION_PAYLOAD: &str = "get_validate_instruction_payload"; - /// Name of the imported function to get payload for - /// [`validate_query()`](super::super::export::fn_names::VALIDATOR_VALIDATE_QUERY) entrypoint - pub const GET_VALIDATE_QUERY_PAYLOAD: &str = "get_validate_query_payload"; - /// Name of the imported function to debug print objects - pub const DBG: &str = "dbg"; - /// Name of the imported function to log objects - pub const LOG: &str = "log"; - /// Name of the imported function to set new [`PermissionTokenSchema`](crate::permission::PermissionTokenSchema) - pub const SET_PERMISSION_TOKEN_SCHEMA: &str = "set_permission_token_schema"; - } -} - -pub mod payloads { - //! 
Payloads with function arguments for different entrypoints - - use parity_scale_codec::{Decode, Encode}; - - use crate::prelude::*; - - /// Payload for smart contract [`main()`](super::export::fn_names::SMART_CONTRACT_MAIN) entrypoint - #[derive(Debug, Clone, Encode, Decode)] - pub struct SmartContract { - /// Smart contract owner who submitted transaction with it - pub owner: AccountId, - } - - /// Payload for trigger [`main()`](super::export::fn_names::TRIGGER_MAIN) entrypoint - #[derive(Debug, Clone, Encode, Decode)] - pub struct Trigger { - /// Trigger owner who registered the trigger - pub owner: AccountId, - /// Event which triggered the execution - pub event: Event, - } - - /// Payload for [`migrate()`](super::export::fn_names::VALIDATOR_MIGRATE) entrypoint - #[derive(Debug, Clone, Copy, Encode, Decode)] - pub struct Migrate { - /// Height of the latest block in the blockchain - pub block_height: u64, - } - - /// Payload for [`validate_transaction()`](super::export::fn_names::VALIDATOR_VALIDATE_TRANSACTION) entrypoint - pub type ValidateTransaction = Validate; - - /// Payload for [`validate_instruction()`](super::export::fn_names::VALIDATOR_VALIDATE_INSTRUCTION) entrypoint - pub type ValidateInstruction = Validate; - - /// Payload for [`validate_query()`](super::export::fn_names::VALIDATOR_VALIDATE_QUERY) entrypoint - pub type ValidateQuery = Validate; - - /// Generic payload for `validate_*()` entrypoints of validator. - #[derive(Debug, Clone, Encode, Decode)] - pub struct Validate { - /// Authority which executed the operation to be validated - pub authority: AccountId, - /// Height of the latest block in the blockchain - pub block_height: u64, - /// Operation to be validated - pub to_validate: T, - } -} diff --git a/data_model/tests/data_model.rs b/data_model/tests/data_model.rs index 6c4c3bb6ef6..09cf1d602ff 100644 --- a/data_model/tests/data_model.rs +++ b/data_model/tests/data_model.rs @@ -1,12 +1,10 @@ -#![allow(clippy::too_many_lines, clippy::restriction)] - use std::str::FromStr as _; use iroha_data_model::{prelude::*, ParseError}; #[test] fn transfer_isi_should_be_valid() { - let _instruction = TransferBox::new( + let _instruction = TransferExpr::new( IdBox::AssetId("btc##seller@crypto".parse().expect("Valid")), 12_u32, IdBox::AccountId("buyer@crypto".parse().expect("Valid")), @@ -18,9 +16,9 @@ fn find_quantity_and_check_it_greater_than_value_isi_should_be_valid() { let asset_id: AssetId = "rose##alice@wonderland".parse().expect("Valid"); let find_asset = QueryBox::from(FindAssetQuantityById::new(asset_id)); - let _instruction = Conditional::new( + let _instruction = ConditionalExpr::new( Not::new(Greater::new(EvaluatesTo::new_unchecked(find_asset), 10_u32)), - FailBox::new("rate is less or equal to value"), + Fail::new("rate is less or equal to value"), ); } @@ -39,8 +37,8 @@ impl FindRateAndCheckItGreaterThanValue { } } - pub fn into_isi(self) -> Conditional { - Conditional::new( + pub fn into_isi(self) -> ConditionalExpr { + ConditionalExpr::new( Not::new(Greater::new( EvaluatesTo::new_unchecked(QueryBox::from(FindAssetQuantityById::new( AssetId::new( @@ -52,7 +50,7 @@ impl FindRateAndCheckItGreaterThanValue { ))), self.value, )), - FailBox::new("rate is less or equal to value"), + Fail::new("rate is less or equal to value"), ) } } diff --git a/default_validator/.cargo/config.toml b/default_executor/.cargo/config.toml similarity index 100% rename from default_validator/.cargo/config.toml rename to default_executor/.cargo/config.toml diff --git 
a/default_validator/Cargo.toml b/default_executor/Cargo.toml similarity index 79% rename from default_validator/Cargo.toml rename to default_executor/Cargo.toml index f45204042fd..fea5b33a828 100644 --- a/default_validator/Cargo.toml +++ b/default_executor/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "iroha_default_validator" +name = "iroha_default_executor" edition = "2021" -version = "2.0.0-pre-rc.19" +version = "2.0.0-pre-rc.20" # TODO: teams are being deprecated update the authors URL authors = ["Iroha 2 team "] @@ -24,6 +24,7 @@ opt-level = "z" # Optimize for size vs speed with "s"/"z"(removes vectorizat codegen-units = 1 # Further reduces binary size but increases compilation time [dependencies] -iroha_validator = { version = "2.0.0-pre-rc.19", path = "../wasm/validator", features = ["debug"]} +iroha_executor = { version = "2.0.0-pre-rc.20", path = "../smart_contract/executor", features = ["debug"]} +lol_alloc = "0.4.0" panic-halt = "0.2.0" diff --git a/default_validator/LICENSE b/default_executor/LICENSE similarity index 100% rename from default_validator/LICENSE rename to default_executor/LICENSE diff --git a/default_validator/README.md b/default_executor/README.md similarity index 55% rename from default_validator/README.md rename to default_executor/README.md index 98d12732107..a404dd83950 100644 --- a/default_validator/README.md +++ b/default_executor/README.md @@ -1,8 +1,8 @@ -# `iroha_default_validator` +# `iroha_default_executor` Use the [Wasm Builder CLI](../tools/wasm_builder_cli) in order to build it: ```bash cargo run --bin iroha_wasm_builder_cli -- \ - build ./default_validator --optimize --outfile ./configs/peer/validator.wasm + build ./default_executor --optimize --outfile ./configs/peer/executor.wasm ``` \ No newline at end of file diff --git a/default_executor/src/lib.rs b/default_executor/src/lib.rs new file mode 100644 index 00000000000..4834bcc9228 --- /dev/null +++ b/default_executor/src/lib.rs @@ -0,0 +1,61 @@ +//! Iroha default executor. + +#![no_std] + +extern crate alloc; +#[cfg(not(test))] +extern crate panic_halt; + +use alloc::borrow::ToOwned as _; + +use iroha_executor::{default::default_permission_token_schema, prelude::*, smart_contract}; +use lol_alloc::{FreeListAllocator, LockedAllocator}; + +#[global_allocator] +static ALLOC: LockedAllocator = LockedAllocator::new(FreeListAllocator::new()); + +/// Executor that replaces some of [`Validate`]'s methods with sensible defaults +/// +/// # Warning +/// +/// The defaults are not guaranteed to be stable. +#[derive(Clone, Constructor, Debug, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)] +pub struct Executor { + verdict: Result, + block_height: u64, + host: smart_contract::Host, +} + +impl Executor { + fn ensure_genesis(block_height: u64) -> MigrationResult { + if block_height != 0 { + return Err("Default Executor is intended to be used only in genesis. \ + Write your own executor if you need to upgrade executor on existing chain." + .to_owned()); + } + + Ok(()) + } +} + +/// Migrate previous executor to the current version. +/// Called by Iroha once just before upgrading executor. +/// +/// # Errors +/// +/// Concrete errors are specific to the implementation. +/// +/// If `migrate()` entrypoint fails then the whole `Upgrade` instruction +/// will be denied and previous executor will stay unchanged. 
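This denial semantics is what makes an upgrade safe to attempt against a live chain. A sketch of the submitting side, with the caveat that `Executor::new`, `WasmSmartContract::from_compiled`, `UpgradeExpr::new`, and the client handle are assumed API names that do not appear in this changeset:

```rust
use iroha_client::client::Client;
use iroha_data_model::prelude::*;

// Hypothetical submission flow: if the new executor's `migrate()` returns
// `Err`, the `Upgrade` instruction is rejected and the old executor is kept.
fn upgrade_executor(client: &Client, wasm: Vec<u8>) {
    let executor = Executor::new(WasmSmartContract::from_compiled(wasm));
    client
        .submit_blocking(UpgradeExpr::new(executor))
        .expect("upgrade denied: the new executor's migrate() failed, so the previous executor was kept");
}
```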
+#[entrypoint] +pub fn migrate(block_height: u64) -> MigrationResult { + Executor::ensure_genesis(block_height)?; + + let schema = default_permission_token_schema(); + let (token_ids, schema_str) = schema.serialize(); + iroha_executor::set_permission_token_schema( + &iroha_executor::data_model::permission::PermissionTokenSchema::new(token_ids, schema_str), + ); + + Ok(()) +} diff --git a/default_validator/src/lib.rs b/default_validator/src/lib.rs deleted file mode 100644 index 7a670099a5a..00000000000 --- a/default_validator/src/lib.rs +++ /dev/null @@ -1,196 +0,0 @@ -//! Iroha default validator. - -#![no_std] -#![allow(missing_docs, clippy::missing_errors_doc)] - -extern crate alloc; -#[cfg(not(test))] -extern crate panic_halt; - -use alloc::borrow::ToOwned as _; - -use iroha_validator::{ - data_model::evaluate::ExpressionEvaluator, default::default_permission_token_schema, - iroha_wasm, prelude::*, -}; - -/// Validator that replaces some of [`Validate`]'s methods with sensible defaults -/// -/// # Warning -/// -/// The defaults are not guaranteed to be stable. -#[derive(Debug, Clone)] -pub struct Validator { - verdict: Result, - block_height: u64, - host: iroha_wasm::Host, -} - -impl Validator { - /// Construct [`Self`] - pub fn new(block_height: u64) -> Self { - Self { - verdict: Ok(()), - block_height, - host: iroha_wasm::Host, - } - } - - fn ensure_genesis(block_height: u64) -> MigrationResult { - if block_height != 0 { - return Err("Default Validator is intended to be used only in genesis. \ - Write your own validator if you need to upgrade validator on existing chain." - .to_owned()); - } - - Ok(()) - } -} - -macro_rules! defaults { - ( $($validator:ident $(<$param:ident $(: $bound:path)?>)?($operation:ty)),+ $(,)? ) => { $( - fn $validator $(<$param $(: $bound)?>)?(&mut self, authority: &AccountId, operation: $operation) { - iroha_validator::default::$validator(self, authority, operation) - } )+ - }; -} - -impl Visit for Validator { - defaults! 
{ - visit_unsupported(T), - - visit_transaction(&VersionedSignedTransaction), - visit_instruction(&InstructionBox), - visit_expression(&EvaluatesTo), - visit_sequence(&SequenceBox), - visit_if(&Conditional), - visit_pair(&Pair), - - // Peer validation - visit_unregister_peer(Unregister), - - // Domain validation - visit_unregister_domain(Unregister), - visit_set_domain_key_value(SetKeyValue), - visit_remove_domain_key_value(RemoveKeyValue), - - // Account validation - visit_unregister_account(Unregister), - visit_mint_account_public_key(Mint), - visit_burn_account_public_key(Burn), - visit_mint_account_signature_check_condition(Mint), - visit_set_account_key_value(SetKeyValue), - visit_remove_account_key_value(RemoveKeyValue), - - // Asset validation - visit_register_asset(Register), - visit_unregister_asset(Unregister), - visit_mint_asset(Mint), - visit_burn_asset(Burn), - visit_transfer_asset(Transfer), - visit_set_asset_key_value(SetKeyValue), - visit_remove_asset_key_value(RemoveKeyValue), - - // AssetDefinition validation - visit_unregister_asset_definition(Unregister), - visit_transfer_asset_definition(Transfer), - visit_set_asset_definition_key_value(SetKeyValue), - visit_remove_asset_definition_key_value(RemoveKeyValue), - - // Permission validation - visit_grant_account_permission(Grant), - visit_revoke_account_permission(Revoke), - - // Role validation - visit_register_role(Register), - visit_unregister_role(Unregister), - visit_grant_account_role(Grant), - visit_revoke_account_role(Revoke), - - // Trigger validation - visit_unregister_trigger(Unregister>), - visit_mint_trigger_repetitions(Mint, u32>), - visit_execute_trigger(ExecuteTrigger), - - // Parameter validation - visit_set_parameter(SetParameter), - visit_new_parameter(NewParameter), - - // Upgrade validation - visit_upgrade_validator(Upgrade), - } -} - -impl Validate for Validator { - fn verdict(&self) -> &Result { - &self.verdict - } - - fn block_height(&self) -> u64 { - self.block_height - } - - fn deny(&mut self, reason: ValidationFail) { - self.verdict = Err(reason); - } -} - -impl ExpressionEvaluator for Validator { - fn evaluate( - &self, - expression: &E, - ) -> core::result::Result { - self.host.evaluate(expression) - } -} - -/// Migrate previous validator to the current version. -/// Called by Iroha once just before upgrading validator. -/// -/// # Errors -/// -/// Concrete errors are specific to the implementation. -/// -/// If `migrate()` entrypoint fails then the whole `Upgrade` instruction -/// will be denied and previous validator will stay unchanged. 
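Most of this deleted file is mechanical forwarding: each entry in the `defaults!` invocation above expands to a method that delegates to `iroha_validator::default`. Expanded by hand for a single entry (shown in isolation; the `Peer` generic parameter is restored here, since the flattened diff dropped it), the pattern is:

```rust
impl Visit for Validator {
    // One of the dozens of generated methods; every `defaults!` entry
    // expands to this same delegation shape.
    fn visit_unregister_peer(&mut self, authority: &AccountId, operation: Unregister<Peer>) {
        iroha_validator::default::visit_unregister_peer(self, authority, operation)
    }
}
```

The replacement `default_executor` gets the equivalent behaviour from `#[derive(Visit, Validate, ValidateEntrypoints)]` instead of spelling the list out.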
-#[entrypoint] -pub fn migrate(block_height: u64) -> MigrationResult { - Validator::ensure_genesis(block_height)?; - - let schema = default_permission_token_schema(); - let (token_ids, schema_str) = schema.serialize(); - iroha_validator::iroha_wasm::set_permission_token_schema( - &iroha_validator::data_model::permission::PermissionTokenSchema::new(token_ids, schema_str), - ); - - Ok(()) -} - -#[entrypoint] -pub fn validate_transaction( - authority: AccountId, - transaction: VersionedSignedTransaction, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_transaction(&authority, &transaction); - validator.verdict -} - -#[entrypoint] -pub fn validate_instruction( - authority: AccountId, - instruction: InstructionBox, - block_height: u64, -) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_instruction(&authority, &instruction); - validator.verdict -} - -#[entrypoint] -pub fn validate_query(authority: AccountId, query: QueryBox, block_height: u64) -> Result { - let mut validator = Validator::new(block_height); - validator.visit_query(&authority, &query); - validator.verdict -} diff --git a/docker-compose.dev.local.yml b/docker-compose.dev.local.yml index 8e0fe8d562e..162572d7dd7 100644 --- a/docker-compose.dev.local.yml +++ b/docker-compose.dev.local.yml @@ -1,3 +1,6 @@ +# This file is generated by iroha_swarm. +# Do not edit it manually. + version: '3.8' services: iroha0: @@ -8,14 +11,12 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"8f4c15e5d664da3f13778801d23d4e89b76e94c1b94b389544168b6cb894f84f8ba62848cf767d72e7f7f4b9d2d7ba07fee33760f79abe5597a51520e292a0cb"}' TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 - TORII_TELEMETRY_URL: iroha0:8180 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"82b3bde54aebeca4146257da0de8d59d8e46d5fe34887dcd8072866792fcb3ad4164bf554923ece1fd412d241036d863a6ae430476c898248b8237d77534cfc4"}' SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1337:1337 - 8080:8080 - - 8180:8180 volumes: - ./configs/peer:/config init: true @@ -28,13 +29,11 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"c02ffad5e455e7ec620d74de5769681e4d8385906bce5a437eb67452a9efbbc2815bbdc9775d28c3633269b25f22d048e2aa2e36017cbe5ad85f15220beb6f6f"}' TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 - TORII_TELEMETRY_URL: iroha1:8181 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 SUMERAGI_TRUSTED_PEERS: 
'[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1338:1338 - 8081:8081 - - 8181:8181 volumes: - ./configs/peer:/config init: true @@ -46,13 +45,11 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"29c5ed1409cb10fd791bc4ff8a6cb5e22a5fae7e36f448ef3ea2988b1319a88bf417e0371e6adb32fd66749477402b1ab67f84a8e9b082e997980cc91f327736"}' TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 - TORII_TELEMETRY_URL: iroha2:8182 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1339:1339 - 8082:8082 - - 8182:8182 volumes: - ./configs/peer:/config init: true @@ -64,13 +61,11 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"5eed4855fad183c451aac39dfc50831607e4cf408c98e2b977f3ce4a2df42ce2a66522370d60b9c09e79ade2e9bb1ef2e78733a944b999b3a6aee687ce476d61"}' TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 - TORII_TELEMETRY_URL: iroha3:8183 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1340:1340 - 8083:8083 - - 8183:8183 volumes: - ./configs/peer:/config init: true diff --git a/docker-compose.dev.single.yml b/docker-compose.dev.single.yml index ed47d5d35bc..9a4891ff227 100644 --- a/docker-compose.dev.single.yml +++ b/docker-compose.dev.single.yml @@ -1,3 +1,6 @@ +# This file is generated by iroha_swarm. +# Do not edit it manually. 
+ version: '3.8' services: iroha0: @@ -8,14 +11,12 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"8f4c15e5d664da3f13778801d23d4e89b76e94c1b94b389544168b6cb894f84f8ba62848cf767d72e7f7f4b9d2d7ba07fee33760f79abe5597a51520e292a0cb"}' TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 - TORII_TELEMETRY_URL: iroha0:8180 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"82b3bde54aebeca4146257da0de8d59d8e46d5fe34887dcd8072866792fcb3ad4164bf554923ece1fd412d241036d863a6ae430476c898248b8237d77534cfc4"}' SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"}]' ports: - 1337:1337 - 8080:8080 - - 8180:8180 volumes: - ./configs/peer:/config init: true diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index e96423a2864..9c45d746a06 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -1,3 +1,6 @@ +# This file is generated by iroha_swarm. +# Do not edit it manually. + version: '3.8' services: iroha0: @@ -8,14 +11,12 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"8f4c15e5d664da3f13778801d23d4e89b76e94c1b94b389544168b6cb894f84f8ba62848cf767d72e7f7f4b9d2d7ba07fee33760f79abe5597a51520e292a0cb"}' TORII_P2P_ADDR: iroha0:1337 TORII_API_URL: iroha0:8080 - TORII_TELEMETRY_URL: iroha0:8180 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"82b3bde54aebeca4146257da0de8d59d8e46d5fe34887dcd8072866792fcb3ad4164bf554923ece1fd412d241036d863a6ae430476c898248b8237d77534cfc4"}' SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1337:1337 - 8080:8080 - - 8180:8180 volumes: - ./configs/peer:/config init: true @@ -28,13 +29,11 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"c02ffad5e455e7ec620d74de5769681e4d8385906bce5a437eb67452a9efbbc2815bbdc9775d28c3633269b25f22d048e2aa2e36017cbe5ad85f15220beb6f6f"}' TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 - TORII_TELEMETRY_URL: iroha1:8181 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1338:1338 - 8081:8081 - - 8181:8181 volumes: - ./configs/peer:/config init: true @@ -46,13 +45,11 @@ services: IROHA_PRIVATE_KEY: 
'{"digest_function":"ed25519","payload":"29c5ed1409cb10fd791bc4ff8a6cb5e22a5fae7e36f448ef3ea2988b1319a88bf417e0371e6adb32fd66749477402b1ab67f84a8e9b082e997980cc91f327736"}' TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 - TORII_TELEMETRY_URL: iroha2:8182 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1339:1339 - 8082:8082 - - 8182:8182 volumes: - ./configs/peer:/config init: true @@ -64,13 +61,11 @@ services: IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"5eed4855fad183c451aac39dfc50831607e4cf408c98e2b977f3ce4a2df42ce2a66522370d60b9c09e79ade2e9bb1ef2e78733a944b999b3a6aee687ce476d61"}' TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 - TORII_TELEMETRY_URL: iroha3:8183 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01204164BF554923ECE1FD412D241036D863A6AE430476C898248B8237D77534CFC4 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha1:1338","public_key":"ed0120815BBDC9775D28C3633269B25F22D048E2AA2E36017CBE5AD85F15220BEB6F6F"},{"address":"iroha0:1337","public_key":"ed01208BA62848CF767D72E7F7F4B9D2D7BA07FEE33760F79ABE5597A51520E292A0CB"},{"address":"iroha3:1340","public_key":"ed0120A66522370D60B9C09E79ADE2E9BB1EF2E78733A944B999B3A6AEE687CE476D61"},{"address":"iroha2:1339","public_key":"ed0120F417E0371E6ADB32FD66749477402B1AB67F84A8E9B082E997980CC91F327736"}]' ports: - 1340:1340 - 8083:8083 - - 8183:8183 volumes: - ./configs/peer:/config init: true diff --git a/docs/README.md b/docs/README.md index 2744d219045..11e941c5e8f 100644 --- a/docs/README.md +++ b/docs/README.md @@ -3,8 +3,8 @@ This is the main Iroha 2 documentation that you will find useful: - [Tutorial](https://hyperledger.github.io/iroha-2-docs/) -- [API Specification](./source/references/api_spec.md) -- [Configuration Options](./source/references/config.md) +- [API Reference](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints) +- [Configuration Reference](./source/references/config.md) - [Iroha 2 Whitepaper](./source/iroha_2_whitepaper.md) ## Tools @@ -19,5 +19,4 @@ Documentation for Iroha 2 tools: The following is useful for development: - [Hot reload Iroha in a Docker container](./source/guides/hot-reload.md) -- [Develop a new Iroha module](./source/guides/develop-iroha-module.md) - [Benchmark your code](../client/benches/tps/README.md) diff --git a/docs/source/guides/develop-iroha-module.md b/docs/source/guides/develop-iroha-module.md deleted file mode 100644 index b8810cfa5ba..00000000000 --- a/docs/source/guides/develop-iroha-module.md +++ /dev/null @@ -1,140 +0,0 @@ -# How to Develop a New Iroha Module - -When you need to add some functionality to Iroha, use this guide to develop a new Iroha Module. - -## Prerequisites - -* [Rust](https://www.rust-lang.org/tools/install) -* Text Editor or IDE - -## Steps - -### 1. Create new Rust module inside Iroha crate - -Inside `core/src/lib.rs` add a declaration of your new module. 
-For example, for the `bridge` module we add the following declaration, - -```rust -#[cfg(feature = "bridge")] -pub mod bridge; -``` - -so for your module `x` you would add `pub mod x;`. -You should also place your new module behind a [Cargo feature](https://doc.rust-lang.org/cargo/reference/features.html) so that other developers are able to turn it on and off when needed. - -Now, create a separate file for your module. -For the `bridge` module it will be `core/src/bridge.rs`. -Likewise, for your module `x` you will need to create a new file `core/src/x.rs`. - -### 2. Add documentation - -Each module must provide a description of its own functionality via Rust Docs. - -For that, at the beginning of the module file you should place a docs block for the enclosing item. - -```rust -//! Here you can see a good description of the module `x` and its functionality. -``` - -All public entities of your module should be documented as well. But first, let's create them. - -### 3. Write your logic - -The development of a new Iroha Module has the goal of bringing new functionality to Iroha. -So, based on the goal and requirements you have, you will introduce new entities and place them inside the newly created module. - -Let's specify particular categories of such entities and look at how they can be implemented according to Iroha best practices. - -#### 4. Add a custom Iroha Special Instruction - -If you need some module-related Iroha Special Instructions, you should add an `isi` submodule to the file of your newly created module, like this: - -```rust -... -pub mod isi { -} -``` - -Inside this submodule you may declare new Iroha Special Instructions. -To provide safety guarantees, Iroha Modules can create new Iroha Special Instructions composed of the Out of the Box Instructions. - -Let's look at the [example](https://github.com/hyperledger/iroha/blob/2005335348585b03b3ee7887272af4c76170c10a/iroha/src/bridge.rs) from the `bridge` Iroha Module: - -```rust -... -pub fn register_bridge(&self, bridge_definition: BridgeDefinition) -> Instruction { - let seed = crate::crypto::hash(bridge_definition.encode()); - let public_key = crate::crypto::generate_key_pair_from_seed(seed) - .expect("Failed to generate key pair.") - .0; - let domain = Domain::new(bridge_definition.id.name.clone()); - let account = Account::new("bridge", &domain.name, public_key); - Instruction::If( - Box::new(Instruction::ExecuteQuery(IrohaQuery::GetAccount( - GetAccount { - account_id: bridge_definition.owner_account_id.clone(), - }, - ))), - Box::new(Instruction::Sequence(vec![ - Add { - object: domain.clone(), - destination_id: self.id.clone(), - } - .into(), - Register { - object: account.clone(), - destination_id: domain.name, - } - .into(), - Mint { - object: ( - "owner_id".to_string(), - bridge_definition.owner_account_id.to_string(), - ), - destination_id: AssetId { - definition_id: owner_asset_definition_id(), - account_id: account.id.clone(), - }, - } - .into(), - Mint { - object: ( - "bridge_definition".to_string(), - format!("{:?}", bridge_definition.encode()), - ), - destination_id: AssetId { - definition_id: bridge_asset_definition_id(), - account_id: account.id, - }, - } - .into(), - ])), - None, - ) -} -... -``` - -Now let's see what it does to register a new Bridge: - -1. Check that the Bridge Owner's Account exists and terminate execution if it does not. -1. Add a new Domain. -1. Register a new Account. -1. Mint one Asset. -1. Mint another Asset.
- -We will not discuss Bridge-related terminology here – the thing we want to look at is how we can compose these steps into one new Iroha Special Instruction. - -As you can see, we have `Instruction::If(...)` here. It's [a utility Iroha Special Instruction](references/glossary#utility-iroha-special-instruction). -It takes three arguments: `condition`, `instruction_to_do_if_true`, `instruction_to_do_if_false_or_nothing`. -With this instruction we've implemented the first step of our algorithm: run a check and terminate execution if there is no Owner's Account. -Inside `condition` we placed `Instruction::ExecuteQuery(...)`, which fails if the [Iroha Query](references/glossary#iroha-query) fails. - -If the first step succeeds, we should move forward and execute a sequence of the following steps. -For this purpose we also have the utility Iroha Special Instruction `Sequence`, which takes a [vector](https://doc.rust-lang.org/alloc/vec/struct.Vec.html) of Iroha Special Instructions executed one by one. - -Inside this sequence we use the [domains-related Iroha Special Instructions](references/glossary#domains-related-iroha-special-instruction) `Add`, `Register`, and `Mint` twice. - -## Additional resources - -* //TODO: add link to the pair programming session on `Bridge` module. diff --git a/docs/source/guides/hot-reload.md b/docs/source/guides/hot-reload.md index 98dd49561e9..584ef643944 100644 --- a/docs/source/guides/hot-reload.md +++ b/docs/source/guides/hot-reload.md @@ -7,7 +7,7 @@ Here is the overall procedure for hot reloading Iroha in a Docker container: To avoid issues with dynamic linking, run: ```bash - cargo build --release --target x86_64-unknown-linux-musl --features "vendored" + cargo build --release --target x86_64-unknown-linux-musl ```

An explanation for using `cargo build` with these parameters follows. @@ -15,7 +15,6 @@ Here is the overall procedure for hot reloading Iroha in a Docker container: You may experience an issue with dynamic linking if your host OS has a newer version of `glibc` compared to the one in the Docker container. The options used in the command above resolve the issue: - `--target x86_64-unknown-linux-musl` forces static linking against the `musl` libc implementation - - `--features "vendored"` facilitates static linkage of the `openssl` library
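A small guard can make a wrong-target build visible at compile time instead of at container startup. The following is a minimal sketch, assuming a hypothetical `build.rs` in the peer crate: `TARGET` and `cargo:warning` are standard Cargo build-script facilities, but this check itself is not part of the repository.

```rust
// build.rs (hypothetical): warn when the peer is not built for a musl target,
// since a glibc-linked binary may fail to start inside the container.
fn main() {
    // Cargo sets TARGET for build scripts, e.g. "x86_64-unknown-linux-musl".
    let target = std::env::var("TARGET").unwrap_or_default();
    if !target.contains("musl") {
        // Surfaces in cargo output without failing the build.
        println!("cargo:warning=target `{target}` is not musl; the binary may dynamically link glibc");
    }
}
```

Alternatively, running `file` on the produced binary should report it as statically linked.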
diff --git a/docs/source/iroha_2_whitepaper.md b/docs/source/iroha_2_whitepaper.md index 877e45a9e1f..10f762f111f 100644 --- a/docs/source/iroha_2_whitepaper.md +++ b/docs/source/iroha_2_whitepaper.md @@ -339,7 +339,7 @@ To retrieve information about World State View of the Peer clients will use Iroh ### 2.14 Client API -Client libraries interact with Iroha over the HTTP and WebSocket. Check the up to date API reference [here](./references/api_spec.md). +Client libraries interact with Iroha over the HTTP and WebSocket. For details about the available endpoints, see [API Reference > Torii Endpoints](https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints). ### 2.15 Versioning diff --git a/docs/source/lts_selection.org b/docs/source/lts_selection.org index 9ad06ad2f6d..45bde0cc9dd 100644 --- a/docs/source/lts_selection.org +++ b/docs/source/lts_selection.org @@ -65,7 +65,7 @@ for selecting a particular version as an LTS candidate This is different to point 1, because while RC10 and RC13 were free from major problems, they did not offer sufficiently full coverage - of the features required for the major project stakeholders. + of the features required for the major project stakeholders. 4. The release must be compatible with all SDKs. @@ -73,7 +73,7 @@ for selecting a particular version as an LTS candidate candidates, creating a catch 22 situation, it is an exception. The two other major SDKs, namely Kotlin, and TypeScript must be compatible with LTS candidate. If the SDK cannot be made compatible - with the candidate, the candidate release is disqualified. + with the candidate, the candidate release is disqualified. 5. The release must be well-tested. @@ -84,7 +84,7 @@ for selecting a particular version as an LTS candidate This generally applies to all releases after RC6 and until RC17, as the development team was told of a way of restructuring the integration tests, but the process only ended with RC17 and the - introduction of the =pytest= framework. + introduction of the =pytest= framework. While passing integration tests is necessary it is not sufficient, the following minimal testing is required to qualify a release as @@ -186,4 +186,4 @@ because rather than a misapplication of an existing term, this is an invention of a term that /could/ have subtle differences from a release candidate. In reality these should have been called *development snapshots*. But in order not to break continuity too -much with the existing SDKs a compromise was chosen. +much with the existing SDKs a compromise was chosen. 
diff --git a/docs/source/references/api_spec.md b/docs/source/references/api_spec.md deleted file mode 100644 index 75d1a6f8130..00000000000 --- a/docs/source/references/api_spec.md +++ /dev/null @@ -1,381 +0,0 @@ -# API Specification for Client Libraries - -## Endpoints for API - -### Transaction - -**Protocol**: HTTP - -**Encoding**: [Parity Scale Codec](#parity-scale-codec) - -**Endpoint**: `/transaction` - -**Method**: `POST` - -**Expects**: Body: `VersionedSignedTransaction` - -**Responses**: - -| Status | Description | |--------|------------------------------------------------------------------------| | 200 | Transaction Accepted (But not guaranteed to have passed consensus yet) | | 400 | Transaction Rejected (Malformed) | | 401 | Transaction Rejected (Improperly signed) | - -### Query - -**Protocol**: HTTP - -**Encoding**: [Parity Scale Codec](#parity-scale-codec) - -**Endpoint**: `/query` - -**Method**: `POST` - -**Expects**: - -- Body: `VersionedSignedQuery` -- Query parameters: - - `start`: Optional parameter in queries where results can be indexed. Use it to return results from a specified point. - Where results can be ordered, they are ordered by id, which uses - Rust's [PartialOrd](https://doc.rust-lang.org/std/cmp/trait.PartialOrd.html#derivable) - and [Ord](https://doc.rust-lang.org/std/cmp/trait.Ord.html) traits. - - `limit`: Optional parameter in queries where results can be indexed. Use it to return a specific number of results. - - `sort_by_metadata_key`: Optional parameter in queries. Use it to sort results containing metadata with a given key. - -**Responses**: - -| Response | Status | Body | |-----------------|--------|--------------------------------------------| | Signature err. | 401 | `QueryExecutionFail::Signature(String)` | | Permission err. | 403 | `QueryExecutionFail::Permission(String)` | | Evaluate err. | 400 | `QueryExecutionFail::Evaluate(String)` | | Find err. | 404 | `QueryExecutionFail::Find(Box)` | | Conversion err. | 400 | `QueryExecutionFail::Conversion(String)` | | Success | 200 | `VersionedPaginatedQueryResult` | - -#### Account Not Found 404 - -Which `FindError` is returned depends on whether each prerequisite object was found: - -| Domain | Account | `FindError` | |--------|---------|---------------------------------| | N | - | `FindError::Domain(DomainId)` | | Y | N | `FindError::Account(AccountId)` | - -#### Asset Not Found 404 - -Which `FindError` is returned depends on whether each prerequisite object was found: - -| Domain | Account | Asset Definition | Asset | `FindError` | |--------|---------|------------------|-------|-------------------------------------------------| | N | - | - | - | `FindError::Domain(DomainId)` | | Y | N | - | - | `FindError::Account(AccountId)` | | Y | - | N | - | `FindError::AssetDefinition(AssetDefinitionId)` | | Y | Y | Y | N | `FindError::Asset(AssetId)` | - -### Events - -**Protocol**: HTTP - -**Protocol Upgrade**: `WebSocket` - -**Encoding**: [Parity Scale Codec](#parity-scale-codec) - -**Endpoint**: `/events` - -**Communication**: - -After the handshake, the client should send a `VersionedEventSubscriptionRequest`. Then the server sends `VersionedEventMessage`. - -**Notes**: - -Usually, the client waits for Transaction events. - -Transaction event statuses can be `Validating`, `Committed`, or `Rejected`. - -Transaction statuses proceed from `Validating` to either `Committed` or `Rejected`. -However, due to the distributed nature of the network, some peers might receive events out of order (e.g. `Committed` -before `Validating`).
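Since a final status may arrive before a `Validating` event, a client-side handler should treat `Committed` and `Rejected` as sticky. Below is a minimal sketch of that rule, using stand-in types rather than the actual Iroha data model:

```rust
// Stand-in for the transaction statuses described above (not the real Iroha types).
#[derive(Clone, Copy, Debug, PartialEq)]
enum TxStatus {
    Validating,
    Committed,
    Rejected,
}

// Fold an incoming event into the tracked status: a final status
// (`Committed`/`Rejected`) is never downgraded by a late `Validating`.
fn apply(current: Option<TxStatus>, incoming: TxStatus) -> Option<TxStatus> {
    match current {
        Some(TxStatus::Committed) | Some(TxStatus::Rejected)
            if incoming == TxStatus::Validating => current,
        _ => Some(incoming),
    }
}

fn main() {
    // `Committed` arriving before `Validating` still settles on `Committed`.
    let status = apply(None, TxStatus::Committed);
    let status = apply(status, TxStatus::Validating);
    assert_eq!(status, Some(TxStatus::Committed));

    // The in-order path behaves as expected as well.
    let rejected = apply(Some(TxStatus::Validating), TxStatus::Rejected);
    assert_eq!(rejected, Some(TxStatus::Rejected));
}
```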
- -It's possible that some peers in the network are offline for the validation round. If the client connects to them while -they are offline, the peers might not respond with the `Validating` status. -But when the offline peers come back online they will synchronize the blocks. They are then guaranteed to respond with -the `Committed` (or `Rejected`) status depending on the information found in the block. - -### Pending Transactions - -**Protocol**: HTTP - -**Encoding**: [Parity Scale Codec](#parity-scale-codec) - -**Endpoint**: `/pending_transactions` - -**Method**: `GET` - -**Expects**: - -_Internal use only._ Returns the transactions pending at the moment. - -### Blocks Stream - -**Protocol**: HTTP - -**Protocol Upgrade**: `WebSocket` - -**Encoding**: [Parity Scale Codec](#parity-scale-codec) - -**Endpoint**: `/block/stream` - -**Communication**: - -Client should send `VersionedBlockSubscriptionRequest` to initiate communication after WebSocket handshake. Then server sends `VersionedBlockMessage`. - -**Notes**: - -Via this endpoint client first provides the starting block number (i.e. height) in the subscription request. After -sending the confirmation message, server starts streaming all the blocks from the given block number up to the current -block and continues to stream blocks as they are added to the blockchain. - -### Get Configuration - -**Protocol**: HTTP - -**Encoding**: JSON - -**Endpoint**: `/configuration` - -**Method**: `GET` - -**Expects**: -There are 2 options: - -- Expects: a JSON body `"Value"`. Returns: configuration value as JSON. -- Expects: a JSON body that specifies the field (see example below). Returns: documentation for a specific field (as - JSON string) or `null`. - -Note that if the requested field has more fields inside of it, then all the documentation for its inner members is -returned as well. -Here is an example for getting a field `a.b.c`: - -```json -{ - "Docs": [ - "a", - "b", - "c" - ] -} -``` - -**Examples**: -To get the top-level configuration docs for [`Torii`] and all the fields within it: - -```bash -curl -X GET -H 'content-type: application/json' http://127.0.0.1:8080/configuration -d '{"Docs" : ["torii"]} ' -i -``` - -**Responses**: - -- 200 OK: Field was found and either doc or value is returned in json body. -- 404 Not Found: Field wasn't found - -### Configuration - -**Protocol**: HTTP - -**Encoding**: JSON - -**Endpoint**: `/configuration` - -**Method**: `POST` - -**Expects**: -One configuration option is currently supported: `LogLevel`. It is set to the log-level in uppercase. - -```json -{ - "LogLevel": "WARN" -} -``` - -Acceptable values are `TRACE`, `DEBUG`, `INFO`, `WARN`, `ERROR`, corresponding to -the [respective configuration options](./config.md#loggermaxloglevel). - -**Responses**: - -- 200 OK: Log level has changed successfully. The confirmed new log level is returned in the body. -- 400 Bad Request: request body malformed. -- 500 Internal Server Error: Request body valid, but changing the log level failed (lock contention). - -### Health - -**Protocol**: HTTP - -**Encoding**: JSON - -**Endpoint**: `/health` - -**Method**: `GET` - -**Expects**: - - -**Responses**: - -- 200 OK: The peer is up. 
- Also returns current status of peer in json string: - -``` -"Healthy" -``` - -## Endpoints for Status/Metrics - -### Status - -**Protocol**: HTTP - -**Encoding**: JSON - -**Endpoint**: `/status` - -**Method**: `GET` - -**Expects**: - - -**Responses**: - -200 OK reports status as JSON: - -```json5 -// Note: while this snippet is JSON5 (for better readability), -// the actual response is JSON -{ - /** - * Number of connected peers, except for the reporting peer itself - */ - peers: 3, - /** - * Number of committed blocks (block height) - */ - blocks: 1, - /** - * Total number of accepted transactions - */ - txs_accepted: 3, - /** - * Total number of rejected transactions - */ - txs_rejected: 0, - /** - * Uptime with nanosecond precision since creation of the genesis block - */ - uptime: { - secs: 5, - nanos: 937000000, - }, - /** - * Number of view changes in the current round - */ - view_changes: 0, -} -``` - -**CAUTION**: Almost all fields are 64-bit integers and should be handled with care in JavaScript. Only the `nanos` field -is a 32-bit integer. See `iroha_telemetry::metrics::Status`. - -**Sub-routing**: To obtain the value of a specific field, one can append the name of the field to the path, -e.g. `status/peers`. This returns the corresponding JSON value, inline, so strings are quoted, numbers are not and maps -are presented as above. - -### Metrics - -**Protocol**: HTTP - -**Encoding**: Prometheus - -**Endpoint**: `/metrics` - -**Method**: `GET` - -**Expects**: - - -**Responses**: - -- 200 OK reports 8 of 10 metrics: - -```bash -# HELP accounts User accounts registered at this time -# TYPE accounts gauge -accounts{domain="genesis"} 1 -accounts{domain="wonderland"} 1 -# HELP block_height Current block height -# TYPE block_height counter -block_height 1 -# HELP connected_peers Total number of currently connected peers -# TYPE connected_peers gauge -connected_peers 0 -# HELP domains Total number of domains -# TYPE domains gauge -domains 2 -# HELP tx_amount average amount involved in a transaction on this peer -# TYPE tx_amount histogram -tx_amount_bucket{le="0.005"} 0 -tx_amount_bucket{le="0.01"} 0 -tx_amount_bucket{le="0.025"} 0 -tx_amount_bucket{le="0.05"} 0 -tx_amount_bucket{le="0.1"} 0 -tx_amount_bucket{le="0.25"} 0 -tx_amount_bucket{le="0.5"} 0 -tx_amount_bucket{le="1"} 0 -tx_amount_bucket{le="2.5"} 0 -tx_amount_bucket{le="5"} 0 -tx_amount_bucket{le="10"} 0 -tx_amount_bucket{le="+Inf"} 2 -tx_amount_sum 26 -tx_amount_count 2 -# HELP txs Transactions committed -# TYPE txs counter -txs{type="accepted"} 1 -txs{type="rejected"} 0 -txs{type="total"} 1 -# HELP uptime_since_genesis_ms Network up-time, from creation of the genesis block -# TYPE uptime_since_genesis_ms gauge -uptime_since_genesis_ms 54572974 -# HELP view_changes Number of view_changes in the current round -# TYPE view_changes gauge -view_changes 0 -``` - -Learn [how to use metrics](https://hyperledger.github.io/iroha-2-docs/guide/advanced/metrics.html). - -### API Version - -**Protocol**: HTTP - -**Encoding**: JSON - -**Endpoint**: `/api_version` - -**Method**: `GET` - -**Expects**: - - -**Responses**: - -- 200 OK: The current version of API used by Iroha returned as a json string. - Grabbed from the genesis block's version, so at least a minimal subnet of 4 peers - should be running and the genesis be submitted at the time of request. 
- -``` -"1" -``` - -## Parity Scale Codec - -For more information on codec check [Substrate Dev Hub](https://substrate.dev/docs/en/knowledgebase/advanced/codec) and -codec's [GitHub repository](https://github.com/paritytech/parity-scale-codec). - -## Reference Iroha Client Implementation - -[Iroha client in Rust.](../../../client) diff --git a/docs/source/references/config.md b/docs/source/references/config.md index 215f3e38627..3bf1e5759b6 100644 --- a/docs/source/references/config.md +++ b/docs/source/references/config.md @@ -8,7 +8,7 @@ In this document we provide a reference and detailed descriptions of Iroha's con A type wrapped in a single `Option<..>` signifies that in the corresponding `json` block there is a fallback value for this type, and that it only serves as a reference. If a default for such a type has a `null` value, it means that there is no meaningful fallback available for this particular value. -All the default values can be freely obtained from a provided [sample configuration file](../../../configs/peer/config.json), but it should only serve as a starting point. If left unchanged, the sample configuration file would still fail to build due to it having `null` in place of [public](#public_key) and [private](#private_key) keys as well as [endpoint](#torii.api_url) [URLs](#torii.telemetry_url). These should be provided either by modifying the sample config file or as environment variables. No other overloading of configuration values happens besides reading them from a file and capturing the environment variables. +All the default values can be freely obtained from a provided [sample configuration file](../../../configs/peer/config.json), but it should only serve as a starting point. If left unchanged, the sample configuration file would still fail to build due to it having `null` in place of [public](#public_key) and [private](#private_key) keys as well as [API endpoint URL](#torii.api_url). These should be provided either by modifying the sample config file or as environment variables. No other overloading of configuration values happens besides reading them from a file and capturing the environment variables. For both types of configuration options wrapped in a single `Option<..>` (i.e. both those that have meaningful defaults and those that have `null`), failure to provide them in any of the above two ways results in an error. @@ -33,6 +33,7 @@ The following is the default configuration used by Iroha. "PUBLIC_KEY": null, "PRIVATE_KEY": null, "DISABLE_PANIC_TERMINAL_COLORS": false, + "EXIT_AFTER_INIT": false, "KURA": { "INIT_MODE": "strict", "BLOCK_STORE_PATH": "./storage", @@ -54,11 +55,8 @@ The following is the default configuration used by Iroha. "TORII": { "P2P_ADDR": null, "API_URL": null, - "TELEMETRY_URL": null, "MAX_TRANSACTION_SIZE": 32768, - "MAX_CONTENT_LEN": 16384000, - "FETCH_SIZE": 10, - "QUERY_IDLE_TIME_MS": 30000 + "MAX_CONTENT_LEN": 16384000 }, "BLOCK_SYNC": { "GOSSIP_PERIOD_MS": 10000, @@ -126,6 +124,9 @@ The following is the default configuration used by Iroha. "CREATE_EVERY_MS": 60000, "DIR_PATH": "./storage", "CREATION_ENABLED": true + }, + "LIVE_QUERY_STORE": { + "QUERY_IDLE_TIME_MS": 30000 } } ``` @@ -184,11 +185,21 @@ Has type `Option`[^1]. Can be configured via environment variable `IROHA_D false ``` +## `exit_after_init` + +Exit after initialization for startup time testing + +Has type `Option`[^1]. 
Can be configured via environment variable `IROHA_EXIT_AFTER_INIT` + +```json +false +``` + ## `genesis` `GenesisBlock` configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_GENESIS` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_GENESIS` ```json { @@ -221,7 +232,7 @@ null `Kura` configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_KURA` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_KURA` ```json { @@ -283,11 +294,33 @@ Has type `Option`[^1]. Can be configured via environment variable `KURA_IN "strict" ``` +## `live_query_store` + +LiveQueryStore configuration + +Has type `Option`[^1]. Can be configured via environment variable `IROHA_LIVE_QUERY_STORE` + +```json +{ + "QUERY_IDLE_TIME_MS": 30000 +} +``` + +### `live_query_store.query_idle_time_ms` + +Time query can remain in the store if unaccessed + +Has type `Option`[^1]. Can be configured via environment variable `LIVE_QUERY_STORE_QUERY_IDLE_TIME_MS` + +```json +30000 +``` + ## `logger` `Logger` configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_LOGGER` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_LOGGER` ```json { @@ -450,7 +483,7 @@ Has type `Option`[^1]. Can be configured via environment variable `QUEUE_TR SnapshotMaker configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_SNAPSHOT` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_SNAPSHOT` ```json { @@ -494,7 +527,7 @@ Has type `Option`[^1]. Can be configured via environment variable `SNAPS `Sumeragi` configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_SUMERAGI` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_SUMERAGI` ```json { @@ -604,7 +637,7 @@ null Telemetry configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_TELEMETRY` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_TELEMETRY` ```json { @@ -670,17 +703,14 @@ null `Torii` configuration -Has type `Option`[^1]. Can be configured via environment variable `IROHA_TORII` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_TORII` ```json { "API_URL": null, - "FETCH_SIZE": 10, "MAX_CONTENT_LEN": 16384000, "MAX_TRANSACTION_SIZE": 32768, - "P2P_ADDR": null, - "QUERY_IDLE_TIME_MS": 30000, - "TELEMETRY_URL": null + "P2P_ADDR": null } ``` @@ -694,16 +724,6 @@ Has type `Option`[^1]. Can be configured via environment variable `T null ``` -### `torii.fetch_size` - -How many query results are returned in one batch - -Has type `Option`[^1]. Can be configured via environment variable `TORII_FETCH_SIZE` - -```json -10 -``` - ### `torii.max_content_len` Maximum number of bytes in raw message. Used to prevent from DOS attacks. @@ -734,31 +754,11 @@ Has type `Option`[^1]. Can be configured via environment variable `T null ``` -### `torii.query_idle_time_ms` - -Time query can remain in the store if unaccessed - -Has type `Option`[^1]. Can be configured via environment variable `TORII_QUERY_IDLE_TIME_MS` - -```json -30000 -``` - -### `torii.telemetry_url` - -Torii address for reporting internal status and metrics for administration. - -Has type `Option`[^1]. Can be configured via environment variable `TORII_TELEMETRY_URL` - -```json -null -``` - ## `wsv` `WorldStateView` configuration -Has type `Option`[^1]. 
Can be configured via environment variable `IROHA_WSV` +Has type `Option>`[^1]. Can be configured via environment variable `IROHA_WSV` ```json { diff --git a/docs/source/references/schema.json b/docs/source/references/schema.json index 8352748b3fb..756d22082be 100644 --- a/docs/source/references/schema.json +++ b/docs/source/references/schema.json @@ -20,10 +20,6 @@ { "name": "metadata", "type": "Metadata" - }, - { - "name": "roles", - "type": "SortedVec" } ] }, @@ -149,13 +145,13 @@ }, "AccountId": { "Struct": [ - { - "name": "name", - "type": "Name" - }, { "name": "domain_id", "type": "DomainId" + }, + { + "name": "name", + "type": "Name" } ] }, @@ -183,7 +179,7 @@ } ] }, - "Action": { + "Action": { "Struct": [ { "name": "executable", @@ -207,30 +203,6 @@ } ] }, - "Action": { - "Struct": [ - { - "name": "executable", - "type": "OptimizedExecutable" - }, - { - "name": "repeats", - "type": "Repeats" - }, - { - "name": "authority", - "type": "AccountId" - }, - { - "name": "filter", - "type": "TriggeringFilterBox" - }, - { - "name": "metadata", - "type": "Metadata" - } - ] - }, "Add": { "Struct": [ { @@ -619,6 +591,24 @@ ] }, "BatchedResponse": { + "Enum": [ + { + "tag": "V1", + "discriminant": 1, + "type": "BatchedResponseV1" + } + ] + }, + "BatchedResponse>": { + "Enum": [ + { + "tag": "V1", + "discriminant": 1, + "type": "BatchedResponseV1>" + } + ] + }, + "BatchedResponseV1": { "Struct": [ { "name": "batch", @@ -630,11 +620,11 @@ } ] }, - "BatchedResponse>": { + "BatchedResponseV1>": { "Struct": [ { "name": "batch", - "type": "Vec" + "type": "Vec" }, { "name": "cursor", @@ -657,40 +647,52 @@ "BlockHeader": { "Struct": [ { - "name": "timestamp", - "type": "u128" + "name": "height", + "type": "u64" }, { - "name": "consensus_estimation", + "name": "timestamp_ms", "type": "u64" }, { - "name": "height", - "type": "u64" + "name": "previous_block_hash", + "type": "Option>" + }, + { + "name": "transactions_hash", + "type": "Option>>" }, { "name": "view_change_index", "type": "u64" }, { - "name": "previous_block_hash", - "type": "Option>" + "name": "consensus_estimation_ms", + "type": "u64" + } + ] + }, + "BlockMessage": "SignedBlock", + "BlockPayload": { + "Struct": [ + { + "name": "header", + "type": "BlockHeader" }, { - "name": "transactions_hash", - "type": "Option>>" + "name": "commit_topology", + "type": "UniqueVec" }, { - "name": "rejected_transactions_hash", - "type": "Option>>" + "name": "transactions", + "type": "Vec" }, { - "name": "committed_with_topology", - "type": "Vec" + "name": "event_recommendations", + "type": "Vec" } ] }, - "BlockMessage": "VersionedCommittedBlock", "BlockRejectionReason": { "Enum": [ { @@ -700,7 +702,7 @@ ] }, "BlockSubscriptionRequest": "NonZero", - "BurnBox": { + "BurnExpr": { "Struct": [ { "name": "object", @@ -712,27 +714,7 @@ } ] }, - "CommittedBlock": { - "Struct": [ - { - "name": "header", - "type": "BlockHeader" - }, - { - "name": "transactions", - "type": "Vec" - }, - { - "name": "event_recommendations", - "type": "Vec" - }, - { - "name": "signatures", - "type": "SignaturesOf" - } - ] - }, - "Conditional": { + "ConditionalExpr": { "Struct": [ { "name": "condition", @@ -740,11 +722,11 @@ }, { "name": "then", - "type": "InstructionBox" + "type": "InstructionExpr" }, { "name": "otherwise", - "type": "Option" + "type": "Option" } ] }, @@ -927,9 +909,9 @@ "type": "ConfigurationEvent" }, { - "tag": "Validator", + "tag": "Executor", "discriminant": 9, - "type": "ValidatorEvent" + "type": "ExecutorEvent" } ] }, @@ -970,6 +952,10 @@ { "name": "metadata", 
"type": "Metadata" + }, + { + "name": "owned_by", + "type": "AccountId" } ] }, @@ -1004,6 +990,11 @@ "tag": "MetadataRemoved", "discriminant": 5, "type": "MetadataChanged" + }, + { + "tag": "OwnerChanged", + "discriminant": 6, + "type": "DomainOwnerChanged" } ] }, @@ -1025,14 +1016,18 @@ "tag": "ByMetadataRemoved", "discriminant": 3 }, + { + "tag": "ByOwnerChanged", + "discriminant": 4 + }, { "tag": "ByAccount", - "discriminant": 4, + "discriminant": 5, "type": "FilterOpt" }, { "tag": "ByAssetDefinition", - "discriminant": 5, + "discriminant": 6, "type": "FilterOpt" } ] @@ -1057,6 +1052,18 @@ } ] }, + "DomainOwnerChanged": { + "Struct": [ + { + "name": "domain_id", + "type": "DomainId" + }, + { + "name": "new_owner", + "type": "AccountId" + } + ] + }, "Duration": { "Tuple": [ "u64", @@ -1107,7 +1114,7 @@ } ] }, - "EvaluatesTo>": { + "EvaluatesTo>": { "Struct": [ { "name": "expression", @@ -1115,7 +1122,7 @@ } ] }, - "EvaluatesTo>": { + "EvaluatesTo>": { "Struct": [ { "name": "expression", @@ -1287,7 +1294,7 @@ { "tag": "Instructions", "discriminant": 0, - "type": "Vec" + "type": "Vec" }, { "tag": "Wasm", @@ -1296,14 +1303,6 @@ } ] }, - "ExecuteTriggerBox": { - "Struct": [ - { - "name": "trigger_id", - "type": "EvaluatesTo" - } - ] - }, "ExecuteTriggerEvent": { "Struct": [ { @@ -1328,6 +1327,14 @@ } ] }, + "ExecuteTriggerExpr": { + "Struct": [ + { + "name": "trigger_id", + "type": "EvaluatesTo" + } + ] + }, "ExecutionTime": { "Enum": [ { @@ -1341,6 +1348,36 @@ } ] }, + "Executor": { + "Struct": [ + { + "name": "wasm", + "type": "WasmSmartContract" + } + ] + }, + "ExecutorEvent": { + "Enum": [ + { + "tag": "Upgraded", + "discriminant": 0 + } + ] + }, + "ExecutorMode": { + "Enum": [ + { + "tag": "Path", + "discriminant": 0, + "type": "String" + }, + { + "tag": "Inline", + "discriminant": 1, + "type": "Executor" + } + ] + }, "Expression": { "Enum": [ { @@ -1445,7 +1482,7 @@ } ] }, - "FailBox": { + "Fail": { "Struct": [ { "name": "message", @@ -1920,7 +1957,7 @@ "Struct": [ { "name": "hash", - "type": "EvaluatesTo>" + "type": "EvaluatesTo>" } ] }, @@ -1974,12 +2011,12 @@ { "tag": "Block", "discriminant": 5, - "type": "HashOf" + "type": "HashOf" }, { "tag": "Transaction", "discriminant": 6, - "type": "HashOf" + "type": "HashOf" }, { "tag": "Peer", @@ -2050,7 +2087,7 @@ "Struct": [ { "name": "hash", - "type": "EvaluatesTo>" + "type": "EvaluatesTo>" } ] }, @@ -2133,7 +2170,7 @@ } ] }, - "GrantBox": { + "GrantExpr": { "Struct": [ { "name": "object", @@ -2141,7 +2178,7 @@ }, { "name": "destination_id", - "type": "EvaluatesTo" + "type": "EvaluatesTo" } ] }, @@ -2158,21 +2195,20 @@ ] }, "Hash": "Array", - "HashOf>": "Hash", - "HashOf": "Hash", - "HashOf": "Hash", - "HashOf": "Hash", + "HashOf>": "Hash", + "HashOf": "Hash", + "HashOf": "Hash", "HashValue": { "Enum": [ { "tag": "Transaction", "discriminant": 0, - "type": "HashOf" + "type": "HashOf" }, { "tag": "Block", "discriminant": 1, - "type": "HashOf" + "type": "HashOf" } ] }, @@ -2275,7 +2311,7 @@ { "tag": "Trigger", "discriminant": 9, - "type": "TriggerBox" + "type": "Trigger" }, { "tag": "Role", @@ -2305,100 +2341,6 @@ } ] }, - "InstructionBox": { - "Enum": [ - { - "tag": "Register", - "discriminant": 0, - "type": "RegisterBox" - }, - { - "tag": "Unregister", - "discriminant": 1, - "type": "UnregisterBox" - }, - { - "tag": "Mint", - "discriminant": 2, - "type": "MintBox" - }, - { - "tag": "Burn", - "discriminant": 3, - "type": "BurnBox" - }, - { - "tag": "Transfer", - "discriminant": 4, - "type": "TransferBox" - }, - { - "tag": "If", - 
"discriminant": 5, - "type": "Conditional" - }, - { - "tag": "Pair", - "discriminant": 6, - "type": "Pair" - }, - { - "tag": "Sequence", - "discriminant": 7, - "type": "SequenceBox" - }, - { - "tag": "SetKeyValue", - "discriminant": 8, - "type": "SetKeyValueBox" - }, - { - "tag": "RemoveKeyValue", - "discriminant": 9, - "type": "RemoveKeyValueBox" - }, - { - "tag": "Grant", - "discriminant": 10, - "type": "GrantBox" - }, - { - "tag": "Revoke", - "discriminant": 11, - "type": "RevokeBox" - }, - { - "tag": "ExecuteTrigger", - "discriminant": 12, - "type": "ExecuteTriggerBox" - }, - { - "tag": "SetParameter", - "discriminant": 13, - "type": "SetParameterBox" - }, - { - "tag": "NewParameter", - "discriminant": 14, - "type": "NewParameterBox" - }, - { - "tag": "Upgrade", - "discriminant": 15, - "type": "UpgradeBox" - }, - { - "tag": "Log", - "discriminant": 16, - "type": "LogBox" - }, - { - "tag": "Fail", - "discriminant": 17, - "type": "FailBox" - } - ] - }, "InstructionEvaluationError": { "Enum": [ { @@ -2486,7 +2428,7 @@ "Struct": [ { "name": "instruction", - "type": "InstructionBox" + "type": "InstructionExpr" }, { "name": "reason", @@ -2494,55 +2436,149 @@ } ] }, - "InstructionType": { + "InstructionExpr": { "Enum": [ { "tag": "Register", - "discriminant": 0 + "discriminant": 0, + "type": "RegisterExpr" }, { "tag": "Unregister", - "discriminant": 1 + "discriminant": 1, + "type": "UnregisterExpr" }, { "tag": "Mint", - "discriminant": 2 + "discriminant": 2, + "type": "MintExpr" }, { "tag": "Burn", - "discriminant": 3 + "discriminant": 3, + "type": "BurnExpr" }, { "tag": "Transfer", - "discriminant": 4 + "discriminant": 4, + "type": "TransferExpr" }, { "tag": "If", - "discriminant": 5 + "discriminant": 5, + "type": "ConditionalExpr" }, { "tag": "Pair", - "discriminant": 6 + "discriminant": 6, + "type": "PairExpr" }, { "tag": "Sequence", - "discriminant": 7 + "discriminant": 7, + "type": "SequenceExpr" }, { "tag": "SetKeyValue", - "discriminant": 8 + "discriminant": 8, + "type": "SetKeyValueExpr" }, { "tag": "RemoveKeyValue", - "discriminant": 9 + "discriminant": 9, + "type": "RemoveKeyValueExpr" }, { "tag": "Grant", - "discriminant": 10 + "discriminant": 10, + "type": "GrantExpr" }, { "tag": "Revoke", - "discriminant": 11 + "discriminant": 11, + "type": "RevokeExpr" + }, + { + "tag": "ExecuteTrigger", + "discriminant": 12, + "type": "ExecuteTriggerExpr" + }, + { + "tag": "SetParameter", + "discriminant": 13, + "type": "SetParameterExpr" + }, + { + "tag": "NewParameter", + "discriminant": 14, + "type": "NewParameterExpr" + }, + { + "tag": "Upgrade", + "discriminant": 15, + "type": "UpgradeExpr" + }, + { + "tag": "Log", + "discriminant": 16, + "type": "LogExpr" + }, + { + "tag": "Fail", + "discriminant": 17, + "type": "Fail" + } + ] + }, + "InstructionType": { + "Enum": [ + { + "tag": "Register", + "discriminant": 0 + }, + { + "tag": "Unregister", + "discriminant": 1 + }, + { + "tag": "Mint", + "discriminant": 2 + }, + { + "tag": "Burn", + "discriminant": 3 + }, + { + "tag": "Transfer", + "discriminant": 4 + }, + { + "tag": "If", + "discriminant": 5 + }, + { + "tag": "Pair", + "discriminant": 6 + }, + { + "tag": "Sequence", + "discriminant": 7 + }, + { + "tag": "SetKeyValue", + "discriminant": 8 + }, + { + "tag": "RemoveKeyValue", + "discriminant": 9 + }, + { + "tag": "Grant", + "discriminant": 10 + }, + { + "tag": "Revoke", + "discriminant": 11 }, { "tag": "ExecuteTrigger", @@ -2612,18 +2648,6 @@ "Ipv4Predicate": "Array, 4>", "Ipv6Addr": "Array", "Ipv6Predicate": "Array, 8>", - 
"IsAssetDefinitionOwner": { - "Struct": [ - { - "name": "asset_definition_id", - "type": "EvaluatesTo" - }, - { - "name": "account_id", - "type": "EvaluatesTo" - } - ] - }, "LengthLimits": { "Struct": [ { @@ -2684,7 +2708,7 @@ } ] }, - "LogBox": { + "LogExpr": { "Struct": [ { "name": "level", @@ -2734,8 +2758,8 @@ } ] }, - "MerkleTree": { - "Vec": "HashOf" + "MerkleTree": { + "Vec": "HashOf" }, "Metadata": { "Struct": [ @@ -2837,7 +2861,7 @@ } ] }, - "MintBox": { + "MintExpr": { "Struct": [ { "name": "object", @@ -2994,7 +3018,7 @@ } ] }, - "NewParameterBox": { + "NewParameterExpr": { "Struct": [ { "name": "parameter", @@ -3067,20 +3091,6 @@ } ] }, - "OptimizedExecutable": { - "Enum": [ - { - "tag": "WasmInternalRepr", - "discriminant": 0, - "type": "WasmInternalRepr" - }, - { - "tag": "Instructions", - "discriminant": 1, - "type": "Vec" - } - ] - }, "Option": { "Option": "DomainId" }, @@ -3090,14 +3100,14 @@ "Option": { "Option": "Hash" }, - "Option>>": { - "Option": "HashOf>" + "Option>>": { + "Option": "HashOf>" }, - "Option>": { - "Option": "HashOf" + "Option>": { + "Option": "HashOf" }, - "Option": { - "Option": "InstructionBox" + "Option": { + "Option": "InstructionExpr" }, "Option": { "Option": "IpfsPath" @@ -3148,15 +3158,15 @@ "OriginFilter": "PeerId", "OriginFilter": "RoleId", "OriginFilter": "TriggerId", - "Pair": { + "PairExpr": { "Struct": [ { "name": "left_instruction", - "type": "InstructionBox" + "type": "InstructionExpr" }, { "name": "right_instruction", - "type": "InstructionBox" + "type": "InstructionExpr" } ] }, @@ -3476,124 +3486,119 @@ "discriminant": 16, "type": "FindTotalAssetQuantityByAssetDefinitionId" }, - { - "tag": "IsAssetDefinitionOwner", - "discriminant": 17, - "type": "IsAssetDefinitionOwner" - }, { "tag": "FindAssetKeyValueByIdAndKey", - "discriminant": 18, + "discriminant": 17, "type": "FindAssetKeyValueByIdAndKey" }, { "tag": "FindAssetDefinitionKeyValueByIdAndKey", - "discriminant": 19, + "discriminant": 18, "type": "FindAssetDefinitionKeyValueByIdAndKey" }, { "tag": "FindAllDomains", - "discriminant": 20, + "discriminant": 19, "type": "FindAllDomains" }, { "tag": "FindDomainById", - "discriminant": 21, + "discriminant": 20, "type": "FindDomainById" }, { "tag": "FindDomainKeyValueByIdAndKey", - "discriminant": 22, + "discriminant": 21, "type": "FindDomainKeyValueByIdAndKey" }, { "tag": "FindAllPeers", - "discriminant": 23, + "discriminant": 22, "type": "FindAllPeers" }, { "tag": "FindAllBlocks", - "discriminant": 24, + "discriminant": 23, "type": "FindAllBlocks" }, { "tag": "FindAllBlockHeaders", - "discriminant": 25, + "discriminant": 24, "type": "FindAllBlockHeaders" }, { "tag": "FindBlockHeaderByHash", - "discriminant": 26, + "discriminant": 25, "type": "FindBlockHeaderByHash" }, { "tag": "FindAllTransactions", - "discriminant": 27, + "discriminant": 26, "type": "FindAllTransactions" }, { "tag": "FindTransactionsByAccountId", - "discriminant": 28, + "discriminant": 27, "type": "FindTransactionsByAccountId" }, { "tag": "FindTransactionByHash", - "discriminant": 29, + "discriminant": 28, "type": "FindTransactionByHash" }, { "tag": "FindPermissionTokensByAccountId", - "discriminant": 30, + "discriminant": 29, "type": "FindPermissionTokensByAccountId" }, { "tag": "FindPermissionTokenSchema", - "discriminant": 31, + "discriminant": 30, "type": "FindPermissionTokenSchema" }, { "tag": "FindAllActiveTriggerIds", - "discriminant": 32, + "discriminant": 31, "type": "FindAllActiveTriggerIds" }, { "tag": "FindTriggerById", - "discriminant": 33, + "discriminant": 
32, "type": "FindTriggerById" }, { "tag": "FindTriggerKeyValueByIdAndKey", - "discriminant": 34, + "discriminant": 33, "type": "FindTriggerKeyValueByIdAndKey" }, { "tag": "FindTriggersByDomainId", - "discriminant": 35, + "discriminant": 34, "type": "FindTriggersByDomainId" }, { "tag": "FindAllRoles", - "discriminant": 36, + "discriminant": 35, "type": "FindAllRoles" }, { "tag": "FindAllRoleIds", - "discriminant": 37, + "discriminant": 36, "type": "FindAllRoleIds" }, { "tag": "FindRoleByRoleId", - "discriminant": 38, + "discriminant": 37, "type": "FindRoleByRoleId" }, { "tag": "FindRolesByAccountId", - "discriminant": 39, + "discriminant": 38, "type": "FindRolesByAccountId" }, { "tag": "FindAllParameters", - "discriminant": 40, + "discriminant": 39, "type": "FindAllParameters" } ] @@ -3621,8 +3626,12 @@ "type": "String" }, { - "tag": "Unauthorized", + "tag": "UnknownCursor", "discriminant": 4 + }, + { + "tag": "FetchSizeTooBig", + "discriminant": 5 } ] }, @@ -3658,15 +3667,15 @@ "Struct": [ { "name": "transactions", - "type": "Vec>" + "type": "Vec>" }, { - "name": "validator", - "type": "ValidatorMode" + "name": "executor", + "type": "ExecutorMode" } ] }, - "RegisterBox": { + "RegisterExpr": { "Struct": [ { "name": "object", @@ -3704,7 +3713,7 @@ { "tag": "Trigger", "discriminant": 5, - "type": "Trigger" + "type": "Trigger" }, { "tag": "Role", @@ -3713,7 +3722,7 @@ } ] }, - "RemoveKeyValueBox": { + "RemoveKeyValueExpr": { "Struct": [ { "name": "object_id", @@ -3750,7 +3759,7 @@ } ] }, - "RevokeBox": { + "RevokeExpr": { "Struct": [ { "name": "object", @@ -3758,7 +3767,7 @@ }, { "name": "destination_id", - "type": "EvaluatesTo" + "type": "EvaluatesTo" } ] }, @@ -3896,15 +3905,15 @@ } ] }, - "SequenceBox": { + "SequenceExpr": { "Struct": [ { "name": "instructions", - "type": "Vec" + "type": "Vec" } ] }, - "SetKeyValueBox": { + "SetKeyValueExpr": { "Struct": [ { "name": "object_id", @@ -3920,7 +3929,7 @@ } ] }, - "SetParameterBox": { + "SetParameterExpr": { "Struct": [ { "name": "parameter", @@ -3940,15 +3949,28 @@ } ] }, - "SignatureCheckCondition": "EvaluatesTo", - "SignatureOf": "Signature", + "SignatureCheckCondition": { + "Enum": [ + { + "tag": "AnyAccountSignatureOr", + "discriminant": 0, + "type": "Vec" + }, + { + "tag": "AllAccountSignaturesAnd", + "discriminant": 1, + "type": "Vec" + } + ] + }, + "SignatureOf": "Signature", "SignatureOf": "Signature", "SignatureOf": "Signature", - "SignaturesOf": { + "SignaturesOf": { "Struct": [ { "name": "signatures", - "type": "SortedVec>" + "type": "SortedVec>" } ] }, @@ -3960,7 +3982,37 @@ } ] }, + "SignedBlock": { + "Enum": [ + { + "tag": "V1", + "discriminant": 1, + "type": "SignedBlockV1" + } + ] + }, + "SignedBlockV1": { + "Struct": [ + { + "name": "signatures", + "type": "SignaturesOf" + }, + { + "name": "payload", + "type": "BlockPayload" + } + ] + }, "SignedQuery": { + "Enum": [ + { + "tag": "V1", + "discriminant": 1, + "type": "SignedQueryV1" + } + ] + }, + "SignedQueryV1": { "Struct": [ { "name": "signature", @@ -3973,6 +4025,15 @@ ] }, "SignedTransaction": { + "Enum": [ + { + "tag": "V1", + "discriminant": 1, + "type": "SignedTransactionV1" + } + ] + }, + "SignedTransactionV1": { "Struct": [ { "name": "signatures", @@ -4093,11 +4154,8 @@ "SortedVec": { "Vec": "PublicKey" }, - "SortedVec": { - "Vec": "RoleId" - }, - "SortedVec>": { - "Vec": "SignatureOf" + "SortedVec>": { + "Vec": "SignatureOf" }, "SortedVec>": { "Vec": "SignatureOf" @@ -4216,12 +4274,12 @@ "TransactionQueryOutput": { "Struct": [ { - "name": "transaction", - "type": 
"TransactionValue" + "name": "block_hash", + "type": "HashOf" }, { - "name": "block_hash", - "type": "HashOf" + "name": "transaction", + "type": "TransactionValue" } ] }, @@ -4252,13 +4310,9 @@ "discriminant": 4, "type": "WasmExecutionFail" }, - { - "tag": "UnexpectedGenesisAccountSignature", - "discriminant": 5 - }, { "tag": "Expired", - "discriminant": 6 + "discriminant": 5 } ] }, @@ -4266,7 +4320,7 @@ "Struct": [ { "name": "value", - "type": "VersionedSignedTransaction" + "type": "SignedTransaction" }, { "name": "error", @@ -4274,7 +4328,7 @@ } ] }, - "TransferBox": { + "TransferExpr": { "Struct": [ { "name": "source_id", @@ -4290,7 +4344,7 @@ } ] }, - "Trigger": { + "Trigger": { "Struct": [ { "name": "id", @@ -4298,33 +4352,7 @@ }, { "name": "action", - "type": "Action" - } - ] - }, - "Trigger": { - "Struct": [ - { - "name": "id", - "type": "TriggerId" - }, - { - "name": "action", - "type": "Action" - } - ] - }, - "TriggerBox": { - "Enum": [ - { - "tag": "Raw", - "discriminant": 0, - "type": "Trigger" - }, - { - "tag": "Optimized", - "discriminant": 1, - "type": "Trigger" + "type": "Action" } ] }, @@ -4435,13 +4463,13 @@ }, "TriggerId": { "Struct": [ - { - "name": "name", - "type": "Name" - }, { "name": "domain_id", "type": "Option" + }, + { + "name": "name", + "type": "Name" } ] }, @@ -4500,7 +4528,8 @@ } ] }, - "UnregisterBox": { + "UniqueVec": "Vec", + "UnregisterExpr": { "Struct": [ { "name": "object_id", @@ -4511,13 +4540,13 @@ "UpgradableBox": { "Enum": [ { - "tag": "Validator", + "tag": "Executor", "discriminant": 0, - "type": "Validator" + "type": "Executor" } ] }, - "UpgradeBox": { + "UpgradeExpr": { "Struct": [ { "name": "object", @@ -4552,36 +4581,6 @@ } ] }, - "Validator": { - "Struct": [ - { - "name": "wasm", - "type": "WasmSmartContract" - } - ] - }, - "ValidatorEvent": { - "Enum": [ - { - "tag": "Upgraded", - "discriminant": 0 - } - ] - }, - "ValidatorMode": { - "Enum": [ - { - "tag": "Path", - "discriminant": 0, - "type": "String" - }, - { - "tag": "Inline", - "discriminant": 1, - "type": "Validator" - } - ] - }, "Value": { "Enum": [ { @@ -4667,7 +4666,7 @@ { "tag": "Block", "discriminant": 16, - "type": "VersionedCommittedBlock" + "type": "SignedBlock" }, { "tag": "BlockHeader", @@ -4690,9 +4689,9 @@ "type": "NumericValue" }, { - "tag": "Validator", + "tag": "Executor", "discriminant": 21, - "type": "Validator" + "type": "Executor" }, { "tag": "LogLevel", @@ -4762,8 +4761,8 @@ "Vec>": { "Vec": "GenericPredicateBox" }, - "Vec": { - "Vec": "InstructionBox" + "Vec": { + "Vec": "InstructionExpr" }, "Vec": { "Vec": "Name" @@ -4771,102 +4770,24 @@ "Vec": { "Vec": "PeerId" }, + "Vec": { + "Vec": "PublicKey" + }, + "Vec": { + "Vec": "SignedTransaction" + }, "Vec": { "Vec": "TransactionValue" }, "Vec": { "Vec": "Value" }, - "Vec>": { - "Vec": "Vec" - }, - "Vec": { - "Vec": "VersionedSignedTransaction" + "Vec>": { + "Vec": "Vec" }, "Vec": { "Vec": "u8" }, - "VersionedBatchedResponse": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "BatchedResponse" - } - ] - }, - "VersionedBatchedResponse>": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "BatchedResponse>" - } - ] - }, - "VersionedBlockMessage": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "BlockMessage" - } - ] - }, - "VersionedBlockSubscriptionRequest": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "BlockSubscriptionRequest" - } - ] - }, - "VersionedCommittedBlock": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "CommittedBlock" - } - ] - 
}, - "VersionedEventMessage": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "EventMessage" - } - ] - }, - "VersionedEventSubscriptionRequest": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "EventSubscriptionRequest" - } - ] - }, - "VersionedSignedQuery": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "SignedQuery" - } - ] - }, - "VersionedSignedTransaction": { - "Enum": [ - { - "tag": "V1", - "discriminant": 1, - "type": "SignedTransaction" - } - ] - }, "WasmExecutionFail": { "Struct": [ { @@ -4875,18 +4796,6 @@ } ] }, - "WasmInternalRepr": { - "Struct": [ - { - "name": "serialized", - "type": "Vec" - }, - { - "name": "blob_hash", - "type": "HashOf" - } - ] - }, "WasmSmartContract": "Vec", "Where": { "Struct": [ diff --git a/dsl/Cargo.toml b/dsl/Cargo.toml index 3c3adf0c2b6..7cdbd40ee01 100755 --- a/dsl/Cargo.toml +++ b/dsl/Cargo.toml @@ -1,8 +1,14 @@ [package] name = "iroha_dsl" -version = "2.0.0-pre-rc.19" -edition = "2021" -authors = ["Yasser"] + +edition.workspace = true +version.workspace = true +authors.workspace = true + +license.workspace = true + +[lints] +workspace = true [lib] proc-macro = true diff --git a/dsl/src/lib.rs b/dsl/src/lib.rs index f254dc88040..c1378c24b1d 100755 --- a/dsl/src/lib.rs +++ b/dsl/src/lib.rs @@ -1,3 +1,6 @@ +// TODO: add docs +#![allow(missing_docs)] + use std::{convert::TryFrom, iter::Peekable, str::FromStr}; use litrs::Literal; diff --git a/ffi/.cargo/config.toml b/ffi/.cargo/config.toml index 4ca8f7bc93b..10c68cf82ce 100644 --- a/ffi/.cargo/config.toml +++ b/ffi/.cargo/config.toml @@ -1,2 +1,2 @@ [target.wasm32-unknown-unknown] -runner = "webassembly-test-runner" +runner = "iroha_wasm_test_runner" diff --git a/ffi/Cargo.toml b/ffi/Cargo.toml index 3c95a1734af..64c50507343 100644 --- a/ffi/Cargo.toml +++ b/ffi/Cargo.toml @@ -8,6 +8,9 @@ authors.workspace = true license.workspace = true categories = ["development-tools::ffi"] +[lints] +workspace = true + [features] # Enables sharing mutable references of non-robust transmutable types across FFI. 
# When handing out non-robust mutable references across FFI, it's possible for the caller diff --git a/ffi/derive/Cargo.toml b/ffi/derive/Cargo.toml index 9cfa904eef3..5004ea9a52c 100644 --- a/ffi/derive/Cargo.toml +++ b/ffi/derive/Cargo.toml @@ -8,10 +8,15 @@ authors.workspace = true license.workspace = true categories = ["development-tools::ffi"] +[lints] +workspace = true + [lib] proc-macro = true [dependencies] +iroha_macro_utils = { workspace = true } + syn2 = { workspace = true, features = ["full", "visit", "visit-mut", "extra-traits"] } quote = { workspace = true } proc-macro2 = { workspace = true } @@ -19,7 +24,6 @@ manyhow = { workspace = true } darling = { workspace = true } rustc-hash = { workspace = true } -drop_bomb = "0.1.5" parse-display = "0.8.2" [dev-dependencies] diff --git a/ffi/derive/src/attr_parse/derive.rs b/ffi/derive/src/attr_parse/derive.rs index f33a62f592b..d1d36bb5832 100644 --- a/ffi/derive/src/attr_parse/derive.rs +++ b/ffi/derive/src/attr_parse/derive.rs @@ -70,13 +70,15 @@ impl FromAttributes for DeriveAttrs { for attr in attrs { if attr.path().is_ident("derive") { - let Some(list) = accumulator.handle(attr.meta.require_list().map_err(Into::into)) else { - continue + let Some(list) = accumulator.handle(attr.meta.require_list().map_err(Into::into)) + else { + continue; }; let Some(paths) = accumulator.handle( - list.parse_args_with(Punctuated::::parse_terminated).map_err(Into::into) + list.parse_args_with(Punctuated::::parse_terminated) + .map_err(Into::into), ) else { - continue + continue; }; for path in paths { @@ -145,7 +147,7 @@ mod test { RustcDerive::Debug, ].into_iter().map(Derive::Rustc).collect(), } - ) + ); } #[test] @@ -160,7 +162,7 @@ mod test { GetSetDerive::CopyGetters, ].into_iter().map(Derive::GetSet).collect(), } - ) + ); } #[test] @@ -173,6 +175,6 @@ mod test { "Kek".to_string(), ].into_iter().map(Derive::Other).collect(), } - ) + ); } } diff --git a/ffi/derive/src/attr_parse/getset.rs b/ffi/derive/src/attr_parse/getset.rs index 471f13af610..208c949956e 100644 --- a/ffi/derive/src/attr_parse/getset.rs +++ b/ffi/derive/src/attr_parse/getset.rs @@ -78,14 +78,11 @@ impl syn2::parse::Parse for SpannedGetSetOptions { } else { errors.push(syn2::Error::new( lit.span(), - format!( - "Failed to parse getset options at {}: duplicate visibility", - part - ), + format!("Failed to parse getset options at {part}: duplicate visibility",), )); } } else { - errors.push(syn2::Error::new(lit.span(), format!("Failed to parse getset options at `{}`: expected visibility or `with_prefix`", part))); + errors.push(syn2::Error::new(lit.span(), format!("Failed to parse getset options at `{part}`: expected visibility or `with_prefix`"))); } } @@ -144,7 +141,7 @@ impl syn2::parse::Parse for SpannedGetSetAttrToken { let span = ident .span() .join(options.span) - .expect("must be in the same file"); + .unwrap_or_else(|| ident.span()); Ok(SpannedGetSetAttrToken { span, @@ -183,7 +180,7 @@ fn insert_gen_request( match gen_map.entry(mode) { Entry::Occupied(_) => accumulator.push( - darling::Error::custom(format!("duplicate `getset({})` attribute", mode)) + darling::Error::custom(format!("duplicate `getset({mode})` attribute")) .with_span(&span), ), Entry::Vacant(v) => { @@ -211,10 +208,17 @@ impl GetSetRawFieldAttr { // iroha doesn't use the latter form, so it is not supported by `iroha_ffi_derive` if attr.path().is_ident("getset") { let Some(list) = accumulator.handle(attr.meta.require_list().map_err(Into::into)) - else { continue }; - let Some(tokens): Option> - = 
accumulator.handle(list.parse_args_with(Punctuated::parse_terminated).map_err(Into::into)) - else { continue }; + else { + continue; + }; + let Some(tokens): Option> = + accumulator.handle( + list.parse_args_with(Punctuated::parse_terminated) + .map_err(Into::into), + ) + else { + continue; + }; for token in tokens { match token.token { @@ -248,7 +252,7 @@ impl GetSetRawFieldAttr { "getset attributes without `getset` prefix are not supported by iroha_ffi_derive", ) .with_span(attr), - ) + ); } } @@ -711,7 +715,7 @@ mod test { (GetSetGenMode::Get, GetSetOptions::default()), (GetSetGenMode::Set, GetSetOptions::default()), ]) - ) + ); } #[test] @@ -730,7 +734,7 @@ mod test { ..Default::default() }), ]) - ) + ); } #[test] @@ -749,7 +753,7 @@ mod test { with_prefix: true, }), ]) - ) + ); } } } diff --git a/ffi/derive/src/attr_parse/repr.rs b/ffi/derive/src/attr_parse/repr.rs index 770e0022753..f845fbd3393 100644 --- a/ffi/derive/src/attr_parse/repr.rs +++ b/ffi/derive/src/attr_parse/repr.rs @@ -78,7 +78,8 @@ impl Parse for SpannedReprToken { let Some((inside_of_group, group_span, after_group)) = after_token.group(Delimiter::Parenthesis) else { return Err(cursor.error("Expected a number inside of a `repr(aligned()), found `repr(aligned)`")); }; - span = span.join(group_span.span()).expect("Spans must be in the same file"); + + span = span.join(group_span.span()).unwrap_or(span); let alignment = syn2::parse2::(inside_of_group.token_stream())?; let alignment = alignment.base10_parse::()?; @@ -136,7 +137,7 @@ impl FromAttributes for Repr { ), Meta::List(list) => { let Some(tokens) = accumulator.handle( - syn2::parse2::(list.tokens.clone()).map_err(Into::into) + syn2::parse2::(list.tokens.clone()).map_err(Into::into), ) else { continue; }; diff --git a/ffi/derive/src/convert.rs b/ffi/derive/src/convert.rs index cb409c622af..9a3c256e337 100644 --- a/ffi/derive/src/convert.rs +++ b/ffi/derive/src/convert.rs @@ -4,21 +4,17 @@ use std::fmt::{Display, Formatter}; use darling::{ ast::Style, util::SpannedValue, FromAttributes, FromDeriveInput, FromField, FromVariant, }; +use iroha_macro_utils::{parse_single_list_attr_opt, Emitter}; use manyhow::{emit, error_message}; use proc_macro2::{Delimiter, Span, TokenStream}; use quote::quote; -use syn2::{ - parse::ParseStream, spanned::Spanned as _, visit::Visit as _, Attribute, Field, Ident, Meta, -}; +use syn2::{parse::ParseStream, spanned::Spanned as _, visit::Visit as _, Attribute, Field, Ident}; -use crate::{ - attr_parse::{ - derive::DeriveAttrs, - doc::DocAttrs, - getset::{GetSetFieldAttrs, GetSetStructAttrs}, - repr::{Repr, ReprKind, ReprPrimitive}, - }, - emitter::Emitter, +use crate::attr_parse::{ + derive::DeriveAttrs, + doc::DocAttrs, + getset::{GetSetFieldAttrs, GetSetStructAttrs}, + repr::{Repr, ReprKind, ReprPrimitive}, }; #[derive(Debug)] @@ -37,7 +33,7 @@ impl Display for FfiTypeToken { FfiTypeToken::UnsafeNonOwning => "#[ffi_type(unsafe {non_owning})]", FfiTypeToken::Local => "#[ffi_type(local)]", }; - write!(f, "{}", text) + write!(f, "{text}") } } @@ -51,7 +47,7 @@ impl syn2::parse::Parse for SpannedFfiTypeToken { fn parse(input: ParseStream) -> syn2::Result { let (span, token) = input.step(|cursor| { let Some((token, after_token)) = cursor.ident() else { - return Err(cursor.error("expected ffi type kind")) + return Err(cursor.error("expected ffi type kind")); }; let mut span = token.span(); @@ -60,26 +56,35 @@ impl syn2::parse::Parse for SpannedFfiTypeToken { "opaque" => Ok(((span, FfiTypeToken::Opaque), after_token)), "local" => Ok(((span, 
FfiTypeToken::Local), after_token)), "unsafe" => { - let Some((inside_of_group, group_span, after_group)) = after_token.group(Delimiter::Brace) else { - return Err(cursor.error("expected `{ ... }` after `unsafe`")) + let Some((inside_of_group, group_span, after_group)) = + after_token.group(Delimiter::Brace) + else { + return Err(cursor.error("expected `{ ... }` after `unsafe`")); }; - span = span.join(group_span.span()).expect("Spans must be in the same file"); + span = span.join(group_span.span()).unwrap_or(span); let Some((token, after_token)) = inside_of_group.ident() else { - return Err(cursor.error("expected ffi type kind")) + return Err(cursor.error("expected ffi type kind")); }; if !after_token.eof() { - return Err(cursor.error("`unsafe { ... }` should only contain one identifier inside")) + return Err(cursor + .error("`unsafe { ... }` should only contain one identifier inside")); } let token = token.to_string(); match token.as_str() { "robust" => Ok(((span, FfiTypeToken::UnsafeRobust), after_group)), "non_owning" => Ok(((span, FfiTypeToken::UnsafeNonOwning), after_group)), - other => Err(syn2::Error::new(token.span(), format!("unknown unsafe ffi type kind: {}", other))), + other => Err(syn2::Error::new( + token.span(), + format!("unknown unsafe ffi type kind: {other}"), + )), } } - other => Err(syn2::Error::new(span, format!("unknown unsafe ffi type kind: {}", other))), + other => Err(syn2::Error::new( + span, + format!("unknown unsafe ffi type kind: {other}"), + )), } })?; @@ -105,7 +110,7 @@ impl syn2::parse::Parse for FfiTypeKindAttribute { other => { return Err(syn2::Error::new( token.span, - format!("`{}` cannot be used on a type", other), + format!("`{other}` cannot be used on a type"), )) } }) @@ -127,7 +132,7 @@ impl syn2::parse::Parse for FfiTypeKindFieldAttribute { other => { return Err(syn2::Error::new( token.span, - format!("`{}` cannot be used on a field", other), + format!("`{other}` cannot be used on a field"), )) } }) @@ -135,47 +140,7 @@ impl syn2::parse::Parse for FfiTypeKindFieldAttribute { } } -fn parse_ffi_type_attr(attrs: &[Attribute]) -> darling::Result> { - let mut accumulator = darling::error::Accumulator::default(); - - // first, ensure there is only one "ffi_type" attribute (we don't support multiple) - let ffi_type_attrs = attrs - .iter() - .filter(|a| a.path().is_ident("ffi_type")) - .collect::>(); - let attr = match *ffi_type_attrs.as_slice() { - [] => { - return accumulator.finish_with(None); - } - [attr] => attr, - [attr, ref tail @ ..] 
=> { - // allow parsing to proceed further to collect more errors - accumulator.push( - darling::Error::custom("Only one #[ffi_type] attribute is allowed!").with_span( - &tail - .iter() - .map(syn2::spanned::Spanned::span) - .reduce(|a, b| a.join(b).unwrap()) - .unwrap(), - ), - ); - attr - } - }; - - let mut kind = None; - - match &attr.meta { - Meta::Path(_) | Meta::NameValue(_) => accumulator.push(darling::Error::custom( - "Expected #[ffi_type(...)] attribute to be a list", - )), - Meta::List(list) => { - kind = accumulator.handle(syn2::parse2(list.tokens.clone()).map_err(Into::into)); - } - } - - accumulator.finish_with(kind) -} +const FFI_TYPE_ATTR: &str = "ffi_type"; pub struct FfiTypeAttr { pub kind: Option, @@ -183,7 +148,7 @@ pub struct FfiTypeAttr { impl FromAttributes for FfiTypeAttr { fn from_attributes(attrs: &[Attribute]) -> darling::Result { - parse_ffi_type_attr(attrs).map(|kind| Self { kind }) + parse_single_list_attr_opt(FFI_TYPE_ATTR, attrs).map(|kind| Self { kind }) } } @@ -193,7 +158,7 @@ pub struct FfiTypeFieldAttr { impl FromAttributes for FfiTypeFieldAttr { fn from_attributes(attrs: &[Attribute]) -> darling::Result { - parse_ffi_type_attr(attrs).map(|kind| Self { kind }) + parse_single_list_attr_opt(FFI_TYPE_ATTR, attrs).map(|kind| Self { kind }) } } @@ -289,7 +254,6 @@ pub fn derive_ffi_type(emitter: &mut Emitter, input: &syn2::DeriveInput) -> Toke }; let name = &input.ident; - if let darling::ast::Data::Enum(variants) = &input.data { if variants.is_empty() { emit!( @@ -321,7 +285,7 @@ pub fn derive_ffi_type(emitter: &mut Emitter, input: &syn2::DeriveInput) -> Toke emitter, &variant.span(), "Fieldless enums with explicit discriminants are prohibited", - ) + ); } derive_ffi_type_for_fieldless_enum( @@ -509,7 +473,7 @@ fn derive_ffi_type_for_data_carrying_enum( local: bool, ) -> TokenStream { let (repr_c_enum_name, repr_c_enum) = - gen_data_carrying_repr_c_enum(emitter, enum_name, &mut generics, variants); + gen_data_carrying_repr_c_enum(emitter, enum_name, &generics, variants); generics.make_where_clause(); let lifetime = quote! {'__iroha_ffi_itm}; @@ -625,12 +589,10 @@ fn derive_ffi_type_for_data_carrying_enum( let mut non_local_where_clause = where_clause.unwrap().clone(); for variant in variants { - let Some(ty) = variant_mapper( - emitter, variant, - || None, - |field| Some(field.ty.clone()) - ) else { - continue + let Some(ty) = + variant_mapper(emitter, variant, || None, |field| Some(field.ty.clone())) + else { + continue; }; non_local_where_clause.predicates.push( @@ -725,7 +687,7 @@ fn derive_ffi_type_for_repr_c(emitter: &mut Emitter, input: &FfiTypeInput) -> To fn gen_data_carrying_repr_c_enum( emitter: &mut Emitter, enum_name: &Ident, - generics: &mut syn2::Generics, + generics: &syn2::Generics, variants: &[SpannedValue], ) -> (Ident, TokenStream) { let (payload_name, payload) = @@ -756,7 +718,7 @@ fn gen_data_carrying_repr_c_enum( fn gen_data_carrying_enum_payload( emitter: &mut Emitter, enum_name: &Ident, - generics: &mut syn2::Generics, + generics: &syn2::Generics, variants: &[SpannedValue], ) -> (Ident, TokenStream) { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); @@ -916,14 +878,22 @@ fn get_enum_repr_type( // it's an error to use an `#[derive(FfiType)]` on them // but we still want to generate a reasonable error message, so we check for it here if !is_empty { - emit!(emitter, enum_name, "Enum representation is not specified. 
Try adding `#[repr(u32)]` or similar"); + emit!( + emitter, + enum_name, + "Enum representation is not specified. Try adding `#[repr(u32)]` or similar" + ); } - return syn2::parse_quote! {u32} + return syn2::parse_quote! {u32}; }; let ReprKind::Primitive(primitive) = &*kind else { - emit!(emitter, &kind.span(), "Enum should have a primitive representation (like `#[repr(u32)]`)"); - return syn2::parse_quote! {u32} + emit!( + emitter, + &kind.span(), + "Enum should have a primitive representation (like `#[repr(u32)]`)" + ); + return syn2::parse_quote! {u32}; }; match primitive { diff --git a/ffi/derive/src/ffi_fn.rs b/ffi/derive/src/ffi_fn.rs index 3b414cdb8e0..f90a4763191 100644 --- a/ffi/derive/src/ffi_fn.rs +++ b/ffi/derive/src/ffi_fn.rs @@ -189,7 +189,7 @@ fn gen_input_conversion_stmts(fn_descriptor: &FnDescriptor) -> TokenStream { let mut stmts = quote! {}; if let Some(arg) = &fn_descriptor.receiver { - stmts = gen_arg_ffi_to_src(arg) + stmts = gen_arg_ffi_to_src(arg); } for arg in &fn_descriptor.input_args { diff --git a/ffi/derive/src/getset_gen.rs b/ffi/derive/src/getset_gen.rs index 6458c04a030..89c628de71b 100644 --- a/ffi/derive/src/getset_gen.rs +++ b/ffi/derive/src/getset_gen.rs @@ -1,6 +1,7 @@ use std::default::Default; use darling::ast::Style; +use iroha_macro_utils::Emitter; use manyhow::emit; use proc_macro2::TokenStream; use quote::quote; @@ -13,7 +14,6 @@ use crate::{ getset::{GetSetGenMode, GetSetStructAttrs}, }, convert::{FfiTypeField, FfiTypeFields}, - emitter::Emitter, impl_visitor::{unwrap_result_type, Arg, FnDescriptor}, }; diff --git a/ffi/derive/src/impl_visitor.rs b/ffi/derive/src/impl_visitor.rs index 6c547b10020..4d7f8dde79d 100644 --- a/ffi/derive/src/impl_visitor.rs +++ b/ffi/derive/src/impl_visitor.rs @@ -2,6 +2,7 @@ //! //! It also defines descriptors - types that are used for the codegen step +use iroha_macro_utils::Emitter; use manyhow::emit; use proc_macro2::Span; use syn2::{ @@ -11,8 +12,6 @@ use syn2::{ Attribute, Ident, Path, Type, Visibility, }; -use crate::emitter::Emitter; - pub struct Arg { self_ty: Option, name: Ident, @@ -407,14 +406,14 @@ impl<'ast> Visit<'ast> for FnVisitor<'ast, '_> { self.emitter, node.abi, "Extern fn declarations not supported" - ) + ); } if node.variadic.is_some() { emit!( self.emitter, node.variadic, "Variadic arguments not supported" - ) + ); } visit_signature(self, node); diff --git a/ffi/derive/src/lib.rs b/ffi/derive/src/lib.rs index 2bbd93b6489..aa2fd27550d 100644 --- a/ffi/derive/src/lib.rs +++ b/ffi/derive/src/lib.rs @@ -1,8 +1,7 @@ //! 
Crate containing FFI related macro functionality -#![allow(clippy::arithmetic_side_effects)] - use darling::FromDeriveInput; use impl_visitor::{FnDescriptor, ImplDescriptor}; +use iroha_macro_utils::Emitter; use manyhow::{emit, manyhow}; use proc_macro2::TokenStream; use quote::quote; @@ -12,12 +11,10 @@ use wrapper::wrap_method; use crate::{ attr_parse::derive::Derive, convert::{derive_ffi_type, FfiTypeData, FfiTypeInput}, - emitter::Emitter, }; mod attr_parse; mod convert; -mod emitter; mod ffi_fn; mod getset_gen; mod impl_visitor; diff --git a/ffi/derive/src/wrapper.rs b/ffi/derive/src/wrapper.rs index 17fcb77e083..01aec489542 100644 --- a/ffi/derive/src/wrapper.rs +++ b/ffi/derive/src/wrapper.rs @@ -1,3 +1,4 @@ +use iroha_macro_utils::Emitter; use manyhow::emit; use proc_macro2::{Span, TokenStream}; use quote::quote; @@ -6,7 +7,6 @@ use syn2::{parse_quote, visit_mut::VisitMut, Attribute, Ident, Type}; use crate::{ attr_parse::derive::{Derive, RustcDerive}, convert::FfiTypeInput, - emitter::Emitter, ffi_fn, getset_gen::{gen_resolve_type, gen_store_name}, impl_visitor::{unwrap_result_type, Arg, FnDescriptor, ImplDescriptor, TypeImplTraitResolver}, @@ -143,7 +143,7 @@ fn gen_shared_fns(emitter: &mut Emitter, input: &FfiTypeInput) -> Vec { shared_fn_impls.push(impl_clone_for_opaque(name, &input.generics)); @@ -169,7 +169,7 @@ fn gen_shared_fns(emitter: &mut Emitter, input: &FfiTypeInput) -> Vec { @@ -181,7 +181,7 @@ fn gen_shared_fns(emitter: &mut Emitter, input: &FfiTypeInput) -> Vec TokenStream { type Ref<#lifetime> = &#lifetime iroha_ffi::Extern where #(#lifetime_bounded_where_clause),*; type RefMut<#lifetime> = &#lifetime mut iroha_ffi::Extern where #(#lifetime_bounded_where_clause),*; type Box = Box; + type SliceBox = Box<[iroha_ffi::Extern]>; type SliceRef<#lifetime> = &#lifetime [iroha_ffi::ir::Transparent] where #(#lifetime_bounded_where_clause),*; type SliceRefMut<#lifetime> = &#lifetime mut [iroha_ffi::ir::Transparent] where #(#lifetime_bounded_where_clause),*; type Vec = Vec; diff --git a/ffi/src/ir.rs b/ffi/src/ir.rs index db49fae2b4a..6c99c9dbf69 100644 --- a/ffi/src/ir.rs +++ b/ffi/src/ir.rs @@ -105,6 +105,8 @@ pub trait IrTypeFamily { Self: 'itm; /// [`Ir`] type that [`Box`] is mapped into type Box; + /// [`Ir`] type that `Box<[T]>` is mapped into + type SliceBox; /// [`Ir`] type that `&[T]` is mapped into type SliceRef<'itm> where @@ -124,6 +126,7 @@ impl IrTypeFamily for R { // NOTE: Unused type RefMut<'itm> = () where Self: 'itm; type Box = Box; + type SliceBox = Box<[Self]>; type SliceRef<'itm> = &'itm [Self] where Self: 'itm; // NOTE: Unused type SliceRefMut<'itm> = () where Self: 'itm; @@ -134,6 +137,7 @@ impl IrTypeFamily for Robust { type Ref<'itm> = Transparent; type RefMut<'itm> = Transparent; type Box = Box; + type SliceBox = Box<[Self]>; type SliceRef<'itm> = &'itm [Self]; type SliceRefMut<'itm> = &'itm mut [Self]; type Vec = Vec; @@ -143,6 +147,7 @@ impl IrTypeFamily for Opaque { type Ref<'itm> = Transparent; type RefMut<'itm> = Transparent; type Box = Box; + type SliceBox = Box<[Self]>; type SliceRef<'itm> = &'itm [Self]; type SliceRefMut<'itm> = &'itm mut [Self]; type Vec = Vec; @@ -152,6 +157,7 @@ impl IrTypeFamily for Transparent { type Ref<'itm> = Self; type RefMut<'itm> = Self; type Box = Box; + type SliceBox = Box<[Self]>; type SliceRef<'itm> = &'itm [Self]; type SliceRefMut<'itm> = &'itm mut [Self]; type Vec = Vec; @@ -161,6 +167,7 @@ impl IrTypeFamily for &Extern { type Ref<'itm> = &'itm Self where Self: 'itm; type RefMut<'itm> = &'itm mut Self where 
Self: 'itm; type Box = Box; + type SliceBox = Box<[Self]>; type SliceRef<'itm> = &'itm [Self] where Self: 'itm; type SliceRefMut<'itm> = &'itm mut [Self] where Self: 'itm; type Vec = Vec; @@ -170,6 +177,7 @@ impl IrTypeFamily for &mut Extern { type Ref<'itm> = &'itm Self where Self: 'itm; type RefMut<'itm> = &'itm mut Self where Self: 'itm; type Box = Box; + type SliceBox = Box<[Self]>; type SliceRef<'itm> = &'itm [Self] where Self: 'itm; type SliceRefMut<'itm> = &'itm mut [Self] where Self: 'itm; type Vec = Vec; @@ -209,6 +217,12 @@ where { type Type = ::Box; } +impl Ir for Box<[R]> +where + R::Type: IrTypeFamily, +{ + type Type = ::SliceBox; +} impl<'itm, R: Ir> Ir for &'itm [R] where R::Type: IrTypeFamily, diff --git a/ffi/src/option.rs b/ffi/src/option.rs index 06b3b345f9b..676c20e376f 100644 --- a/ffi/src/option.rs +++ b/ffi/src/option.rs @@ -168,7 +168,7 @@ impl COutPtrWrite> for Option { // TODO: No need to zero the memory because it must never be read out_ptr.write(FfiTuple2(discriminant_out_ptr, unsafe { core::mem::zeroed() - })) + })); } Some(value) => { let mut discriminant_out_ptr = MaybeUninit::uninit(); diff --git a/ffi/src/primitives.rs b/ffi/src/primitives.rs index 1c53f998442..b6009b1f138 100644 --- a/ffi/src/primitives.rs +++ b/ffi/src/primitives.rs @@ -205,11 +205,8 @@ macro_rules! int128_derive { int128_derive! { u128 => FfiU128, i128 => FfiI128 } impl From for FfiU128 { - #[allow( - clippy::cast_possible_truncation, // Truncation is done on purpose - clippy::arithmetic_side_effects - )] - #[inline] + // Truncation is done on purpose + #[allow(clippy::cast_possible_truncation)] fn from(value: u128) -> Self { let lo = value as u64; let hi = (value >> 64) as u64; @@ -218,14 +215,10 @@ impl From for FfiU128 { } impl From for u128 { - #[allow( - clippy::cast_lossless, - clippy::cast_possible_truncation, // Truncation is done on purpose - clippy::arithmetic_side_effects - )] - #[inline] + // Truncation is done on purpose + #[allow(clippy::cast_possible_truncation)] fn from(FfiU128(FfiTuple2(hi, lo)): FfiU128) -> Self { - ((hi as u128) << 64) | (lo as u128) + (u128::from(hi) << 64) | u128::from(lo) } } @@ -259,7 +252,7 @@ mod tests { ]; for value in values { - assert_eq!(value, FfiU128::from(value).into()) + assert_eq!(value, FfiU128::from(value).into()); } } @@ -280,7 +273,7 @@ mod tests { ]; for value in values { - assert_eq!(value, FfiI128::from(value).into()) + assert_eq!(value, FfiI128::from(value).into()); } } } diff --git a/ffi/src/repr_c.rs b/ffi/src/repr_c.rs index ff0472c672e..b7896a7273f 100644 --- a/ffi/src/repr_c.rs +++ b/ffi/src/repr_c.rs @@ -285,7 +285,6 @@ impl<'itm, R: NonLocal + CTypeConvert<'itm, S, R::ReprC> + Clone + 'itm, S: C CTypeConvert::, _>::into_repr_c(self, store_borrow); // NOTE: None value indicates a bug in the implementation - #[allow(clippy::expect_used)] out_ptr.write(store.0.expect("Store must be initialized")); } } @@ -641,7 +640,7 @@ impl COutPtr for R { } impl COutPtrWrite for R { unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { - write_non_local::<_, Robust>(self, out_ptr) + write_non_local::<_, Robust>(self, out_ptr); } } impl COutPtrRead for R { @@ -688,6 +687,53 @@ impl COutPtrRead> for Box { } } +impl CType> for Box<[R]> { + type ReprC = SliceMut; +} +impl CTypeConvert<'_, Box<[Robust]>, SliceMut> for Box<[R]> { + type RustStore = Self; + type FfiStore = (); + + fn into_repr_c(self, store: &mut Self::RustStore) -> SliceMut { + *store = self; + SliceMut::from_slice(Some(store.as_mut())) + } + + unsafe fn 
try_from_repr_c(source: SliceMut, _: &mut ()) -> Result { + source + .into_rust() + .ok_or(FfiReturn::ArgIsNull) + .map(|slice| (&*slice).into()) + } +} + +impl CWrapperType> for Box<[R]> { + type InputType = Self; + type ReturnType = Self; +} +impl COutPtr> for Box<[R]> { + type OutPtr = OutBoxedSlice; +} +impl COutPtrWrite> for Box<[R]> { + unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { + let mut store = Box::default(); + CTypeConvert::, _>::into_repr_c(self, &mut store); + out_ptr.write(OutBoxedSlice::from_boxed_slice(Some(store))); + } +} +impl COutPtrRead> for Box<[R]> { + unsafe fn try_read_out(out_ptr: Self::OutPtr) -> Result { + let slice = SliceMut::from_raw_parts_mut(out_ptr.as_mut_ptr(), out_ptr.len()); + let res = CTypeConvert::, _>::try_from_repr_c(slice, &mut ()); + + if !out_ptr.deallocate() { + return Err(FfiReturn::TrapRepresentation); + } + + res + } +} + impl CType<&[Robust]> for &[R] { type ReprC = SliceRef; } @@ -713,7 +759,7 @@ impl COutPtr<&[Robust]> for &[R] { } impl COutPtrWrite<&[Robust]> for &[R] { unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { - write_non_local::<_, &[Robust]>(self, out_ptr) + write_non_local::<_, &[Robust]>(self, out_ptr); } } impl COutPtrRead<&[Robust]> for &[R] { @@ -747,7 +793,7 @@ impl COutPtr<&mut [Robust]> for &mut [R] { } impl COutPtrWrite<&mut [Robust]> for &mut [R] { unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { - write_non_local::<_, &mut [Robust]>(self, out_ptr) + write_non_local::<_, &mut [Robust]>(self, out_ptr); } } impl COutPtrRead<&mut [Robust]> for &mut [R] { @@ -836,7 +882,7 @@ impl COutPtr for R { } impl COutPtrWrite for R { unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { - write_non_local::<_, Opaque>(self, out_ptr) + write_non_local::<_, Opaque>(self, out_ptr); } } @@ -865,7 +911,51 @@ impl COutPtr> for Box { } impl COutPtrWrite> for Box { unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { - write_non_local::<_, Box>(self, out_ptr) + write_non_local::<_, Box>(self, out_ptr); + } +} + +impl CType> for Box<[R]> { + type ReprC = SliceMut<*mut R>; +} +impl CTypeConvert<'_, Box<[Opaque]>, SliceMut<*mut R>> for Box<[R]> { + type RustStore = Box<[*mut R]>; + type FfiStore = (); + + fn into_repr_c(self, store: &mut Self::RustStore) -> SliceMut<*mut R> { + *store = Vec::from(self) + .into_iter() + .map(|a: R| Box::new(a)) + .map(Box::into_raw) + .collect(); + + SliceMut::from_slice(Some(store)) + } + + unsafe fn try_from_repr_c(source: SliceMut<*mut R>, _: &mut ()) -> Result { + source + .into_rust() + .ok_or(FfiReturn::ArgIsNull)? 
+ .iter() + .map(|&item| { + if let Some(item) = item.as_mut() { + return Ok(*Box::from_raw(item)); + } + + Err(FfiReturn::ArgIsNull) + }) + .collect::>() + } +} + +impl COutPtr> for Box<[R]> { + type OutPtr = OutBoxedSlice<*mut R>; +} +impl COutPtrWrite> for Box<[R]> { + unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { + let mut store = Box::default(); + CTypeConvert::, _>::into_repr_c(self, &mut store); + out_ptr.write(OutBoxedSlice::from_boxed_slice(Some(store))); } } @@ -1057,7 +1147,7 @@ impl COutPtr<[Opaque; N]> for [R; N] { } impl COutPtrWrite<[Opaque; N]> for [R; N] { unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { - write_non_local::<_, [Opaque; N]>(self, out_ptr) + write_non_local::<_, [Opaque; N]>(self, out_ptr); } } @@ -1082,6 +1172,10 @@ impl<'itm, R: External> CWrapperType> for Box<&'itm R> { type InputType = Box>; type ReturnType = Box>; } +impl<'itm, R: External> CWrapperType> for Box<[&'itm R]> { + type InputType = Box<[R::RefType<'itm>]>; + type ReturnType = Box<[R::RefType<'itm>]>; +} impl<'itm, R: External> CWrapperType<&'itm [&'itm Extern]> for &'itm [&'itm R] { type InputType = &'itm [R::RefType<'itm>]; type ReturnType = &'itm [R::RefType<'itm>]; @@ -1107,6 +1201,10 @@ impl<'itm, R: External> CWrapperType> for Box<&'itm mut R> type InputType = Box>; type ReturnType = Box>; } +impl<'itm, R: External> CWrapperType> for Box<[&'itm mut R]> { + type InputType = Box<[R::RefMutType<'itm>]>; + type ReturnType = Box<[R::RefMutType<'itm>]>; +} impl<'itm, R: External> CWrapperType<&'itm [&'itm mut Extern]> for &'itm [&'itm mut R] { type InputType = &'itm [R::RefMutType<'itm>]; type ReturnType = &'itm [R::RefMutType<'itm>]; @@ -1270,6 +1368,63 @@ where } } +impl CType> for Box<[R]> +where + Box<[R::Target]>: FfiType, +{ + type ReprC = as FfiType>::ReprC; +} +impl<'itm, R: Transmute, C: ReprC> CTypeConvert<'itm, Box<[Transparent]>, C> for Box<[R]> +where + Box<[R::Target]>: FfiConvert<'itm, C>, +{ + type RustStore = as FfiConvert<'itm, C>>::RustStore; + type FfiStore = as FfiConvert<'itm, C>>::FfiStore; + + fn into_repr_c(self, store: &'itm mut Self::RustStore) -> C { + transmute_into_target_boxed_slice(self).into_ffi(store) + } + + unsafe fn try_from_repr_c(source: C, store: &'itm mut Self::FfiStore) -> Result { + >::try_from_ffi(source, store) + .and_then(|output| transmute_from_target_boxed_slice(output)) + } +} + +impl CWrapperType> for Box<[R]> +where + Box<[R::Target]>: FfiWrapperType, + as FfiWrapperType>::InputType: WrapperTypeOf, + as FfiWrapperType>::ReturnType: WrapperTypeOf, +{ + type InputType = < as FfiWrapperType>::InputType as WrapperTypeOf>::Type; + type ReturnType = + < as FfiWrapperType>::ReturnType as WrapperTypeOf>::Type; +} +impl COutPtr> for Box<[R]> +where + Box<[R::Target]>: FfiOutPtr, +{ + type OutPtr = as FfiOutPtr>::OutPtr; +} +impl COutPtrWrite> for Box<[R]> +where + Box<[R::Target]>: FfiOutPtrWrite, +{ + unsafe fn write_out(self, out_ptr: *mut Self::OutPtr) { + FfiOutPtrWrite::write_out(transmute_into_target_boxed_slice(self), out_ptr); + } +} +impl COutPtrRead> for Box<[R]> +where + Box<[R::Target]>: FfiOutPtrRead, +{ + unsafe fn try_read_out(out_ptr: Self::OutPtr) -> Result { + >::try_read_out(out_ptr) + .and_then(|output| transmute_from_target_boxed_slice(output)) + } +} + impl<'slice, R: Transmute> CType<&'slice [Transparent]> for &'slice [R] where &'slice [R::Target]: FfiType, @@ -1454,6 +1609,11 @@ unsafe impl NonLocal> for Box where { } // SAFETY: Type doesn't return a reference to the store if the inner type doesn't +unsafe impl 
NonLocal> for Box<[R]> where + Box<[R::Target]>: Ir + NonLocal< as Ir>::Type> +{ +} +// SAFETY: Type doesn't return a reference to the store if the inner type doesn't unsafe impl<'slice, R: Transmute> NonLocal<&'slice [Transparent]> for &'slice [R] where &'slice [R::Target]: Ir + NonLocal<<&'slice [R::Target] as Ir>::Type> { @@ -1507,6 +1667,26 @@ unsafe fn transmute_from_target_box(source: Box) -> Res Ok(Box::from_raw(Box::into_raw(source).cast::())) } +#[allow(clippy::boxed_local)] +fn transmute_into_target_boxed_slice(mut source: Box<[R]>) -> Box<[R::Target]> { + let (ptr, len) = (source.as_mut_ptr().cast::(), source.len()); + // SAFETY: `R` is guaranteed to be transmutable into `R::Target` + unsafe { Box::from_raw(core::slice::from_raw_parts_mut(ptr, len)) } +} +#[allow(clippy::boxed_local)] +unsafe fn transmute_from_target_boxed_slice( + mut source: Box<[R::Target]>, +) -> Result> { + if !source.iter().all(|item| R::is_valid(item)) { + return Err(FfiReturn::TrapRepresentation); + } + + Ok(Box::from_raw(core::slice::from_raw_parts_mut( + source.as_mut_ptr().cast(), + source.len(), + ))) +} + fn transmute_into_target_slice_ref(source: &[R]) -> &[R::Target] { let (ptr, len) = (source.as_ptr().cast::(), source.len()); // SAFETY: `R` is guaranteed to be transmutable into `R::Target` diff --git a/ffi/src/slice.rs b/ffi/src/slice.rs index 1583e915e0b..d66cf3d0200 100644 --- a/ffi/src/slice.rs +++ b/ffi/src/slice.rs @@ -165,6 +165,17 @@ impl OutBoxedSlice { self.1 } + /// Create [`Self`] from a `Box<[T]>` + pub fn from_boxed_slice(source: Option>) -> Self { + source.map_or_else( + || Self(core::ptr::null_mut(), 0), + |boxed_slice| { + let mut boxed_slice = core::mem::ManuallyDrop::new(boxed_slice); + Self(boxed_slice.as_mut_ptr(), boxed_slice.len()) + }, + ) + } + /// Create [`Self`] from a `Vec` pub fn from_vec(source: Option>) -> Self { source.map_or_else( diff --git a/ffi/tests/export_getset.rs b/ffi/tests/export_getset.rs index 3a059d39c30..bb8bbf0cba5 100644 --- a/ffi/tests/export_getset.rs +++ b/ffi/tests/export_getset.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use std::mem::MaybeUninit; diff --git a/ffi/tests/export_shared_fns.rs b/ffi/tests/export_shared_fns.rs index 0103a171646..b254c657d42 100644 --- a/ffi/tests/export_shared_fns.rs +++ b/ffi/tests/export_shared_fns.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction)] +#![allow(unsafe_code)] use std::{cmp::Ordering, mem::MaybeUninit}; diff --git a/ffi/tests/ffi_export.rs b/ffi/tests/ffi_export.rs index fc3a661fcc6..da6ebaa4d20 100644 --- a/ffi/tests/ffi_export.rs +++ b/ffi/tests/ffi_export.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code, clippy::pedantic)] use std::{alloc, collections::BTreeMap, mem::MaybeUninit}; @@ -55,7 +55,7 @@ pub enum DataCarryingEnum { D, } -/// ReprC struct +/// `ReprC` struct #[derive(Clone, Copy, PartialEq, Eq, FfiType)] #[repr(C)] pub struct RobustReprCStruct { @@ -128,6 +128,12 @@ impl OpaqueStruct { } } +#[ffi_export] +/// Take and return boxed slice +pub fn freestanding_with_boxed_slice(item: Box<[u8]>) -> Box<[u8]> { + item +} + #[ffi_export] /// Take and return byte pub fn freestanding_with_option(item: Option) -> Option { @@ -344,7 +350,7 @@ fn into_iter_item_impl_into() { ]; let mut ffi_struct = get_new_struct(); - let mut tokens_store = Default::default(); + let mut tokens_store = Vec::default(); let tokens_ffi = tokens.clone().into_ffi(&mut tokens_store); let mut 
output = MaybeUninit::new(core::ptr::null_mut()); @@ -428,6 +434,29 @@ fn return_option() { } } +#[test] +#[webassembly_test::webassembly_test] +fn take_and_return_boxed_slice() { + let input: Box<[u8]> = [12u8, 42u8].into(); + let mut output = MaybeUninit::new(OutBoxedSlice::from_raw_parts(core::ptr::null_mut(), 0)); + let mut in_store = Default::default(); + + unsafe { + assert_eq!( + FfiReturn::Ok, + __freestanding_with_boxed_slice( + FfiConvert::into_ffi(input, &mut in_store), + output.as_mut_ptr() + ) + ); + + let output = output.assume_init(); + assert_eq!(output.len(), 2); + let boxed_slice = Box::<[u8]>::try_read_out(output).expect("Valid"); + assert_eq!(boxed_slice, [12u8, 42u8].into()); + } +} + #[test] #[webassembly_test::webassembly_test] fn take_and_return_option_without_niche() { @@ -550,7 +579,7 @@ fn return_empty_tuple_result() { #[webassembly_test::webassembly_test] fn array_to_pointer() { let array = [1_u8]; - let mut store = Default::default(); + let mut store = Option::default(); let ptr: *mut [u8; 1] = array.into_ffi(&mut store); let mut output = MaybeUninit::new([0_u8]); @@ -767,7 +796,7 @@ fn return_vec_of_boxed_opaques() { fn array_of_opaques() { let input = [OpaqueStruct::default(), OpaqueStruct::default()]; let mut output = MaybeUninit::new([core::ptr::null_mut(), core::ptr::null_mut()]); - let mut store = Default::default(); + let mut store = Option::default(); unsafe { assert_eq!( diff --git a/ffi/tests/ffi_export_import_u128_i128.rs b/ffi/tests/ffi_export_import_u128_i128.rs index a4c7b5352fa..cbeb7892a47 100644 --- a/ffi/tests/ffi_export_import_u128_i128.rs +++ b/ffi/tests/ffi_export_import_u128_i128.rs @@ -130,7 +130,7 @@ fn i128_slice_conversion() { fn u128_vec_conversion() { let values = u128_values().to_vec(); - assert_eq!(values, freestanding_u128_vec(values.clone())) + assert_eq!(values, freestanding_u128_vec(values.clone())); } #[test] @@ -138,7 +138,7 @@ fn u128_vec_conversion() { fn i128_vec_conversion() { let values = i128_values().to_vec(); - assert_eq!(values, freestanding_i128_vec(values.clone())) + assert_eq!(values, freestanding_i128_vec(values.clone())); } #[test] @@ -147,7 +147,7 @@ fn u128_box_conversion() { let values = u128_values(); for value in values { let value = Box::new(value); - assert_eq!(value, freestanding_u128_box(value.clone())) + assert_eq!(value, freestanding_u128_box(value.clone())); } } @@ -158,7 +158,7 @@ fn i128_box_conversion() { for value in values { let value = Box::new(value); - assert_eq!(value, freestanding_i128_box(value.clone())) + assert_eq!(value, freestanding_i128_box(value.clone())); } } @@ -167,7 +167,7 @@ fn i128_box_conversion() { fn u128_array_conversion() { let values = u128_values(); - assert_eq!(values, freestanding_u128_array(values)) + assert_eq!(values, freestanding_u128_array(values)); } #[test] @@ -175,5 +175,5 @@ fn u128_array_conversion() { fn i128_array_conversion() { let values = i128_values(); - assert_eq!(values, freestanding_i128_array(values)) + assert_eq!(values, freestanding_i128_array(values)); } diff --git a/ffi/tests/ffi_import.rs b/ffi/tests/ffi_import.rs index 041c44e16fd..4d560620ad9 100644 --- a/ffi/tests/ffi_import.rs +++ b/ffi/tests/ffi_import.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use iroha_ffi::{ffi, ffi_import, LocalRef, LocalSlice}; @@ -25,6 +25,11 @@ pub fn freestanding_returns_local_slice(input: &[(u32, u32)]) -> &[(u32, u32)] { unreachable!("replaced by ffi_import") } +#[ffi_import] +pub fn 
freestanding_returns_boxed_slice(input: Box<[u32]>) -> Box<[u32]> { + unreachable!("replaced by ffi_import") +} + #[ffi_import] pub fn freestanding_returns_iterator( input: impl IntoIterator, @@ -76,6 +81,14 @@ fn vec_of_tuples_is_coppied_when_returned() { assert_eq!(in_tuple, *out_tuple); } +#[test] +#[webassembly_test::webassembly_test] +fn boxed_slice_of_primitives() { + let in_boxed_slice = vec![420_u32, 420_u32].into_boxed_slice(); + let out_boxed_slice: Box<[u32]> = freestanding_returns_boxed_slice(in_boxed_slice.clone()); + assert_eq!(in_boxed_slice, out_boxed_slice); +} + #[test] #[webassembly_test::webassembly_test] fn return_iterator() { @@ -147,11 +160,21 @@ mod ffi { input: SliceRef>, output: *mut OutBoxedSlice>, ) -> FfiReturn { - let input = input.into_rust().map(|slice| slice.to_vec()); + let input = input.into_rust().map(<[_]>::to_vec); output.write(OutBoxedSlice::from_vec(input)); FfiReturn::Ok } + #[no_mangle] + unsafe extern "C" fn __freestanding_returns_boxed_slice( + input: SliceMut, + output: *mut OutBoxedSlice, + ) -> FfiReturn { + let input = input.into_rust().map(|slice| (&*slice).into()); + output.write(OutBoxedSlice::from_boxed_slice(input)); + FfiReturn::Ok + } + #[no_mangle] unsafe extern "C" fn __freestanding_returns_iterator( input: SliceMut, diff --git a/ffi/tests/ffi_import_opaque.rs b/ffi/tests/ffi_import_opaque.rs index 8567258092b..c9ffe7c4fb4 100644 --- a/ffi/tests/ffi_import_opaque.rs +++ b/ffi/tests/ffi_import_opaque.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use std::collections::BTreeMap; @@ -130,10 +130,12 @@ fn fallible_output() { //assert!(OpaqueStruct::fallible_int_output(false).is_err()); } +#[allow(trivial_casts)] fn compare_opaque_eq(opaque1: &T, opaque2: &T) { unsafe { - let opaque1: &*const U = core::mem::transmute(opaque1); - let opaque2: &*const U = core::mem::transmute(opaque2); + let opaque1: &*const U = &*(opaque1 as *const T).cast::<*const U>(); + let opaque2: &*const U = &*(opaque2 as *const T).cast::<*const U>(); + assert_eq!(**opaque1, **opaque2) } } @@ -203,7 +205,7 @@ mod ffi { output: *mut *mut ExternOpaqueStruct, ) -> iroha_ffi::FfiReturn { let mut handle = *Box::from_raw(handle); - let mut store = Default::default(); + let mut store = Vec::default(); let params: Vec<(u8, ExternValue)> = FfiConvert::try_from_ffi(params, &mut store).expect("Valid"); handle.params = params.into_iter().collect(); diff --git a/ffi/tests/generics.rs b/ffi/tests/generics.rs index 2d92b2e8205..bd0b551f899 100644 --- a/ffi/tests/generics.rs +++ b/ffi/tests/generics.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use std::mem::MaybeUninit; diff --git a/ffi/tests/import_getset.rs b/ffi/tests/import_getset.rs index 0f0962e7988..56d5a733dae 100644 --- a/ffi/tests/import_getset.rs +++ b/ffi/tests/import_getset.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use iroha_ffi::{ffi, ffi_import}; diff --git a/ffi/tests/import_shared_fns.rs b/ffi/tests/import_shared_fns.rs index df0c232d631..cab3d5fd658 100644 --- a/ffi/tests/import_shared_fns.rs +++ b/ffi/tests/import_shared_fns.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction)] +#![allow(unsafe_code)] use iroha_ffi::{ffi, ffi_import}; diff --git a/ffi/tests/transparent.rs b/ffi/tests/transparent.rs index 1ef58685226..01f93912add 100644 --- a/ffi/tests/transparent.rs +++ b/ffi/tests/transparent.rs @@ -1,4 +1,4 @@ 
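The hand-written `__freestanding_returns_boxed_slice` shim above mirrors what the macros generate for `Box<[T]>`: ownership crosses the boundary as a raw (pointer, length) pair and is reassembled on the other side. A self-contained sketch of that round trip using only std — `RawSlice`, `into_raw`, and `from_raw` are assumed stand-ins, not the iroha_ffi API:

use std::mem::ManuallyDrop;

// Minimal stand-in for OutBoxedSlice<T>: a raw (pointer, length) pair that
// transfers ownership of a boxed slice across an FFI boundary.
struct RawSlice<T> {
    ptr: *mut T,
    len: usize,
}

fn into_raw<T>(boxed: Box<[T]>) -> RawSlice<T> {
    // ManuallyDrop keeps the Box from freeing the allocation we hand out,
    // matching the `from_boxed_slice` helper added in ffi/src/slice.rs.
    let mut boxed = ManuallyDrop::new(boxed);
    RawSlice { ptr: boxed.as_mut_ptr(), len: boxed.len() }
}

/// # Safety
/// `raw` must come from `into_raw` and must not be reused afterwards.
unsafe fn from_raw<T>(raw: RawSlice<T>) -> Box<[T]> {
    Box::from_raw(std::slice::from_raw_parts_mut(raw.ptr, raw.len))
}

fn main() {
    let raw = into_raw::<u32>(vec![420, 420].into_boxed_slice());
    let round_tripped = unsafe { from_raw(raw) };
    assert_eq!(&*round_tripped, &[420, 420]);
}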
-#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use std::{alloc, marker::PhantomData, mem::MaybeUninit}; @@ -69,6 +69,7 @@ impl TransparentStruct { } } + #[must_use] pub fn with_payload(mut self, payload: GenericTransparentStruct<()>) -> Self { self.payload = payload; self @@ -167,7 +168,7 @@ fn transparent_vec_to_vec() { TransparentStruct::new(GenericTransparentStruct::new(3)), ]; - let mut store = Default::default(); + let mut store = Vec::default(); let mut output = MaybeUninit::new(OutBoxedSlice::from_raw_parts(core::ptr::null_mut(), 0)); unsafe { diff --git a/ffi/tests/unambiguous.rs b/ffi/tests/unambiguous.rs index 167d375a20b..4392fb48e2f 100644 --- a/ffi/tests/unambiguous.rs +++ b/ffi/tests/unambiguous.rs @@ -1,4 +1,4 @@ -#![allow(unsafe_code, clippy::restriction, clippy::pedantic)] +#![allow(unsafe_code)] use std::mem::MaybeUninit; @@ -13,7 +13,7 @@ pub enum Ambiguous { None, } -/// FfiStruct +/// `FfiStruct` #[derive(Clone, Copy, FfiType)] pub struct FfiStruct; diff --git a/flake.nix b/flake.nix index dc8f5c417e2..3b067fee410 100755 --- a/flake.nix +++ b/flake.nix @@ -161,8 +161,11 @@ inherit mkIroha; packages.default = mkIroha {}; - - packages.appimage = nix-appimage.mkappimage.${system} { drv = mkIroha {}; name="iroha"; }; + + packages.appimage = nix-appimage.mkappimage.${system} { + drv = mkIroha {}; + name = "iroha"; + }; packages.targets = builtins.listToAttrs (map (target: { name = target; @@ -193,16 +196,27 @@ formatter = alejandra.packages.${system}.default; - devShells.default = pkgs.mkShell { - nativeBuildInputs = with pkgs; [ - pkg-config - openssl.dev - libiconvReal - zlib - (fenix'.toolchainOf toolchainSpec).completeToolchain + devShells.default = let + toolchainPkgs = fenix'.toolchainOf toolchainSpec; + toolchain = fenix'.combine [ + toolchainPkgs.rustc + toolchainPkgs.cargo + toolchainPkgs.clippy + toolchainPkgs.rustfmt + toolchainPkgs.rust-std ]; + in + pkgs.mkShell { + nativeBuildInputs = with pkgs; [ + pkg-config + openssl.dev + libiconvReal + zlib + toolchain + fenix'.rust-analyzer + ]; - IROHA_SKIP_WASM_CHECKS = true; - }; + IROHA_SKIP_WASM_CHECKS = true; + }; }); } diff --git a/futures/Cargo.toml b/futures/Cargo.toml index a9cc5e8f75a..9ac3a719bfa 100644 --- a/futures/Cargo.toml +++ b/futures/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [features] default = [] # Support lightweight telemetry, including diagnostics @@ -23,4 +26,4 @@ serde = { workspace = true, features = ["derive"] } tokio = { workspace = true, features = ["rt", "rt-multi-thread", "macros"] } [dev-dependencies] -tokio-stream = "0.1.11" +tokio-stream = "0.1.14" diff --git a/futures/derive/Cargo.toml b/futures/derive/Cargo.toml index b1850f24d02..cd0afdb1cb0 100644 --- a/futures/derive/Cargo.toml +++ b/futures/derive/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [features] default = ["telemetry"] # Support lightweight telemetry, including diagnostics @@ -16,7 +19,9 @@ telemetry = [] proc-macro = true [dependencies] -syn = { workspace = true, features = ["default", "full"] } +iroha_macro_utils = { workspace = true } + +syn2 = { workspace = true, features = ["default", "full"] } quote = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } diff --git a/futures/derive/src/lib.rs b/futures/derive/src/lib.rs index e4c14935b35..5212c38efe2 100644 --- 
a/futures/derive/src/lib.rs +++ b/futures/derive/src/lib.rs @@ -1,25 +1,19 @@ //! Crate with derive macros for futures - -#![allow( - clippy::expect_used, - clippy::str_to_string, - clippy::std_instead_of_core -)] - -use proc_macro::TokenStream; -use proc_macro2::TokenStream as TokenStream2; -use proc_macro_error::{abort, proc_macro_error}; +use iroha_macro_utils::Emitter; +use manyhow::{emit, manyhow}; +use proc_macro2::TokenStream; use quote::quote; -use syn::{parse_macro_input, Generics, ItemFn, ReturnType, Signature}; +use syn2::{Generics, ItemFn, ReturnType, Signature}; fn impl_telemetry_future( + emitter: &mut Emitter, ItemFn { attrs, vis, sig, block, }: ItemFn, -) -> TokenStream2 { +) -> TokenStream { let Signature { asyncness, ident, @@ -34,8 +28,9 @@ fn impl_telemetry_future( } = sig; if asyncness.is_none() { - abort!( - asyncness, + emit!( + emitter, + ident, "Only async functions can be instrumented for `telemetry_future`" ); } @@ -57,14 +52,23 @@ fn impl_telemetry_future( } /// Macro for wrapping future for getting telemetry info about poll times and numbers -#[proc_macro_error] +#[manyhow] #[proc_macro_attribute] -pub fn telemetry_future(_args: TokenStream, input: TokenStream) -> TokenStream { - let input = parse_macro_input!(input as ItemFn); - if cfg!(feature = "telemetry") { - impl_telemetry_future(input) +pub fn telemetry_future(args: TokenStream, input: TokenStream) -> TokenStream { + let mut emitter = Emitter::new(); + + if !args.is_empty() { + emit!(emitter, args, "Unexpected arguments"); + } + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + let result = if cfg!(feature = "telemetry") { + impl_telemetry_future(&mut emitter, input) } else { quote! { #input } - } - .into() + }; + + emitter.finish_token_stream_with(result) } diff --git a/futures/src/lib.rs b/futures/src/lib.rs index c8a01486346..f45fa002b71 100644 --- a/futures/src/lib.rs +++ b/futures/src/lib.rs @@ -1,5 +1,4 @@ //! 
Crate with various iroha futures -#![allow(clippy::std_instead_of_core, clippy::std_instead_of_alloc)] use std::{ future::Future, pin::Pin, @@ -50,7 +49,6 @@ pub struct TelemetryConversionError; impl TryFrom<&Telemetry> for FuturePollTelemetry { type Error = TelemetryConversionError; - #[allow(clippy::unwrap_in_result, clippy::unwrap_used)] fn try_from( Telemetry { target, fields }: &Telemetry, ) -> Result { @@ -85,7 +83,6 @@ impl TryFrom<&Telemetry> for FuturePollTelemetry { impl TryFrom for FuturePollTelemetry { type Error = TelemetryConversionError; - #[allow(clippy::unwrap_in_result, clippy::unwrap_used)] fn try_from(Telemetry { target, fields }: Telemetry) -> Result { if target != "iroha_futures" && fields.len() != 3 { return Err(TelemetryConversionError); diff --git a/futures/tests/basic.rs b/futures/tests/basic.rs index c3a17c6ba7a..a1514e01ab1 100644 --- a/futures/tests/basic.rs +++ b/futures/tests/basic.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::{thread, time::Duration}; use iroha_config::base::proxy::Builder; diff --git a/genesis/Cargo.toml b/genesis/Cargo.toml index 32a2ec623c2..994afeb51a9 100644 --- a/genesis/Cargo.toml +++ b/genesis/Cargo.toml @@ -1,8 +1,14 @@ [package] name = "iroha_genesis" -version = "2.0.0-pre-rc.19" -authors = ["Iroha 2 team "] -edition = "2021" + +edition.workspace = true +version.workspace = true +authors.workspace = true + +license.workspace = true + +[lints] +workspace = true [dependencies] iroha_config = { workspace = true } diff --git a/genesis/src/lib.rs b/genesis/src/lib.rs index ca3a1e3507d..5fd2f1fc56a 100644 --- a/genesis/src/lib.rs +++ b/genesis/src/lib.rs @@ -1,13 +1,5 @@ //! Genesis-related logic and constructs. Contains the `GenesisBlock`, //! `RawGenesisBlock` and the `RawGenesisBlockBuilder` structures. -#![allow( - clippy::module_name_repetitions, - clippy::new_without_default, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc, - clippy::arithmetic_side_effects -)] - use std::{ fmt::Debug, fs::{self, File}, @@ -21,8 +13,8 @@ use iroha_config::genesis::Configuration; use iroha_crypto::{KeyPair, PublicKey}; use iroha_data_model::{ asset::AssetDefinition, + executor::Executor, prelude::{Metadata, *}, - validator::Validator, }; use iroha_schema::IntoSchema; use once_cell::sync::Lazy; @@ -37,7 +29,7 @@ pub static GENESIS_ACCOUNT_ID: Lazy = /// Genesis transaction #[derive(Debug, Clone)] -pub struct GenesisTransaction(pub VersionedSignedTransaction); +pub struct GenesisTransaction(pub SignedTransaction); /// [`GenesisNetwork`] contains initial transactions and genesis setup related parameters. #[derive(Debug, Clone)] @@ -66,10 +58,10 @@ impl GenesisNetwork { .ok_or_else(|| eyre!("Genesis account private key is empty."))?, )?; #[cfg(not(test))] - // First instruction should be Validator upgrade. + // First instruction should be Executor upgrade. // This makes possible to grant permissions to users in genesis. 
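The `std::iter::once(..).chain(..)` construction that follows the comment above is what guarantees the Executor upgrade lands before any user-supplied genesis transaction. A stripped-down sketch of the ordering, with strings standing in for the transaction builder types:

fn main() {
    // Placeholder "transactions": the first element models the Executor
    // upgrade, the rest model user-provided genesis transactions.
    let upgrade = "upgrade executor";
    let user_txs = vec!["register domain", "register account"];

    // `chain` accepts any IntoIterator, so the Vec can be passed directly.
    let ordered: Vec<_> = std::iter::once(upgrade).chain(user_txs).collect();

    assert_eq!(ordered[0], "upgrade executor");
}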
let transactions_iter = std::iter::once(GenesisTransactionBuilder { - isi: vec![UpgradeBox::new(Validator::try_from(raw_block.validator)?).into()], + isi: vec![UpgradeExpr::new(Executor::try_from(raw_block.executor)?).into()], }) .chain(raw_block.transactions.into_iter()); @@ -101,8 +93,8 @@ impl GenesisNetwork { pub struct RawGenesisBlock { /// Transactions transactions: Vec, - /// Runtime Validator - validator: ValidatorMode, + /// Runtime Executor + executor: ExecutorMode, } impl RawGenesisBlock { @@ -127,7 +119,7 @@ impl RawGenesisBlock { "Failed to deserialize raw genesis block from {:?}", &path ))?; - raw_genesis_block.validator.set_genesis_path(path); + raw_genesis_block.executor.set_genesis_path(path); Ok(raw_genesis_block) } @@ -137,18 +129,18 @@ impl RawGenesisBlock { } } -/// Ways to provide validator either directly as base64 encoded string or as path to wasm file +/// Ways to provide executor either directly as base64 encoded string or as path to wasm file #[derive(Debug, Clone, From, Deserialize, Serialize, IntoSchema)] #[serde(untagged)] -pub enum ValidatorMode { - /// Path to validator wasm file +pub enum ExecutorMode { + /// Path to executor wasm file // In the first place to initially try to parse path - Path(ValidatorPath), - /// Validator encoded as base64 string - Inline(Validator), + Path(ExecutorPath), + /// Executor encoded as base64 string + Inline(Executor), } -impl ValidatorMode { +impl ExecutorMode { fn set_genesis_path(&mut self, genesis_path: impl AsRef) { if let Self::Path(path) = self { path.set_genesis_path(genesis_path); @@ -156,38 +148,38 @@ impl ValidatorMode { } } -impl TryFrom for Validator { +impl TryFrom for Executor { type Error = ErrReport; - fn try_from(value: ValidatorMode) -> Result { + fn try_from(value: ExecutorMode) -> Result { match value { - ValidatorMode::Inline(validator) => Ok(validator), - ValidatorMode::Path(ValidatorPath(relative_validator_path)) => { - let wasm = fs::read(&relative_validator_path) - .wrap_err(format!("Failed to open {:?}", &relative_validator_path))?; - Ok(Validator::new(WasmSmartContract::from_compiled(wasm))) + ExecutorMode::Inline(executor) => Ok(executor), + ExecutorMode::Path(ExecutorPath(relative_executor_path)) => { + let wasm = fs::read(&relative_executor_path) + .wrap_err(format!("Failed to open {:?}", &relative_executor_path))?; + Ok(Executor::new(WasmSmartContract::from_compiled(wasm))) } } } } -/// Path to the validator relative to genesis location +/// Path to the executor relative to genesis location /// /// If path is absolute it will be used directly otherwise it will be treated as relative to genesis location. #[derive(Debug, Clone, Deserialize, Serialize, IntoSchema)] #[schema(transparent = "String")] #[serde(transparent)] #[repr(transparent)] -pub struct ValidatorPath(pub PathBuf); +pub struct ExecutorPath(pub PathBuf); -impl ValidatorPath { +impl ExecutorPath { fn set_genesis_path(&mut self, genesis_path: impl AsRef) { - let path_to_validator = genesis_path + let path_to_executor = genesis_path .as_ref() .parent() .expect("Genesis must be in some directory") .join(&self.0); - self.0 = path_to_validator; + self.0 = path_to_executor; } } @@ -198,25 +190,25 @@ impl ValidatorPath { #[repr(transparent)] pub struct GenesisTransactionBuilder { /// Instructions - isi: Vec, + isi: Vec, } impl GenesisTransactionBuilder { - /// Convert [`GenesisTransactionBuilder`] into [`VersionedSignedTransaction`] with signature. + /// Convert [`GenesisTransactionBuilder`] into [`SignedTransaction`] with signature. 
/// /// # Errors /// Fails if signing or accepting fails. pub fn sign( self, genesis_key_pair: KeyPair, - ) -> core::result::Result { + ) -> core::result::Result { TransactionBuilder::new(GENESIS_ACCOUNT_ID.clone()) .with_instructions(self.isi) .sign(genesis_key_pair) } /// Add new instruction to the transaction. - pub fn append_instruction(&mut self, instruction: InstructionBox) { + pub fn append_instruction(&mut self, instruction: InstructionExpr) { self.isi.push(instruction); } } @@ -241,37 +233,38 @@ pub struct RawGenesisDomainBuilder { state: S, } -mod validator_state { - use super::ValidatorMode; +mod executor_state { + use super::ExecutorMode; #[cfg_attr(test, derive(Clone))] - pub struct Set(pub ValidatorMode); + pub struct Set(pub ExecutorMode); #[derive(Clone, Copy)] pub struct Unset; } -impl RawGenesisBlockBuilder { - /// Initiate the building process. - pub fn new() -> Self { +impl Default for RawGenesisBlockBuilder { + fn default() -> Self { // Do not add `impl Default`. While it can technically be // regarded as a default constructor, this builder should not // be used in contexts where `Default::default()` is likely to // be called. Self { transaction: GenesisTransactionBuilder { isi: Vec::new() }, - state: validator_state::Unset, + state: executor_state::Unset, } } +} - /// Set the validator. - pub fn validator( +impl RawGenesisBlockBuilder { + /// Set the executor. + pub fn executor( self, - validator: impl Into, - ) -> RawGenesisBlockBuilder { + executor: impl Into, + ) -> RawGenesisBlockBuilder { RawGenesisBlockBuilder { transaction: self.transaction, - state: validator_state::Set(validator.into()), + state: executor_state::Set(executor.into()), } } } @@ -294,7 +287,7 @@ impl RawGenesisBlockBuilder { let new_domain = Domain::new(domain_id.clone()).with_metadata(metadata); self.transaction .isi - .push(RegisterBox::new(new_domain).into()); + .push(RegisterExpr::new(new_domain).into()); RawGenesisDomainBuilder { transaction: self.transaction, domain_id, @@ -303,12 +296,12 @@ impl RawGenesisBlockBuilder { } } -impl RawGenesisBlockBuilder { +impl RawGenesisBlockBuilder { /// Finish building and produce a `RawGenesisBlock`. pub fn build(self) -> RawGenesisBlock { RawGenesisBlock { transactions: vec![self.transaction], - validator: self.state.0, + executor: self.state.0, } } } @@ -325,11 +318,11 @@ impl RawGenesisDomainBuilder { /// Add an account to this domain without a public key. 
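The `executor_state::{Set, Unset}` parameter above is a typestate: `build` is only implemented for the builder whose state is `Set`, so forgetting `.executor(..)` becomes a compile error instead of a runtime one. A reduced sketch of the pattern, with illustrative names and a `String` standing in for `ExecutorMode`:

struct Unset;
struct Set(String); // stand-in for executor_state::Set(ExecutorMode)

struct Builder<S> {
    state: S,
}

impl Default for Builder<Unset> {
    fn default() -> Self {
        Builder { state: Unset }
    }
}

impl Builder<Unset> {
    // Consuming `self` moves the builder into the "executor set" state.
    fn executor(self, executor: impl Into<String>) -> Builder<Set> {
        Builder { state: Set(executor.into()) }
    }
}

impl Builder<Set> {
    // `build` exists only once the executor has been provided.
    fn build(self) -> String {
        self.state.0
    }
}

fn main() {
    let block = Builder::default().executor("./executor.wasm").build();
    assert_eq!(block, "./executor.wasm");
    // Builder::default().build(); // would not compile: no `build` on Builder<Unset>
}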
#[cfg(test)] - pub fn account_without_public_key(mut self, account_name: Name) -> Self { + fn account_without_public_key(mut self, account_name: Name) -> Self { let account_id = AccountId::new(account_name, self.domain_id.clone()); self.transaction .isi - .push(RegisterBox::new(Account::new(account_id, [])).into()); + .push(RegisterExpr::new(Account::new(account_id, [])).into()); self } @@ -347,7 +340,7 @@ impl RawGenesisDomainBuilder { ) -> Self { let account_id = AccountId::new(account_name, self.domain_id.clone()); let register = - RegisterBox::new(Account::new(account_id, [public_key]).with_metadata(metadata)); + RegisterExpr::new(Account::new(account_id, [public_key]).with_metadata(metadata)); self.transaction.isi.push(register.into()); self } @@ -363,7 +356,7 @@ impl RawGenesisDomainBuilder { }; self.transaction .isi - .push(RegisterBox::new(asset_definition).into()); + .push(RegisterExpr::new(asset_definition).into()); self } } @@ -374,21 +367,20 @@ mod tests { use super::*; - fn dummy_validator() -> ValidatorMode { - ValidatorMode::Path(ValidatorPath("./validator.wasm".into())) + fn dummy_executor() -> ExecutorMode { + ExecutorMode::Path(ExecutorPath("./executor.wasm".into())) } #[test] - #[allow(clippy::expect_used)] fn load_new_genesis_block() -> Result<()> { let (genesis_public_key, genesis_private_key) = KeyPair::generate()?.into(); let (alice_public_key, _) = KeyPair::generate()?.into(); let _genesis_block = GenesisNetwork::from_configuration( - RawGenesisBlockBuilder::new() + RawGenesisBlockBuilder::default() .domain("wonderland".parse()?) .account("alice".parse()?, alice_public_key) .finish_domain() - .validator(dummy_validator()) + .executor(dummy_executor()) .build(), Some( &ConfigurationProxy { @@ -402,11 +394,10 @@ mod tests { Ok(()) } - #[allow(clippy::unwrap_used)] #[test] fn genesis_block_builder_example() { let public_key = "ed0120204E9593C3FFAF4464A6189233811C297DD4CE73ABA167867E4FBD4F8C450ACB"; - let mut genesis_builder = RawGenesisBlockBuilder::new(); + let mut genesis_builder = RawGenesisBlockBuilder::default(); genesis_builder = genesis_builder .domain("wonderland".parse().unwrap()) @@ -421,17 +412,17 @@ mod tests { .asset("hats".parse().unwrap(), AssetValueType::BigQuantity) .finish_domain(); - // In real cases validator should be constructed from a wasm blob - let finished_genesis_block = genesis_builder.validator(dummy_validator()).build(); + // In real cases executor should be constructed from a wasm blob + let finished_genesis_block = genesis_builder.executor(dummy_executor()).build(); { let domain_id: DomainId = "wonderland".parse().unwrap(); assert_eq!( finished_genesis_block.transactions[0].isi[0], - RegisterBox::new(Domain::new(domain_id.clone())).into() + RegisterExpr::new(Domain::new(domain_id.clone())).into() ); assert_eq!( finished_genesis_block.transactions[0].isi[1], - RegisterBox::new(Account::new( + RegisterExpr::new(Account::new( AccountId::new("alice".parse().unwrap(), domain_id.clone()), [] )) @@ -439,7 +430,7 @@ mod tests { ); assert_eq!( finished_genesis_block.transactions[0].isi[2], - RegisterBox::new(Account::new( + RegisterExpr::new(Account::new( AccountId::new("bob".parse().unwrap(), domain_id), [] )) @@ -450,11 +441,11 @@ mod tests { let domain_id: DomainId = "tulgey_wood".parse().unwrap(); assert_eq!( finished_genesis_block.transactions[0].isi[3], - RegisterBox::new(Domain::new(domain_id.clone())).into() + RegisterExpr::new(Domain::new(domain_id.clone())).into() ); assert_eq!( finished_genesis_block.transactions[0].isi[4], - 
RegisterBox::new(Account::new( + RegisterExpr::new(Account::new( AccountId::new("Cheshire_Cat".parse().unwrap(), domain_id), [] )) @@ -465,11 +456,11 @@ mod tests { let domain_id: DomainId = "meadow".parse().unwrap(); assert_eq!( finished_genesis_block.transactions[0].isi[5], - RegisterBox::new(Domain::new(domain_id.clone())).into() + RegisterExpr::new(Domain::new(domain_id.clone())).into() ); assert_eq!( finished_genesis_block.transactions[0].isi[6], - RegisterBox::new(Account::new( + RegisterExpr::new(Account::new( AccountId::new("Mad_Hatter".parse().unwrap(), domain_id), [public_key.parse().unwrap()], )) @@ -477,7 +468,7 @@ mod tests { ); assert_eq!( finished_genesis_block.transactions[0].isi[7], - RegisterBox::new(AssetDefinition::big_quantity( + RegisterExpr::new(AssetDefinition::big_quantity( "hats#meadow".parse().unwrap() )) .into() diff --git a/lints.toml b/lints.toml deleted file mode 100644 index b36eb51c845..00000000000 --- a/lints.toml +++ /dev/null @@ -1,167 +0,0 @@ -# For all clippy lints please visit: https://rust-lang.github.io/rust-clippy/master/ -deny = [ - 'anonymous_parameters', - 'clippy::all', - 'clippy::dbg_macro', - - # 'clippy::nursery', - 'clippy::debug_assert_with_mut_call', - 'clippy::derive_partial_eq_without_eq', - 'clippy::empty_line_after_outer_attr', - 'clippy::fallible_impl_from', - 'clippy::future_not_send', - 'clippy::iter_with_drain', - 'clippy::mutex_integer', - 'clippy::needless_collect', - 'clippy::path_buf_push_overwrite', - 'clippy::suboptimal_flops', - 'clippy::trailing_empty_array', - 'clippy::transmute_undefined_repr', - 'clippy::trivial_regex', - 'clippy::unused_peekable', - 'clippy::unused_rounding', - - 'clippy::pedantic', - 'future_incompatible', - 'missing_copy_implementations', - 'missing_docs', - 'nonstandard_style', - 'private_doc_tests', - 'rust_2018_idioms', - 'trivial_casts', - 'trivial_numeric_casts', - 'unconditional_recursion', - 'unsafe_code', - 'unused', - 'unused_import_braces', - 'variant_size_differences', - 'unused_tuple_struct_fields', - 'explicit_outlives_requirements', - 'non_ascii_idents', - # TODO: reenable - # 'unreachable_pub', - # 'unsafe_op_in_unsafe_fn', -] - -allow = [ - # These are up to personal taste. We don't want these to be enabled ever. - 'clippy::string_add', - 'unknown_lints', - 'clippy::as_conversions', - 'clippy::else_if_without_else', - 'clippy::enum_glob_use', - 'clippy::exhaustive_enums', - 'clippy::exhaustive_structs', - 'clippy::implicit_return', - 'clippy::inconsistent_struct_constructor', - 'clippy::indexing_slicing', - 'clippy::arithmetic_side_effects', - 'clippy::let_underscore_must_use', - 'clippy::match_wildcard_for_single_variants', - 'clippy::missing_docs_in_private_items', - 'clippy::module_name_repetitions', - 'clippy::pattern_type_mismatch', - 'clippy::shadow_reuse', - 'clippy::shadow_same', - - # These are normally decisions, which need to be audited by a human. - 'clippy::unwrap_in_result', - 'clippy::expect_used', - 'clippy::unreachable', - 'clippy::wildcard_enum_match_arm', - 'clippy::wildcard_imports', - # Our preferred style. - 'clippy::non-ascii-literal', - 'clippy::std_instead_of_core', - - # This lint could be useful in theory. The trade-off of making - # refactoring away from references difficult isn't worth it in all - # cases, so if it is enabled, it should be enabled locally. - 'clippy::pattern_type_mismatch', - - # Style guide. - 'clippy::mod-module-files', - 'clippy::separated-literal-suffix', - # Most trybuild code triggers a false-positive. 
- - # Not all public items should be inline. We only inline **trivial** functions. - 'clippy::missing_inline_in_public_items', - - 'unknown_lints', - - # --- Re-enable candidates ----- - - # Lots of false-positives. - 'clippy::self-named-module-files', - 'clippy::manual_let_else', - - # We often need to shadow the name of the method to specialise. - # As soon as trait specialisation is stable we need to remove it. - 'clippy::same_name_method', - 'clippy::pub_use', - - # Style guide candidate. Explicitly converting the return value to - # () is good for refactoring, and if there is necessary - # processing of the data returned by a function, it should - # **really** be marked as #[must_use] - 'clippy::semicolon_if_nothing_returned', - - # This lint has way too many false-positives, so even enabling it - # as a warning is too much. Instead prefer adding explicit - # `#[deny]` directives - 'clippy::must_use_candidate', - - # Unstable and many false-positives - ## https://rust-lang.github.io/rust-clippy/master/index.html#missing_const_for_fn - 'clippy::missing_const_for_fn', - - # Too much affected code. Often impossible to apply suggestion on stable rust. - 'elided_lifetimes_in_paths', - - # This lint produces a lot of false positives. Recommend local #[deny] directives - 'clippy::use_self', - - # We don't want to manually deny every `clippy::restriction::*` lint. - 'clippy::blanket-clippy-restriction-lints', - - # A lot of false-positive. - 'clippy::partial_pub_fields', - - # Should be enabled per trait impl rather than globally. - 'clippy::missing_trait_methods', - - # We allow this and deny `clippy::semicolon_inside_block`. - 'clippy::semicolon_outside_block', - - # It is debatable whether it's actually easier to read, - # additionally, not all patterns are covered by the inlined syntax - 'clippy::uninlined_format_args', -] - -warn = [ - # These are lints which should really be conveyed to the author, - # but not necessarily fixed. - - 'unknown_lints', - 'clippy::inconsistent_struct_constructor', - 'clippy::match_wildcard_for_single_variants', - 'clippy::arithmetic_side_effects', - 'clippy::option_if_let_else', - 'clippy::or_fun_call', - 'clippy::redundant_pub_crate', - 'clippy::string_lit_as_bytes', - 'clippy::suspicious_operation_groupings', - 'clippy::useless_let_if_seq', - - # unstable - # 'non_exhaustive_omitted_patterns', - - 'single_use_lifetimes', - 'unused_lifetimes', - - # A couple of false positives. - # 'unused_qualifications', - - # Lots of false-positives. 
- # 'unused_crate_dependencies', -] diff --git a/logger/Cargo.toml b/logger/Cargo.toml index 12e94839986..9d3ce2ac20c 100644 --- a/logger/Cargo.toml +++ b/logger/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [dependencies] iroha_config = { workspace = true } iroha_data_model = { workspace = true } @@ -14,12 +17,12 @@ iroha_data_model = { workspace = true } color-eyre = { workspace = true } serde_json = { workspace = true } tracing = { workspace = true } -tracing-core = { workspace = true } -tracing-futures = { workspace = true, features = ["std-future", "std"] } +tracing-core = "0.1.31" +tracing-futures = { version = "0.2.5", default-features = false, features = ["std-future", "std"] } tracing-subscriber = { workspace = true, features = ["fmt", "ansi"] } -tracing-bunyan-formatter = { workspace = true } +tracing-bunyan-formatter = { version = "0.3.9", default-features = false } tokio = { workspace = true, features = ["sync"] } -console-subscriber = { version = "0.1.8", optional = true } +console-subscriber = { version = "0.2.0", optional = true } once_cell = { workspace = true } derive_more = { workspace = true } tracing-error = "0.2.0" @@ -29,7 +32,6 @@ tokio = { workspace = true, features = ["macros", "time", "rt"] } [features] -default = [] tokio-console = ["dep:console-subscriber", "tokio/tracing", "iroha_config/tokio-console"] # Workaround to avoid activating `tokio-console` with `--all-features` flag, because `tokio-console` require `tokio_unstable` rustc flag no-tokio-console = [] diff --git a/logger/src/layer.rs b/logger/src/layer.rs index 1ae5b8303c3..5b17d93cfe4 100644 --- a/logger/src/layer.rs +++ b/logger/src/layer.rs @@ -1,5 +1,4 @@ //! Module for adding layers for events for subscribers -#![allow(clippy::std_instead_of_core, clippy::std_instead_of_alloc)] use std::{ any::TypeId, fmt::Debug, diff --git a/logger/src/lib.rs b/logger/src/lib.rs index aa4abc10751..fceecc1d26c 100644 --- a/logger/src/lib.rs +++ b/logger/src/lib.rs @@ -1,10 +1,4 @@ //! Iroha's logging utilities. -#![allow( - clippy::expect_used, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] - pub mod layer; pub mod telemetry; diff --git a/logger/src/telemetry.rs b/logger/src/telemetry.rs index 1af4bae5eb8..526209daa60 100644 --- a/logger/src/telemetry.rs +++ b/logger/src/telemetry.rs @@ -1,11 +1,5 @@ //! 
Module with telemetry layer for tracing -#![allow( - clippy::module_name_repetitions, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] - use std::{error::Error, fmt::Debug}; use derive_more::{Deref, DerefMut}; diff --git a/logger/tests/configuration.rs b/logger/tests/configuration.rs index 9d353fab8b0..661443ed256 100644 --- a/logger/tests/configuration.rs +++ b/logger/tests/configuration.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::time::Duration; use iroha_data_model::Level; @@ -14,7 +12,7 @@ async fn telemetry_separation_custom() { compact_mode: true, log_file_path: Some("/dev/stdout".into()), terminal_colors: true, - #[cfg(all(feature = "tokio-console", not(feature = "no-tokio-console")))] + #[cfg(feature = "tokio-console")] tokio_console_addr: "127.0.0.1:5555".into(), }; let (mut receiver, _) = init(&config).unwrap().unwrap(); diff --git a/logger/tests/log_level.rs b/logger/tests/log_level.rs index b97c9c7ce63..ac04284c1d4 100644 --- a/logger/tests/log_level.rs +++ b/logger/tests/log_level.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction, clippy::all)] - use std::time::Duration; use iroha_logger::{ @@ -16,6 +14,7 @@ struct SenderFilter { } impl SenderFilter { + #[allow(clippy::new_ret_no_self)] pub fn new(sub: S) -> (impl Subscriber, mpsc::UnboundedReceiver<()>) { let (sender, receiver) = mpsc::unbounded_channel(); (EventSubscriber(Self { sender, sub }), receiver) diff --git a/logger/tests/setting_logger.rs b/logger/tests/setting_logger.rs index da82ff950e3..6d204f7abca 100644 --- a/logger/tests/setting_logger.rs +++ b/logger/tests/setting_logger.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction, clippy::expect_used)] - use iroha_config::base::proxy::Builder; use iroha_logger::{init, ConfigurationProxy}; diff --git a/logger/tests/telemetry.rs b/logger/tests/telemetry.rs index 50507412673..bfab41332eb 100644 --- a/logger/tests/telemetry.rs +++ b/logger/tests/telemetry.rs @@ -1,5 +1,3 @@ -#![allow(clippy::restriction)] - use std::time::Duration; use iroha_config::base::proxy::Builder; diff --git a/macro/Cargo.toml b/macro/Cargo.toml index 8fd1b71b41d..aee10f93f8b 100644 --- a/macro/Cargo.toml +++ b/macro/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [badges] is-it-maintained-issue-resolution = { repository = "https://github.com/hyperledger/iroha" } is-it-maintained-open-issues = { repository = "https://github.com/hyperledger/iroha" } diff --git a/macro/derive/Cargo.toml b/macro/derive/Cargo.toml index e7dd8d06e2a..5f09f48c489 100644 --- a/macro/derive/Cargo.toml +++ b/macro/derive/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true diff --git a/macro/derive/src/lib.rs b/macro/derive/src/lib.rs index 6d8df4912a4..c8121d9a317 100644 --- a/macro/derive/src/lib.rs +++ b/macro/derive/src/lib.rs @@ -1,7 +1,5 @@ //! Crate with various derive macros -#![allow(clippy::restriction)] - use darling::{util::SpannedValue, FromDeriveInput}; use manyhow::{manyhow, Result}; use proc_macro2::{Span, TokenStream}; diff --git a/macro/src/lib.rs b/macro/src/lib.rs index 35b71caacec..e7d398f16b5 100644 --- a/macro/src/lib.rs +++ b/macro/src/lib.rs @@ -1,6 +1,4 @@ //! 
Crate containing iroha macros - -#![allow(clippy::module_name_repetitions, clippy::std_instead_of_core)] #![cfg_attr(not(feature = "std"), no_std)] pub use iroha_derive::*; diff --git a/macro/utils/Cargo.toml b/macro/utils/Cargo.toml index 4f8ca144381..08c1dce1270 100644 --- a/macro/utils/Cargo.toml +++ b/macro/utils/Cargo.toml @@ -12,10 +12,11 @@ is-it-maintained-issue-resolution = { repository = "https://github.com/hyperledg is-it-maintained-open-issues = { repository = "https://github.com/hyperledger/iroha" } maintenance = { status = "actively-developed" } -[features] - [dependencies] syn = { workspace = true, features = ["default", "parsing", "printing"] } +syn2 = { workspace = true, features = ["default", "parsing", "printing"] } +darling = { workspace = true } quote = { workspace = true } proc-macro2 = { workspace = true } -proc-macro-error = { workspace = true } +manyhow = { workspace = true } +drop_bomb = "0.1.5" diff --git a/ffi/derive/src/emitter.rs b/macro/utils/src/emitter.rs similarity index 80% rename from ffi/derive/src/emitter.rs rename to macro/utils/src/emitter.rs index 193d961c663..9509b43930d 100644 --- a/ffi/derive/src/emitter.rs +++ b/macro/utils/src/emitter.rs @@ -1,8 +1,9 @@ +//! A wrapper type around [`manyhow::Emitter`] that provides a more ergonomic API. + use drop_bomb::DropBomb; use manyhow::ToTokensError; use proc_macro2::TokenStream; -// TODO: move this type to `derive-primitives` crate /// A wrapper type around [`manyhow::Emitter`] that provides a more ergonomic API. /// /// This type is used to accumulate errors during parsing and code generation. @@ -15,6 +16,7 @@ pub struct Emitter { } impl Emitter { + /// Creates a new emitter. It must be consumed by calling any of the `finish_*` functions before dropping or it will panic. pub fn new() -> Self { Self { inner: manyhow::Emitter::new(), @@ -53,18 +55,32 @@ impl Emitter { } /// Consume the emitter, returning a [`manyhow::Error`] if any errors were emitted. + /// + /// # Errors + /// + /// This function returns an error if the emitter has some errors accumulated. pub fn finish(mut self) -> manyhow::Result<()> { self.bomb.defuse(); self.inner.into_result() } /// Same as [`Emitter::finish`], but returns the given value if no errors were emitted. + /// + /// # Errors + /// + /// This function returns an error if the emitter has some errors accumulated. #[allow(unused)] pub fn finish_with(self, result: T) -> manyhow::Result { self.finish().map(|_| result) } /// Handles the given [`manyhow::Result`] and consumes the emitter. + /// + /// # Errors + /// + /// This function returns an error if: + /// - The given result is `Err` + /// - The emitter has some errors accumulated #[allow(unused)] pub fn finish_and( mut self, @@ -80,7 +96,7 @@ impl Emitter { } /// Consume the emitter, convert all errors into a token stream and append it to the given token stream. - pub fn into_tokens(self, tokens: &mut TokenStream) { + pub fn finish_to_token_stream(self, tokens: &mut TokenStream) { match self.finish() { Ok(()) => {} Err(e) => e.to_tokens(tokens), @@ -90,7 +106,7 @@ impl Emitter { /// Consume the emitter, convert all errors into a token stream. pub fn finish_token_stream(self) -> TokenStream { let mut tokens_stream = TokenStream::new(); - self.into_tokens(&mut tokens_stream); + self.finish_to_token_stream(&mut tokens_stream); tokens_stream } @@ -98,11 +114,17 @@ impl Emitter { /// /// This function is useful when you want to handle errors in a macro, but want to emit some tokens even in case of an error. 
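The relocated `Emitter` is the error-accumulation workhorse for the `manyhow`-based macros. A hedged usage sketch (the `Marker` trait is hypothetical, the `iroha_macro_utils` crate name is assumed from the `macro/utils` path, and `syn::Error` is assumed to satisfy the emitter's error bound via `manyhow`, as the `Extend` impl shown just below suggests):

    use iroha_macro_utils::Emitter;
    use proc_macro2::TokenStream;
    use quote::quote;

    fn derive_marker(input: &syn::DeriveInput) -> TokenStream {
        let mut emitter = Emitter::new();

        if !input.generics.params.is_empty() {
            // Accumulate the diagnostic instead of bailing out early.
            emitter.extend([syn::Error::new_spanned(
                &input.generics,
                "generics are not supported",
            )]);
        }

        let name = &input.ident;
        let partial = quote! { impl Marker for #name {} };

        // Any accumulated errors are appended after the partially generated
        // tokens, so rustc still sees the item and reports the diagnostics.
        emitter.finish_token_stream_with(partial)
    }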
pub fn finish_token_stream_with(self, mut tokens_stream: TokenStream) -> TokenStream { - self.into_tokens(&mut tokens_stream); + self.finish_to_token_stream(&mut tokens_stream); tokens_stream } } +impl Default for Emitter { + fn default() -> Self { + Self::new() + } +} + impl Extend for Emitter { fn extend>(&mut self, iter: T) { self.inner.extend(iter) diff --git a/macro/utils/src/lib.rs b/macro/utils/src/lib.rs index 2d6d6ef3e70..1e069d1bfd8 100644 --- a/macro/utils/src/lib.rs +++ b/macro/utils/src/lib.rs @@ -1,5 +1,9 @@ //! Module for various functions and structs to build macros in iroha. +mod emitter; + +pub use emitter::Emitter; + /// Trait for attribute parsing generalization pub trait AttrParser { /// Attribute identifier `#[IDENT...]` @@ -65,3 +69,155 @@ macro_rules! attr_struct { } }; } + +/// Extension trait for [`darling::Error`]. +/// +/// Currently exists to add `with_spans` method. +pub trait DarlingErrorExt: Sized { + /// Attaches a combination of multiple spans to the error. + /// + /// Note that it only attaches the first span on stable rustc, as the `Span::join` method is not yet stabilized (). + #[must_use] + fn with_spans(self, spans: impl IntoIterator>) -> Self; +} + +impl DarlingErrorExt for darling::Error { + fn with_spans(self, spans: impl IntoIterator>) -> Self { + // Unfortunately, the story for combining multiple spans in rustc proc macro is not yet complete. + // (see https://github.com/rust-lang/rust/issues/54725#issuecomment-649078500, https://github.com/rust-lang/rust/issues/54725#issuecomment-1547795742) + // syn does some hacks to get error reporting that is a bit better: https://docs.rs/syn/2.0.37/src/syn/error.rs.html#282 + // we can't to that because darling's error type does not let us do that. + + // on nightly, we are fine, as `.join` method works. On stable, we fall back to returning the first span. + + let mut iter = spans.into_iter(); + let Some(first) = iter.next() else { + return self; + }; + let first: proc_macro2::Span = first.into(); + let r = iter + .try_fold(first, |a, b| a.join(b.into())) + .unwrap_or(first); + + self.with_span(&r) + } +} + +/// Finds an optional single attribute with specified name. +/// +/// Returns `None` if no attributes with specified name are found. +/// +/// Emits an error into accumulator if multiple attributes with specified name are found. +#[must_use] +pub fn find_single_attr_opt<'a>( + accumulator: &mut darling::error::Accumulator, + attr_name: &str, + attrs: &'a [syn2::Attribute], +) -> Option<&'a syn2::Attribute> { + let matching_attrs = attrs + .iter() + .filter(|a| a.path().is_ident(attr_name)) + .collect::>(); + let attr = match *matching_attrs.as_slice() { + [] => { + return None; + } + [attr] => attr, + [attr, ref tail @ ..] => { + // allow parsing to proceed further to collect more errors + accumulator.push( + darling::Error::custom(format!("Only one #[{}] attribute is allowed!", attr_name)) + .with_spans(tail.iter().map(syn2::spanned::Spanned::span)), + ); + attr + } + }; + + Some(attr) +} + +/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. +/// +/// If no attribute with specified name is found, returns `Ok(None)`. 
+/// +/// # Errors +/// +/// - If multiple attributes with specified name are found +/// - If attribute is not a list +pub fn parse_single_list_attr_opt( + attr_name: &str, + attrs: &[syn2::Attribute], +) -> darling::Result> { + let mut accumulator = darling::error::Accumulator::default(); + + let Some(attr) = find_single_attr_opt(&mut accumulator, attr_name, attrs) else { + return accumulator.finish_with(None); + }; + + let mut kind = None; + + match &attr.meta { + syn2::Meta::Path(_) | syn2::Meta::NameValue(_) => accumulator.push(darling::Error::custom( + format!("Expected #[{}(...)] attribute to be a list", attr_name), + )), + syn2::Meta::List(list) => { + kind = accumulator.handle(syn2::parse2(list.tokens.clone()).map_err(Into::into)); + } + } + + accumulator.finish_with(kind) +} + +/// Parses a single attribute of the form `#[attr_name(...)]` for darling using a `syn::parse::Parse` implementation. +/// +/// If no attribute with specified name is found, returns an error. +/// +/// # Errors +/// +/// - If multiple attributes with specified name are found +/// - If attribute is not a list +/// - If attribute is not found +pub fn parse_single_list_attr( + attr_name: &str, + attrs: &[syn2::Attribute], +) -> darling::Result { + parse_single_list_attr_opt(attr_name, attrs)? + .ok_or_else(|| darling::Error::custom(format!("Missing `#[{}(...)]` attribute", attr_name))) +} + +/// Macro for automatic [`syn::parse::Parse`] impl generation for keyword +/// attribute structs in derive macros. +#[macro_export] +macro_rules! attr_struct2 { + // Matching struct with named fields + ( + $( #[$meta:meta] )* + // ^~~~attributes~~~~^ + $vis:vis struct $name:ident { + $( + $( #[$field_meta:meta] )* + // ^~~~field attributes~~~!^ + $field_vis:vis $field_name:ident : $field_ty:ty + // ^~~~~~~~~~~~~~~~~a single field~~~~~~~~~~~~~~~^ + ),* + $(,)? } + ) => { + $( #[$meta] )* + $vis struct $name { + $( + $( #[$field_meta] )* + $field_vis $field_name : $field_ty + ),* + } + + impl syn2::parse::Parse for $name { + fn parse(input: syn2::parse::ParseStream) -> syn2::Result { + Ok(Self { + $( + $field_name: input.parse()?, + )* + }) + } + } + }; +} diff --git a/p2p/Cargo.toml b/p2p/Cargo.toml index 758888f1fde..6745de7e992 100644 --- a/p2p/Cargo.toml +++ b/p2p/Cargo.toml @@ -8,6 +8,9 @@ authors.workspace = true license.workspace = true categories = ["network-programming"] +[lints] +workspace = true + [dependencies] iroha_logger = { workspace = true } iroha_crypto = { workspace = true } @@ -18,11 +21,9 @@ iroha_data_model_derive = { workspace = true } rand = { workspace = true } tokio = { workspace = true, features = ["rt-multi-thread", "macros", "io-util", "net", "time"] } -async-stream = { workspace = true } futures = { workspace = true, features = ["alloc"] } async-trait = { workspace = true } parity-scale-codec = { workspace = true, features = ["derive"] } -aead = { workspace = true } thiserror = { workspace = true } displaydoc = { workspace = true } derive_more = { workspace = true } diff --git a/p2p/src/lib.rs b/p2p/src/lib.rs index 963d6355f29..b62ce1afd74 100644 --- a/p2p/src/lib.rs +++ b/p2p/src/lib.rs @@ -1,17 +1,15 @@ //! This module provides a network layer for holding of persistent //! connections between blockchain nodes. Sane defaults for secure //! Cryptography are chosen in this module, and encapsulated. 
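The attribute helpers above are designed to compose: `attr_struct2!` generates the `syn2::parse::Parse` impl and `parse_single_list_attr_opt` drives it. A hedged sketch (the `iroha_macro_utils` crate path and the `#[example(...)]` attribute are assumptions; `syn2` is the workspace alias for syn 2.x introduced in the Cargo.toml hunk above):

    use iroha_macro_utils::{attr_struct2, parse_single_list_attr_opt};

    attr_struct2! {
        /// Parsed from `#[example(SomeType)]`; fields parse in declaration order.
        pub struct ExampleAttr {
            pub ty: syn2::Type,
        }
    }

    fn read_example_attr(attrs: &[syn2::Attribute]) -> darling::Result<Option<ExampleAttr>> {
        // Returns `Ok(None)` when absent, errors on duplicates or non-list forms.
        parse_single_list_attr_opt::<ExampleAttr>("example", attrs)
    }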
-#![allow(clippy::std_instead_of_core, clippy::std_instead_of_alloc)] use std::{io, net::AddrParseError}; -use iroha_crypto::ursa::{ +use iroha_crypto::{ blake2::{ digest::{Update, VariableOutput}, - VarBlake2b, + Blake2bVar, }, - encryption::symm::prelude::ChaCha20Poly1305, - kex::x25519::X25519Sha256, - CryptoError, + encryption::ChaCha20Poly1305, + kex::X25519Sha256, }; pub use network::message::*; use parity_scale_codec::{Decode, Encode}; @@ -27,7 +25,7 @@ pub mod boilerplate { //! Module containing trait shorthands. Remove when trait aliases //! are stable - use iroha_crypto::ursa::{encryption::symm::Encryptor, kex::KeyExchangeScheme}; + use iroha_crypto::{encryption::Encryptor, kex::KeyExchangeScheme}; use super::*; @@ -40,8 +38,8 @@ pub mod boilerplate { impl Kex for T where T: KeyExchangeScheme + Send + 'static {} /// Shorthand for traits required for encryptor - pub trait Enc: Encryptor + Send + 'static {} - impl Enc for T where T: Encryptor + Send + 'static {} + pub trait Enc: Encryptor + Clone + Send + 'static {} + impl Enc for T where T: Encryptor + Clone + Send + 'static {} } /// Errors used in [`crate`]. @@ -56,32 +54,15 @@ pub enum Error { /// Parity Scale codec error ParityScale(#[from] parity_scale_codec::Error), /// Failed to create keys - Keys(#[source] CryptographicError), + Keys(#[from] iroha_crypto::error::Error), + /// Symmetric encryption has failed + SymmetricEncryption(#[from] iroha_crypto::encryption::Error), /// Failed to parse socket address Addr(#[from] AddrParseError), /// Connection reset by peer in the middle of message transfer ConnectionResetByPeer, } -/// Error in the cryptographic processes. -#[derive(derive_more::From, Debug, Error, displaydoc::Display)] -pub enum CryptographicError { - /// Decryption failed - #[from(ignore)] - Decrypt(aead::Error), - /// Encryption failed - #[from(ignore)] - Encrypt(aead::Error), - /// Ursa Cryptography error - Ursa(CryptoError), -} - -impl> From for Error { - fn from(err: T) -> Self { - Self::Keys(err.into()) - } -} - impl From for Error { fn from(e: io::Error) -> Self { Self::Io(std::sync::Arc::new(e)) @@ -158,7 +139,7 @@ pub(crate) mod unbounded_with_len { /// Create Blake2b hash as u64 value pub fn blake2b_hash(slice: impl AsRef<[u8]>) -> u64 { const U64_SIZE: usize = core::mem::size_of::(); - let hash = VarBlake2b::new(U64_SIZE) + let hash = Blake2bVar::new(U64_SIZE) .expect("Failed to create hash with given length") .chain(&slice) .finalize_boxed(); diff --git a/p2p/src/network.rs b/p2p/src/network.rs index a3b3da23294..751eb779d3d 100644 --- a/p2p/src/network.rs +++ b/p2p/src/network.rs @@ -1,5 +1,4 @@ //! Network formed out of Iroha peers. 
-#![allow(clippy::std_instead_of_core)] use std::{ collections::{HashMap, HashSet}, fmt::Debug, @@ -7,7 +6,7 @@ use std::{ }; use futures::{stream::FuturesUnordered, StreamExt}; -use iroha_crypto::PublicKey; +use iroha_crypto::{KeyPair, PublicKey}; use iroha_data_model::prelude::PeerId; use iroha_logger::prelude::*; use iroha_primitives::addr::SocketAddr; @@ -66,8 +65,8 @@ impl NetworkBaseHandle { /// /// # Errors /// - If binding to address fail - #[log(skip(public_key))] - pub async fn start(listen_addr: SocketAddr, public_key: PublicKey) -> Result { + #[log(skip(key_pair))] + pub async fn start(listen_addr: SocketAddr, key_pair: KeyPair) -> Result { let listener = TcpListener::bind(&listen_addr.to_string()).await?; iroha_logger::info!("Network bound to listener"); let (online_peers_sender, online_peers_receiver) = watch::channel(HashSet::new()); @@ -83,7 +82,7 @@ impl NetworkBaseHandle { listener, peers: HashMap::new(), connecting_peers: HashMap::new(), - public_key, + key_pair, subscribers_to_peers_messages: Vec::new(), subscribe_to_peers_messages_receiver, online_peers_sender, @@ -167,8 +166,8 @@ struct NetworkBase { connecting_peers: HashMap, /// [`TcpListener`] that is accepting [`Peer`]s' connections listener: TcpListener, - /// Our app-level public key - public_key: PublicKey, + /// Our app-level key pair + key_pair: KeyPair, /// Recipients of messages received from other peers in the network. subscribers_to_peers_messages: Vec>, /// Receiver to subscribe for messages received from other peers in the network. @@ -200,11 +199,10 @@ struct NetworkBase { impl NetworkBase { /// [`Self`] task. - #[log(skip(self), fields(listen_addr=%self.listen_addr, public_key=%self.public_key))] + #[log(skip(self), fields(listen_addr=%self.listen_addr, public_key=%self.key_pair.public_key()))] async fn run(mut self) { // TODO: probably should be configuration parameter let mut update_topology_interval = tokio::time::interval(Duration::from_millis(100)); - #[allow(clippy::arithmetic_side_effects)] loop { tokio::select! { // Select is biased because we want to service messages to take priority over data messages. 
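That "select is biased" comment refers to `tokio::select!`'s `biased;` mode. In isolation the pattern looks like this (a generic sketch with hypothetical channels, not the actual `NetworkBase` loop):

    use tokio::sync::mpsc;

    async fn pump(mut service_rx: mpsc::Receiver<u8>, mut data_rx: mpsc::Receiver<u8>) {
        loop {
            tokio::select! {
                // `biased;` polls branches top to bottom instead of randomly,
                // so a ready service message always wins over a data message.
                biased;
                Some(msg) = service_rx.recv() => { let _ = msg; /* handle service message */ }
                Some(msg) = data_rx.recv() => { let _ = msg; /* handle data message */ }
                else => break, // both channels closed
            }
        }
    }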
@@ -275,7 +273,8 @@ impl NetworkBase { let conn_id = self.get_conn_id(); let service_message_sender = self.service_message_sender.clone(); connected_from::( - PeerId::new(addr, &self.public_key), + addr.clone(), + self.key_pair.clone(), Connection::new(conn_id, stream), service_message_sender, ); @@ -283,7 +282,7 @@ impl NetworkBase { fn set_current_topology(&mut self, UpdateTopology(topology): UpdateTopology) { iroha_logger::debug!(?topology, "Network receive new topology"); - let self_public_key_hash = blake2b_hash(self.public_key.payload()); + let self_public_key_hash = blake2b_hash(self.key_pair.public_key().payload()); let topology = topology .into_iter() .map(|peer_id| { @@ -337,7 +336,8 @@ impl NetworkBase { let service_message_sender = self.service_message_sender.clone(); connecting::( // NOTE: we intentionally use peer's address and our public key, it's used during handshake - PeerId::new(&peer.address, &self.public_key), + peer.address.clone(), + self.key_pair.clone(), conn_id, service_message_sender, ); @@ -400,11 +400,13 @@ impl NetworkBase { fn peer_terminated(&mut self, Terminated { peer_id, conn_id }: Terminated) { self.connecting_peers.remove(&conn_id); - if let Some(peer) = self.peers.get(&peer_id.public_key) { - if peer.conn_id == conn_id { - iroha_logger::debug!(conn_id, peer=%peer_id, "Peer terminated"); - self.peers.remove(&peer_id.public_key); - Self::remove_online_peer(&self.online_peers_sender, &peer_id); + if let Some(peer_id) = peer_id { + if let Some(peer) = self.peers.get(&peer_id.public_key) { + if peer.conn_id == conn_id { + iroha_logger::debug!(conn_id, peer=%peer_id, "Peer terminated"); + self.peers.remove(&peer_id.public_key); + Self::remove_online_peer(&self.online_peers_sender, &peer_id); + } } } } @@ -419,7 +421,7 @@ impl NetworkBase { Self::remove_online_peer(&self.online_peers_sender, &peer_id); } } - None if peer_id.public_key == self.public_key => { + None if &peer_id.public_key == self.key_pair.public_key() => { #[cfg(debug_assertions)] iroha_logger::trace!("Not sending message to myself") } diff --git a/p2p/src/peer.rs b/p2p/src/peer.rs index e7dbd5b95bf..e08a93f6aca 100644 --- a/p2p/src/peer.rs +++ b/p2p/src/peer.rs @@ -1,5 +1,4 @@ //! Tokio actor Peer -#![allow(clippy::arithmetic_side_effects)] use bytes::{Buf, BufMut, BytesMut}; use iroha_data_model::prelude::PeerId; @@ -15,7 +14,7 @@ use tokio::{ sync::{mpsc, oneshot}, }; -use crate::{boilerplate::*, CryptographicError, Error}; +use crate::{boilerplate::*, Error}; /// Max length of message handshake in bytes excluding first message length byte. pub const MAX_HANDSHAKE_LENGTH: u8 = 255; @@ -26,19 +25,23 @@ pub const DEFAULT_AAD: &[u8; 10] = b"Iroha2 AAD"; pub mod handles { //! Module with functions to start peer actor and handle to interact with it. 
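At the call-site level, handing the network a full `KeyPair` instead of just a `PublicKey` is what lets the handshake sign. A hedged sketch of starting a node, mirroring the integration tests later in this diff (the `NetworkHandle` alias and the `socket_addr!` import path are assumptions, and any message-type generics are elided):

    use iroha_crypto::KeyPair;
    use iroha_p2p::NetworkHandle;
    use iroha_primitives::addr::socket_addr;

    async fn start_node() -> Result<(), iroha_p2p::Error> {
        // `iroha_crypto::error::Error` now converts into `iroha_p2p::Error`
        // via the new `Keys(#[from] ...)` variant, so `?` just works.
        let key_pair = KeyPair::generate()?;
        let _network = NetworkHandle::start(socket_addr!(127.0.0.1:12_000), key_pair).await?;
        Ok(())
    }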
+ use iroha_crypto::KeyPair; use iroha_logger::Instrument; + use iroha_primitives::addr::SocketAddr; use super::{run::RunPeerArgs, *}; use crate::unbounded_with_len; /// Start Peer in [`state::Connecting`] state pub fn connecting( - peer_id: PeerId, + peer_addr: SocketAddr, + key_pair: KeyPair, connection_id: ConnectionId, service_message_sender: mpsc::Sender>, ) { let peer = state::Connecting { - peer_id, + peer_addr, + key_pair, connection_id, }; let peer = RunPeerArgs { @@ -50,12 +53,14 @@ pub mod handles { /// Start Peer in [`state::ConnectedFrom`] state pub fn connected_from( - peer_id: PeerId, + peer_addr: SocketAddr, + key_pair: KeyPair, connection: Connection, service_message_sender: mpsc::Sender>, ) { let peer = state::ConnectedFrom { - peer_id, + peer_addr, + key_pair, connection, }; let peer = RunPeerArgs { @@ -94,7 +99,7 @@ mod run { state::{ConnectedFrom, Connecting, Ready}, *, }; - use crate::{blake2b_hash, unbounded_with_len}; + use crate::unbounded_with_len; /// Peer task. #[allow(clippy::too_many_lines)] @@ -106,7 +111,7 @@ mod run { }: RunPeerArgs, ) { let conn_id = peer.connection_id(); - let mut peer_id = peer.peer_id().clone(); + let mut peer_id = None; iroha_logger::trace!("Peer created"); @@ -131,9 +136,9 @@ mod run { }, cryptographer, } = peer; - peer_id = new_peer_id; + let peer_id = peer_id.insert(new_peer_id); - let disambiguator = blake2b_hash(&cryptographer.shared_key); + let disambiguator = cryptographer.disambiguator; tracing::Span::current().record("peer", &peer_id.to_string()); tracing::Span::current().record("disambiguator", disambiguator); @@ -216,7 +221,7 @@ mod run { iroha_logger::debug!("Peer is terminated."); let _ = service_message_sender - .send(ServiceMessage::Terminated(Terminated { conn_id, peer_id })) + .send(ServiceMessage::Terminated(Terminated { peer_id, conn_id })) .await; } @@ -229,28 +234,18 @@ mod run { /// Trait for peer stages that might be used as starting point for peer's [`run`] function. pub(super) trait Entrypoint: Handshake + Send + 'static { fn connection_id(&self) -> ConnectionId; - - fn peer_id(&self) -> &PeerId; } impl Entrypoint for Connecting { fn connection_id(&self) -> ConnectionId { self.connection_id } - - fn peer_id(&self) -> &PeerId { - &self.peer_id - } } impl Entrypoint for ConnectedFrom { fn connection_id(&self) -> ConnectionId { self.connection.id } - - fn peer_id(&self) -> &PeerId { - &self.peer_id - } } /// Cancellation-safe way to read messages from tcp stream @@ -375,28 +370,32 @@ mod run { mod state { //! Module for peer stages. - use iroha_crypto::ursa::keys::PublicKey; + use iroha_crypto::{KeyPair, PublicKey, Signature}; + use iroha_primitives::{addr::SocketAddr, const_vec::ConstVec}; use super::{cryptographer::Cryptographer, *}; /// Peer that is connecting. This is the initial stage of a new /// outgoing peer. pub(super) struct Connecting { - pub peer_id: PeerId, + pub peer_addr: SocketAddr, + pub key_pair: KeyPair, pub connection_id: ConnectionId, } impl Connecting { pub(super) async fn connect_to( Self { - peer_id, + peer_addr, + key_pair, connection_id, }: Self, ) -> Result { - let stream = TcpStream::connect(peer_id.address.to_string()).await?; + let stream = TcpStream::connect(peer_addr.to_string()).await?; let connection = Connection::new(connection_id, stream); Ok(ConnectedTo { - peer_id, + peer_addr, + key_pair, connection, }) } @@ -404,36 +403,41 @@ mod state { /// Peer that is being connected to. 
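One detail worth spelling out from `run` above: `peer_id` now starts as `None` and is only filled in once the handshake produces an authenticated id, via `Option::insert`; this is also why `Terminated::peer_id` becomes an `Option` later in this diff. The std pattern in isolation:

    fn main() {
        let mut peer_id: Option<String> = None;
        // `Option::insert` stores the value and returns `&mut` to it, so the
        // rest of the scope can use the id without re-matching the `Option`.
        let id = peer_id.insert("peer-1".to_owned());
        id.push_str("-ready");
        assert_eq!(peer_id.as_deref(), Some("peer-1-ready"));
    }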
pub(super) struct ConnectedTo { - peer_id: PeerId, + peer_addr: SocketAddr, + key_pair: KeyPair, connection: Connection, } impl ConnectedTo { pub(super) async fn send_client_hello( Self { - peer_id, + peer_addr, + key_pair, mut connection, }: Self, ) -> Result, crate::Error> { let key_exchange = K::new(); - let (local_public_key, local_private_key) = key_exchange.keypair(None)?; + let (kx_local_pk, kx_local_sk) = key_exchange.keypair(None)?; + let (algorithm, kx_local_pk_raw) = kx_local_pk.clone().into_raw(); let write_half = &mut connection.write; garbage::write(write_half).await?; - write_half.write_all(local_public_key.as_ref()).await?; + write_half.write_all(&kx_local_pk_raw).await?; // Read server hello with node's public key let read_half = &mut connection.read; - let remote_public_key = { + let kx_remote_pk = { garbage::read(read_half).await?; // Then we have servers public key let mut key = vec![0_u8; 32]; let _ = read_half.read_exact(&mut key).await?; - PublicKey(key) + PublicKey::from_raw(algorithm, ConstVec::new(key)) }; - let shared_key = - key_exchange.compute_shared_secret(&local_private_key, &remote_public_key)?; - let cryptographer = Cryptographer::new(shared_key); + let shared_key = key_exchange.compute_shared_secret(&kx_local_sk, &kx_remote_pk); + let cryptographer = Cryptographer::new(&shared_key); Ok(SendKey { - peer_id, + peer_addr, + key_pair, + kx_local_pk, + kx_remote_pk, connection, cryptographer, }) @@ -442,36 +446,41 @@ mod state { /// Peer that is being connected from pub(super) struct ConnectedFrom { - pub peer_id: PeerId, + pub peer_addr: SocketAddr, + pub key_pair: KeyPair, pub connection: Connection, } impl ConnectedFrom { pub(super) async fn read_client_hello( Self { - peer_id, + peer_addr, + key_pair, mut connection, .. }: Self, ) -> Result, crate::Error> { let key_exchange = K::new(); - let (local_public_key, local_private_key) = key_exchange.keypair(None)?; + let (kx_local_pk, kx_local_sk) = key_exchange.keypair(None)?; + let (algorithm, kx_local_pk_raw) = kx_local_pk.clone().into_raw(); let read_half = &mut connection.read; - let remote_public_key = { + let kx_remote_pk = { garbage::read(read_half).await?; // And then we have clients public key let mut key = vec![0_u8; 32]; let _ = read_half.read_exact(&mut key).await?; - PublicKey(key) + PublicKey::from_raw(algorithm, ConstVec::new(key)) }; let write_half = &mut connection.write; garbage::write(write_half).await?; - write_half.write_all(local_public_key.as_ref()).await?; - let shared_key = - key_exchange.compute_shared_secret(&local_private_key, &remote_public_key)?; - let cryptographer = Cryptographer::new(shared_key); + write_half.write_all(&kx_local_pk_raw).await?; + let shared_key = key_exchange.compute_shared_secret(&kx_local_sk, &kx_remote_pk); + let cryptographer = Cryptographer::new(&shared_key); Ok(SendKey { - peer_id, + peer_addr, + key_pair, + kx_local_pk, + kx_remote_pk, connection, cryptographer, }) @@ -480,7 +489,10 @@ mod state { /// Peer that needs to send key. 
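Stripped of the socket plumbing, `send_client_hello` and `read_client_hello` above perform the same key-agreement step, just in the opposite send/receive order. A condensed sketch (signatures follow the diff; the concrete error type is an assumption):

    use iroha_crypto::{kex::KeyExchangeScheme, PublicKey, SessionKey};

    fn agree<K: KeyExchangeScheme>(
        kx_remote_pk: &PublicKey,
    ) -> Result<SessionKey, iroha_crypto::error::Error> {
        let key_exchange = K::new();
        // Fresh ephemeral pair per connection; `None` means no fixed seed.
        let (kx_local_pk, kx_local_sk) = key_exchange.keypair(None)?;
        // On the wire: write our raw 32-byte key (after the garbage prefix),
        // read theirs, and rebuild it with `PublicKey::from_raw`.
        let (_algorithm, _kx_local_pk_raw) = kx_local_pk.into_raw();
        // Both sides now derive the same session key, which seeds `Cryptographer`.
        Ok(key_exchange.compute_shared_secret(&kx_local_sk, kx_remote_pk))
    }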
pub(super) struct SendKey { - peer_id: PeerId, + peer_addr: SocketAddr, + key_pair: KeyPair, + kx_local_pk: PublicKey, + kx_remote_pk: PublicKey, connection: Connection, cryptographer: Cryptographer, } @@ -488,16 +500,19 @@ mod state { impl SendKey { pub(super) async fn send_our_public_key( Self { - peer_id, + peer_addr, + key_pair, + kx_local_pk, + kx_remote_pk, mut connection, cryptographer, }: Self, ) -> Result, crate::Error> { let write_half = &mut connection.write; - // We take our public key from our `id` and will replace it with theirs when we read it - // Packing length and message in one network packet for efficiency - let data = peer_id.public_key().encode(); + let payload = create_payload(&kx_local_pk, &kx_remote_pk); + let signature = Signature::new(key_pair, &payload)?; + let data = signature.encode(); let data = &cryptographer.encrypt(data.as_slice())?; @@ -508,8 +523,10 @@ mod state { write_half.write_all(&buf).await?; Ok(GetKey { - peer_id, + peer_addr, connection, + kx_local_pk, + kx_remote_pk, cryptographer, }) } @@ -517,8 +534,10 @@ mod state { /// Peer that needs to get key. pub struct GetKey { - peer_id: PeerId, + peer_addr: SocketAddr, connection: Connection, + kx_local_pk: PublicKey, + kx_remote_pk: PublicKey, cryptographer: Cryptographer, } @@ -526,8 +545,10 @@ mod state { /// Read the peer's public key pub(super) async fn read_their_public_key( Self { - mut peer_id, + peer_addr, mut connection, + kx_local_pk, + kx_remote_pk, cryptographer, }: Self, ) -> Result, crate::Error> { @@ -539,9 +560,19 @@ mod state { let data = cryptographer.decrypt(data.as_slice())?; - let pub_key = DecodeAll::decode_all(&mut data.as_slice())?; + let signature: Signature = DecodeAll::decode_all(&mut data.as_slice())?; + + // Swap order of keys since we are verifying for other peer order remote/local keys is reversed + let payload = create_payload(&kx_remote_pk, &kx_local_pk); + signature.verify(&payload)?; + + let (remote_pub_key, _) = signature.into(); + + let peer_id = PeerId { + address: peer_addr, + public_key: remote_pub_key, + }; - peer_id.public_key = pub_key; Ok(Ready { peer_id, connection, @@ -557,6 +588,14 @@ mod state { pub connection: Connection, pub cryptographer: Cryptographer, } + + fn create_payload(kx_local_pk: &PublicKey, kx_remote_pk: &PublicKey) -> Vec { + let mut payload = + Vec::with_capacity(kx_local_pk.payload().len() + kx_remote_pk.payload().len()); + payload.extend(kx_local_pk.payload()); + payload.extend(kx_remote_pk.payload()); + payload + } } mod handshake { @@ -661,7 +700,7 @@ pub mod message { /// Peer faced error or `Terminate` message, send to indicate that it is terminated pub struct Terminated { /// Peer Id - pub peer_id: PeerId, + pub peer_id: Option, /// Connection Id pub conn_id: ConnectionId, } @@ -676,15 +715,16 @@ pub mod message { } mod cryptographer { - use aead::{generic_array::GenericArray, NewAead}; - use iroha_crypto::ursa::{encryption::symm::SymmetricEncryptor, keys::SessionKey}; + use iroha_crypto::{encryption::SymmetricEncryptor, SessionKey}; use super::*; + use crate::blake2b_hash; /// Peer's cryptographic primitives + #[derive(Clone)] pub struct Cryptographer { - /// Shared key - pub shared_key: SessionKey, + /// Blake2b hash of the session key, used as unique shared value between two peers + pub disambiguator: u64, /// Encryptor created from session key, that we got by Diffie-Hellman scheme pub encryptor: SymmetricEncryptor, } @@ -697,7 +737,6 @@ mod cryptographer { pub fn decrypt(&self, data: &[u8]) -> Result, Error> { self.encryptor 
.decrypt_easy(DEFAULT_AAD.as_ref(), data) - .map_err(CryptographicError::Decrypt) .map_err(Into::into) } @@ -708,27 +747,20 @@ mod cryptographer { pub fn encrypt(&self, data: &[u8]) -> Result, Error> { self.encryptor .encrypt_easy(DEFAULT_AAD.as_ref(), data) - .map_err(CryptographicError::Encrypt) .map_err(Into::into) } /// Derives shared key from local private key and remote public key. - pub fn new(shared_key: SessionKey) -> Self { - let encryptor = SymmetricEncryptor::::new(::new( - GenericArray::from_slice(shared_key.as_ref()), - )); + pub fn new(shared_key: &SessionKey) -> Self { + let disambiguator = blake2b_hash(shared_key.payload()); + + let encryptor = SymmetricEncryptor::::new_from_session_key(shared_key); Self { + disambiguator, encryptor, - shared_key, } } } - - impl Clone for Cryptographer { - fn clone(&self) -> Self { - Self::new(self.shared_key.clone()) - } - } } /// An identification for [`Peer`] connections. diff --git a/p2p/tests/integration/p2p.rs b/p2p/tests/integration/p2p.rs index 5cd769aca68..93f9a391765 100644 --- a/p2p/tests/integration/p2p.rs +++ b/p2p/tests/integration/p2p.rs @@ -1,9 +1,6 @@ -#![allow(clippy::restriction)] - use std::{ collections::HashSet, fmt::Debug, - str::FromStr, sync::{ atomic::{AtomicU32, Ordering}, Arc, Once, @@ -51,11 +48,9 @@ async fn network_create() { setup_logger(); info!("Starting network tests..."); let address = socket_addr!(127.0.0.1:12_000); - let public_key = iroha_crypto::PublicKey::from_str( - "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0", - ) - .unwrap(); - let network = NetworkHandle::start(address.clone(), public_key.clone()) + let key_pair = KeyPair::generate().unwrap(); + let public_key = key_pair.public_key().clone(); + let network = NetworkHandle::start(address.clone(), key_pair) .await .unwrap(); tokio::time::sleep(delay).await; @@ -160,23 +155,19 @@ impl TestActor { async fn two_networks() { let delay = Duration::from_millis(300); setup_logger(); - let public_key1 = iroha_crypto::PublicKey::from_str( - "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0", - ) - .unwrap(); - let public_key2 = iroha_crypto::PublicKey::from_str( - "ed01207233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C1", - ) - .unwrap(); + let key_pair1 = KeyPair::generate().unwrap(); + let public_key1 = key_pair1.public_key().clone(); + let key_pair2 = KeyPair::generate().unwrap().clone(); + let public_key2 = key_pair2.public_key().clone(); info!("Starting first network..."); let address1 = socket_addr!(127.0.0.1:12_005); - let mut network1 = NetworkHandle::start(address1.clone(), public_key1.clone()) + let mut network1 = NetworkHandle::start(address1.clone(), key_pair1) .await .unwrap(); info!("Starting second network..."); let address2 = socket_addr!(127.0.0.1:12_010); - let network2 = NetworkHandle::start(address2.clone(), public_key2.clone()) + let network2 = NetworkHandle::start(address2.clone(), key_pair2) .await .unwrap(); @@ -247,20 +238,22 @@ async fn multiple_networks() { .expect("Default logger config should always build") }; // Can't use logger because it's failed to initialize. 
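Returning to the handshake authentication: each side signs `create_payload(local_kx_pk, remote_kx_pk)` from its own point of view, so the verifier has to flip the argument order to reproduce the signer's bytes, which is exactly what `read_their_public_key` does. Binding the app-level signature to both ephemeral keys ties the proven identity to this specific session. A toy illustration of the flip (pure byte bookkeeping, no real crypto):

    fn create_payload(local: &[u8], remote: &[u8]) -> Vec<u8> {
        let mut payload = Vec::with_capacity(local.len() + remote.len());
        payload.extend_from_slice(local);
        payload.extend_from_slice(remote);
        payload
    }

    fn main() {
        let (alice_kx, bob_kx) = (b"A".as_slice(), b"B".as_slice());
        let alice_signs = create_payload(alice_kx, bob_kx); // Alice: (local, remote)

        // From Bob's point of view `local` is his own key, so...
        let (my_kx, their_kx) = (bob_kx, alice_kx);
        assert_ne!(alice_signs, create_payload(my_kx, their_kx)); // unswapped: verify fails
        assert_eq!(alice_signs, create_payload(their_kx, my_kx)); // swapped, as in the diff
    }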
- #[allow(clippy::print_stderr)] if let Err(err) = iroha_logger::init(&log_config) { eprintln!("Failed to initialize logger: {err}"); } info!("Starting..."); let mut peers = Vec::new(); + let mut key_pairs = Vec::new(); for i in 0_u16..10_u16 { let address = socket_addr!(127.0.0.1: 12_015 + ( i * 5)); - let keypair = KeyPair::generate().unwrap(); + let key_pair = KeyPair::generate().unwrap(); + let public_key = key_pair.public_key().clone(); peers.push(PeerId { address, - public_key: keypair.public_key().clone(), + public_key, }); + key_pairs.push(key_pair); } let mut networks = Vec::new(); @@ -272,9 +265,11 @@ async fn multiple_networks() { let barrier = Arc::new(Barrier::new(peers.len())); peers .iter() - .map(|peer| { + .zip(key_pairs) + .map(|(peer, key_pair)| { start_network( peer.clone(), + key_pair, peers.clone(), msgs.clone(), Arc::clone(&barrier), @@ -315,6 +310,7 @@ async fn multiple_networks() { async fn start_network( peer: PeerId, + key_pair: KeyPair, peers: Vec, messages: WaitForN, barrier: Arc, @@ -324,11 +320,8 @@ async fn start_network( // This actor will get the messages from other peers and increment the counter let actor = TestActor::start(messages); - let PeerId { - address, - public_key, - } = peer.clone(); - let mut network = NetworkHandle::start(address, public_key).await.unwrap(); + let PeerId { address, .. } = peer.clone(); + let mut network = NetworkHandle::start(address, key_pair).await.unwrap(); network.subscribe_to_peers_messages(actor); let _ = barrier.wait().await; @@ -357,21 +350,21 @@ async fn start_network( #[test] fn test_encryption() { - use iroha_crypto::ursa::encryption::symm::prelude::*; + use iroha_crypto::encryption::{ChaCha20Poly1305, SymmetricEncryptor}; const TEST_KEY: [u8; 32] = [ 5, 87, 82, 183, 220, 57, 107, 49, 227, 4, 96, 231, 198, 88, 153, 11, 22, 65, 56, 45, 237, 35, 231, 165, 122, 153, 14, 68, 13, 84, 5, 24, ]; - let encryptor = SymmetricEncryptor::::new_with_key(TEST_KEY).unwrap(); + let encryptor = SymmetricEncryptor::::new_with_key(TEST_KEY); let message = b"Some ciphertext"; let aad = b"Iroha2 AAD"; - let res = encryptor.encrypt_easy(aad.as_ref(), message.as_ref()); - assert!(res.is_ok()); - - let ciphertext = res.unwrap(); - let res_cipher = encryptor.decrypt_easy(aad.as_ref(), ciphertext.as_slice()); - assert!(res_cipher.is_ok()); - assert_eq!(res_cipher.unwrap().as_slice(), message); + let ciphertext = encryptor + .encrypt_easy(aad.as_ref(), message.as_ref()) + .unwrap(); + let decrypted = encryptor + .decrypt_easy(aad.as_ref(), ciphertext.as_slice()) + .unwrap(); + assert_eq!(decrypted.as_slice(), message); } diff --git a/primitives/Cargo.toml b/primitives/Cargo.toml index 43ef5bbabfb..6d9328a51ef 100644 --- a/primitives/Cargo.toml +++ b/primitives/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [badges] is-it-maintained-issue-resolution = { repository = "https://github.com/hyperledger/iroha" } is-it-maintained-open-issues = { repository = "https://github.com/hyperledger/iroha" } @@ -34,12 +37,11 @@ fixnum = { workspace = true, features = ["serde", "parity", "i64"] } derive_more = { workspace = true, features = ["display", "from", "as_ref", "as_mut", "deref", "constructor", "into_iterator"] } serde = { workspace = true, features = ["derive"] } serde_with = { workspace = true, features = ["macros"] } -smallvec = { version = "1.10.0", default-features = false, features = ["serde", "union"] } +smallvec = { version = "1.11.1", default-features = false, features = ["serde", 
"union"] } smallstr = { version = "0.3.0", default-features = false, features = ["serde", "union"] } thiserror = { workspace = true, optional = true } displaydoc = { workspace = true } - [dev-dependencies] serde_json = { workspace = true, features = ["alloc"] } trybuild = { workspace = true } diff --git a/primitives/src/addr.rs b/primitives/src/addr.rs index 00465a6fdfe..0e704f10353 100644 --- a/primitives/src/addr.rs +++ b/primitives/src/addr.rs @@ -6,7 +6,7 @@ #[cfg(not(feature = "std"))] use alloc::{format, string::String, vec::Vec}; -use derive_more::{AsRef, From, IntoIterator}; +use derive_more::{AsRef, DebugCustom, Display, From, IntoIterator}; use iroha_macro::FromVariant; pub use iroha_primitives_derive::socket_addr; use iroha_schema::IntoSchema; @@ -38,10 +38,8 @@ ffi::ffi_item! { /// An Iroha-native version of [`std::net::Ipv4Addr`], duplicated here /// to remain `no_std` compatible. #[derive( - AsRef, - From, - IntoIterator, - Debug, + DebugCustom, + Display, Clone, Copy, PartialEq, @@ -49,12 +47,17 @@ ffi::ffi_item! { PartialOrd, Ord, Hash, + AsRef, + From, + IntoIterator, DeserializeFromStr, SerializeDisplay, Encode, Decode, IntoSchema, )] + #[display(fmt = "{}.{}.{}.{}", "self.0[0]", "self.0[1]", "self.0[2]", "self.0[3]")] + #[debug(fmt = "{}.{}.{}.{}", "self.0[0]", "self.0[1]", "self.0[2]", "self.0[3]")] #[repr(transparent)] pub struct Ipv4Addr([u8; 4]); @@ -69,13 +72,6 @@ impl Ipv4Addr { } } -impl core::fmt::Display for Ipv4Addr { - #[inline] - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{}.{}.{}.{}", self.0[0], self.0[1], self.0[2], self.0[3]) - } -} - impl core::ops::Index for Ipv4Addr { type Output = u8; @@ -123,9 +119,6 @@ ffi::ffi_item! { /// An Iroha-native version of [`std::net::Ipv6Addr`], duplicated here /// to remain `no_std` compatible. #[derive( - AsRef, - From, - IntoIterator, Debug, Clone, Copy, @@ -134,6 +127,9 @@ ffi::ffi_item! { PartialOrd, Ord, Hash, + AsRef, + From, + IntoIterator, DeserializeFromStr, SerializeDisplay, Encode, @@ -261,7 +257,8 @@ ffi::ffi_item! { /// This struct provides an Iroha-native version of [`std::net::SocketAddrV4`]. It is duplicated here /// in order to remain `no_std` compatible. #[derive( - Debug, + DebugCustom, + Display, Clone, Copy, PartialEq, @@ -275,6 +272,8 @@ ffi::ffi_item! { Decode, IntoSchema, )] + #[display(fmt = "{}:{}", "self.ip", "self.port")] + #[debug(fmt = "{}:{}", "self.ip", "self.port")] pub struct SocketAddrV4 { /// The Ipv4 address. pub ip: Ipv4Addr, @@ -292,12 +291,6 @@ impl From<([u8; 4], u16)> for SocketAddrV4 { } } -impl core::fmt::Display for SocketAddrV4 { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "{}:{}", self.ip, self.port) - } -} - impl core::str::FromStr for SocketAddrV4 { type Err = ParseError; @@ -314,7 +307,8 @@ ffi::ffi_item! { /// This struct provides an Iroha-native version of [`std::net::SocketAddrV6`]. It is duplicated here /// in order to remain `no_std` compatible. #[derive( - Debug, + DebugCustom, + Display, Clone, Copy, PartialEq, @@ -328,6 +322,8 @@ ffi::ffi_item! { Decode, IntoSchema, )] + #[display(fmt = "[{}]:{}", "self.ip", "self.port")] + #[debug(fmt = "[{}]:{}", "self.ip", "self.port")] pub struct SocketAddrV6 { /// The Ipv6 address. 
pub ip: Ipv6Addr, @@ -345,12 +341,6 @@ impl From<([u16; 8], u16)> for SocketAddrV6 { } } -impl core::fmt::Display for SocketAddrV6 { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "[{}]:{}", self.ip, self.port) - } -} - impl core::str::FromStr for SocketAddrV6 { type Err = ParseError; @@ -411,7 +401,8 @@ ffi::ffi_item! { /// This enum provides an Iroha-native version of [`std::net::SocketAddr`]. It is duplicated here /// in order to remain `no_std` compatible. #[derive( - Debug, + DebugCustom, + Display, Clone, PartialEq, Eq, @@ -475,16 +466,6 @@ impl SocketAddr { } } -impl core::fmt::Display for SocketAddr { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - SocketAddr::Ipv4(addr) => write!(f, "{}", addr), - SocketAddr::Ipv6(addr) => write!(f, "{}", addr), - SocketAddr::Host(addr) => write!(f, "{}", addr), - } - } -} - impl From<([u8; 4], u16)> for SocketAddr { fn from(value: ([u8; 4], u16)) -> Self { Self::Ipv4(value.into()) @@ -785,10 +766,10 @@ mod test { port: 9019, }); - assert_eq!( - serde_json::from_str::(&serde_json::to_string(&v6).unwrap()).unwrap(), - v6 - ); + let kita = &serde_json::to_string(&v6).unwrap(); + println!("{kita}"); + let kara = serde_json::from_str::(kita).unwrap(); + assert_eq!(kara, v6); let host = SocketAddr::Host(SocketAddrHost { host: "localhost".into(), diff --git a/primitives/src/cmpext.rs b/primitives/src/cmpext.rs new file mode 100644 index 00000000000..0de287e3f3c --- /dev/null +++ b/primitives/src/cmpext.rs @@ -0,0 +1,144 @@ +//! Utilities to work with [`BTreeMap`]/[`BTreeSet`] `get`/`range` functions. + +use core::cmp::Ordering; + +/// Type which adds two additional values for any type: +/// - `Min` which is smaller that any value of this type +/// - `Max` which is greater that any value of this type +/// +/// Used to enable query over prefix of the given key in the b-tree e.g. by account id in the asset id. +/// +/// Suppose compound key of three parts: `K = (A, B, C)`. +/// So that in sorting order keys will be sorted firstly by `A` then `B` and `C`. +/// This keys are stored in `BTreeMap` and it's required to extract all keys which have `A == a`. +/// To do this it's possible to use `range` provided by `BTreeMap`, +/// but it would't be enough to simply use `(a..=a)` bound for `K` because ranges bounds are found by binary search +/// and this way any key which has `A == a` can be treated as bound. +/// So `MinMaxExt` is used to express precise bound for such query: `(a, MIN, MIN)..(a, MAX, MAX)`. 
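The bound trick described in that doc comment can be exercised against a plain `BTreeMap`. A sketch, assuming `MinMaxExt` is reachable at `iroha_primitives::cmpext` (the `lib.rs` hunk below adds `pub mod cmpext;`):

    use std::collections::BTreeMap;
    use iroha_primitives::cmpext::MinMaxExt;

    fn main() {
        // Compound key (A, B): fetch every entry with A == 1, any B.
        let mut map: BTreeMap<(u8, MinMaxExt<u8>), &str> = BTreeMap::new();
        map.insert((1, 10.into()), "first");
        map.insert((1, 20.into()), "second");
        map.insert((2, 5.into()), "other");

        // `Min`/`Max` bracket all real values, so the range's binary search
        // cannot land on an arbitrary `(1, _)` entry and miss its siblings.
        let hits: Vec<&str> = map
            .range((1, MinMaxExt::Min)..=(1, MinMaxExt::Max))
            .map(|(_key, value)| *value)
            .collect();
        assert_eq!(hits, ["first", "second"]);
    }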
+#[derive(Debug, Clone, Copy)] +pub enum MinMaxExt { + /// Value that is greater than any value + Min, + /// Value that is smaller than any value + Max, + /// Regular value + Value(T), +} + +impl PartialEq for MinMaxExt { + fn eq(&self, other: &Self) -> bool { + match (self, other) { + (Self::Value(lhs), Self::Value(rhs)) => lhs.eq(rhs), + (Self::Min, Self::Min) | (Self::Max, Self::Max) => true, + _ => false, + } + } +} + +impl Eq for MinMaxExt {} + +impl PartialOrd for MinMaxExt { + fn partial_cmp(&self, other: &Self) -> Option { + match (self, other) { + (Self::Value(lhs), Self::Value(rhs)) => lhs.partial_cmp(rhs), + (lhs, rhs) if lhs == rhs => Some(Ordering::Equal), + (Self::Min, _) | (_, Self::Max) => Some(Ordering::Less), + (Self::Max, _) | (_, Self::Min) => Some(Ordering::Greater), + } + } +} + +impl Ord for MinMaxExt { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (Self::Value(lhs), Self::Value(rhs)) => lhs.cmp(rhs), + (lhs, rhs) if lhs == rhs => Ordering::Equal, + (Self::Min, _) | (_, Self::Max) => Ordering::Less, + (Self::Max, _) | (_, Self::Min) => Ordering::Greater, + } + } +} + +impl From for MinMaxExt { + fn from(value: T) -> Self { + MinMaxExt::Value(value) + } +} + +/// Helper macro to enable cast of key to dyn object and derive required traits for it. +/// Used to bypass limitation of [`Borrow`] which wouldn't allow to create object and return reference to it. +#[macro_export] +macro_rules! impl_as_dyn_key { + (target: $ty:ident, key: $key:ty, trait: $trait:ident) => { + /// Trait to key from type + pub trait $trait { + /// Extract key + fn as_key(&self) -> $key; + } + + impl $trait for $key { + fn as_key(&self) -> $key { + *self + } + } + + impl PartialEq for dyn $trait + '_ { + fn eq(&self, other: &Self) -> bool { + self.as_key() == other.as_key() + } + } + + impl Eq for dyn $trait + '_ {} + + impl PartialOrd for dyn $trait + '_ { + fn partial_cmp(&self, other: &Self) -> Option<::core::cmp::Ordering> { + self.as_key().partial_cmp(&other.as_key()) + } + } + + impl Ord for dyn $trait + '_ { + fn cmp(&self, other: &Self) -> ::core::cmp::Ordering { + self.as_key().cmp(&other.as_key()) + } + } + + impl<'lt> ::core::borrow::Borrow for $ty { + fn borrow(&self) -> &(dyn $trait + 'lt) { + self + } + } + }; +} + +/// TODO: good candidate for `prop_test` +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn any_larger_min() { + let values = [u64::MIN, u64::MAX]; + + for value in values { + assert!(MinMaxExt::Min < value.into()); + } + } + + #[test] + fn any_smaller_max() { + let values = [u64::MIN, u64::MAX]; + + for value in values { + assert!(MinMaxExt::Max > value.into()); + } + } + + #[test] + fn eq_still_eq() { + let values = [u64::MIN, u64::MAX]; + + for value in values { + assert!(MinMaxExt::from(value) == MinMaxExt::from(value)); + } + } +} diff --git a/primitives/src/const_vec.rs b/primitives/src/const_vec.rs index 859129fe3b3..30974346c16 100644 --- a/primitives/src/const_vec.rs +++ b/primitives/src/const_vec.rs @@ -9,23 +9,39 @@ use iroha_schema::{IntoSchema, MetaMap, Metadata, TypeId, VecMeta}; use parity_scale_codec::{WrapperTypeDecode, WrapperTypeEncode}; use serde::{Deserialize, Serialize}; -/// Stores bytes that are not supposed to change during the runtime of the program in a compact way -/// -/// This is a more efficient than `Vec` because it does not have to store the capacity field -/// -/// It does not do reference-counting, so cloning is not cheap -#[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default, Serialize, 
Deserialize)] -pub struct ConstVec(Box<[T]>); +use crate::ffi; + +ffi::ffi_item! { + /// Stores bytes that are not supposed to change during the runtime of the program in a compact way + /// + /// This is a more efficient than `Vec` because it does not have to store the capacity field + /// + /// It does not do reference-counting, so cloning is not cheap + #[derive(Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Default, Serialize, Deserialize)] + #[repr(transparent)] + pub struct ConstVec(Box<[T]>); + + // SAFETY: `ConstVec` has no trap representation in ConstVec + ffi_type(unsafe {robust}) +} impl ConstVec { /// Create a new `ConstVec` from something convertible into a `Box<[T]>`. /// /// Using `Vec` here would take ownership of the data without needing to copy it (if length is the same as capacity). + #[inline] pub fn new(content: impl Into>) -> Self { Self(content.into()) } + /// Creates an empty `ConstVec`. This operation does not allocate any memory. + #[inline] + pub fn new_empty() -> Self { + Self(Vec::new().into()) + } + /// Converts the `ConstVec` into a `Vec`, reusing the heap allocation. + #[inline] pub fn into_vec(self) -> Vec { self.0.into_vec() } diff --git a/primitives/src/conststr.rs b/primitives/src/conststr.rs index a3e49c450b3..1749d62475b 100644 --- a/primitives/src/conststr.rs +++ b/primitives/src/conststr.rs @@ -1,10 +1,4 @@ //! Const-string related implementation and structs. -#![allow( - clippy::std_instead_of_core, - clippy::undocumented_unsafe_blocks, - clippy::arithmetic_side_effects -)] - #[cfg(not(feature = "std"))] use alloc::{ borrow::ToOwned as _, @@ -140,7 +134,7 @@ impl Borrow for ConstString { impl Hash for ConstString { #[inline] fn hash(&self, state: &mut H) { - (**self).hash(state) + (**self).hash(state); } } @@ -478,7 +472,6 @@ impl TryFrom for InlinedString { } } -#[allow(clippy::restriction)] #[cfg(test)] mod tests { use super::*; @@ -688,6 +681,6 @@ mod tests { ] .into_iter() .map(str::to_owned) - .for_each(f) + .for_each(f); } } diff --git a/primitives/src/fixed.rs b/primitives/src/fixed.rs index 49ce809b792..1b01678da78 100644 --- a/primitives/src/fixed.rs +++ b/primitives/src/fixed.rs @@ -1,6 +1,4 @@ //! Types used for Fixed-point operations. Uses [`fixnum::FixedPoint`]. -#![allow(clippy::std_instead_of_core)] - #[cfg(not(feature = "std"))] use alloc::{ format, @@ -59,7 +57,6 @@ impl Fixed { #[inline] #[cfg(test)] pub fn negative_one() -> Self { - #[allow(clippy::unwrap_used)] Self("-1".parse().unwrap()) } @@ -245,7 +242,6 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction, clippy::panic)] use parity_scale_codec::DecodeAll; use super::*; @@ -287,7 +283,7 @@ mod tests { assert_eq!( result.unwrap_err().to_string(), "`-1.0`: negative value not allowed" - ) + ); } #[test] diff --git a/primitives/src/lib.rs b/primitives/src/lib.rs index a97ab4fd19c..1ec497539f0 100644 --- a/primitives/src/lib.rs +++ b/primitives/src/lib.rs @@ -12,6 +12,8 @@ extern crate alloc; pub mod addr; +pub mod cmpext; +#[cfg(not(feature = "ffi_import"))] pub mod const_vec; #[cfg(not(feature = "ffi_import"))] pub mod conststr; @@ -19,6 +21,7 @@ pub mod fixed; pub mod must_use; pub mod riffle_iter; pub mod small; +pub mod unique_vec; use fixed::prelude::*; diff --git a/primitives/src/small.rs b/primitives/src/small.rs index 2e39d05fee9..ddcd4e1635d 100644 --- a/primitives/src/small.rs +++ b/primitives/src/small.rs @@ -172,7 +172,7 @@ mod small_vector { /// Append an item to the vector. 
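The extended `ConstVec` API above in day-to-day use (only methods shown in this diff are used; the crate path follows `primitives/src/lib.rs`):

    use iroha_primitives::const_vec::ConstVec;

    fn main() {
        // `new` accepts anything convertible into `Box<[T]>`; a `Vec` whose
        // length equals its capacity is moved without copying.
        let bytes = ConstVec::new(vec![1_u8, 2, 3]);
        // `new_empty` allocates nothing.
        let empty = ConstVec::<u8>::new_empty();
        assert!(empty.into_vec().is_empty());
        // `into_vec` reuses the existing heap allocation.
        assert_eq!(bytes.into_vec(), vec![1, 2, 3]);
    }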
#[inline] pub fn push(&mut self, value: A::Item) { - self.0.push(value) + self.0.push(value); } /// Remove and return the element at position `index`, shifting all elements after it to the @@ -243,7 +243,7 @@ mod small_vector { impl Extend for SmallVec { fn extend>(&mut self, iter: T) { - self.0.extend(iter) + self.0.extend(iter); } } diff --git a/primitives/src/unique_vec.rs b/primitives/src/unique_vec.rs new file mode 100644 index 00000000000..4448aef397f --- /dev/null +++ b/primitives/src/unique_vec.rs @@ -0,0 +1,363 @@ +//! Module with [`UniqueVec`] type and related functional. + +#[cfg(not(feature = "std"))] +use alloc::{borrow::ToOwned as _, format, string::String, vec::Vec}; +use core::{ + borrow::Borrow, + fmt::{Debug, Display}, +}; + +use derive_more::{AsRef, Deref}; +use iroha_schema::IntoSchema; +use parity_scale_codec::{Decode, Encode}; +use serde::{Deserialize, Serialize}; + +/// Creates a [`UniqueVec`](unique_vec::UniqueVec) from a list of values. +/// +/// Works like [`vec!`] macro, but does not accept syntax for repeated values +/// and might return [`Result`]. +#[macro_export] +macro_rules! unique_vec { + () => { + $crate::unique_vec::UniqueVec::new() + }; + ($($x:expr),+ $(,)?) => {{ + let mut v = $crate::unique_vec::UniqueVec::new(); + $(v.push($x);)+ + v + }}; +} + +/// Wrapper type for [`Vec`] which ensures that all elements are unique. +#[derive( + Debug, + Deref, + AsRef, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Deserialize, + Serialize, + Encode, + Decode, + IntoSchema, +)] +pub struct UniqueVec(Vec); + +impl UniqueVec { + /// Create new [`UniqueVec`]. + #[must_use] + pub fn new() -> Self { + Self::default() + } + + /// Removes the element at the given `index` and returns it. + /// + /// # Panics + /// + /// Panics if the `index` is out of bounds. + pub fn remove(&mut self, index: usize) -> T { + self.0.remove(index) + } + + /// Clears the [`UniqueVec`], removing all values. + pub fn clear(&mut self) { + self.0.clear(); + } +} + +impl UniqueVec { + /// Push `value` to [`UniqueVec`] if it is not already present. + /// + /// Returns `true` if value was pushed and `false` if not. + pub fn push(&mut self, value: T) -> bool { + if self.contains(&value) { + false + } else { + self.0.push(value); + true + } + } +} + +impl Default for UniqueVec { + fn default() -> Self { + Self(Vec::new()) + } +} + +impl FromIterator for UniqueVec { + fn from_iter>(iter: I) -> Self { + let mut unique_vec = Self::new(); + unique_vec.extend(iter); + unique_vec + } +} + +impl Extend for UniqueVec { + fn extend>(&mut self, iter: I) { + for value in iter { + self.push(value); + } + } +} + +impl From> for Vec { + fn from(value: UniqueVec) -> Self { + value.0 + } +} + +impl Borrow<[T]> for UniqueVec { + fn borrow(&self) -> &[T] { + self.0.borrow() + } +} + +impl IntoIterator for UniqueVec { + type Item = T; + type IntoIter = as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +impl<'ve, T: PartialEq> IntoIterator for &'ve UniqueVec { + type Item = &'ve T; + type IntoIter = <&'ve Vec as IntoIterator>::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + self.0.iter() + } +} + +impl<'de, T: PartialEq + Deserialize<'de>> UniqueVec { + /// Deserialize [`UniqueVec`], failing on first duplicate. + /// + /// Default implementation of [`Deserialize`] for [`UniqueVec`] ignores duplicates. + /// + /// # Errors + /// + /// - If deserialization of `T` fails. + /// - If there are duplicates in the sequence. 
+
+impl<'de, T: PartialEq + Deserialize<'de>> UniqueVec<T> {
+    /// Deserialize [`UniqueVec`], failing on the first duplicate.
+    ///
+    /// The default implementation of [`Deserialize`] for [`UniqueVec`] ignores duplicates.
+    ///
+    /// # Errors
+    ///
+    /// - If deserialization of `T` fails.
+    /// - If there are duplicates in the sequence.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use serde::{Deserialize, de::Error as _};
+    /// use iroha_primitives::unique_vec::UniqueVec;
+    ///
+    /// #[derive(Debug, PartialEq, Deserialize)]
+    /// pub struct Config {
+    ///     #[serde(deserialize_with = "UniqueVec::deserialize_failing_on_duplicates")]
+    ///     numbers: UniqueVec<u32>,
+    /// }
+    ///
+    /// let err = serde_json::from_str::<Config>(r#"{"numbers": [1, 2, 3, 2, 4, 5]}"#).unwrap_err();
+    /// assert_eq!(
+    ///     err.to_string(),
+    ///     "Duplicated value at line 1 column 25",
+    /// );
+    /// ```
+    pub fn deserialize_failing_on_duplicates<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        Self::fail_on_duplicate_deserialize_impl(deserializer, |_value| {
+            "Duplicated value".to_owned()
+        })
+    }
+}
+
+impl<'de, T: Debug + PartialEq + Deserialize<'de>> UniqueVec<T> {
+    /// Deserialize [`UniqueVec`], failing on the first duplicate and printing its [`Debug`]
+    /// representation.
+    ///
+    /// The default implementation of [`Deserialize`] for [`UniqueVec`] ignores duplicates.
+    ///
+    /// # Errors
+    ///
+    /// - If deserialization of `T` fails.
+    /// - If there are duplicates in the sequence.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use serde::{Deserialize, de::Error as _};
+    /// use iroha_primitives::unique_vec::UniqueVec;
+    ///
+    /// #[derive(Debug, PartialEq, Deserialize)]
+    /// pub struct Config {
+    ///     #[serde(deserialize_with = "UniqueVec::debug_deserialize_failing_on_duplicates")]
+    ///     arrays: UniqueVec<Vec<u32>>,
+    /// }
+    ///
+    /// let err = serde_json::from_str::<Config>(r#"{"arrays": [[1, 2, 3], [9, 8], [1, 2, 3]]}"#).unwrap_err();
+    /// assert_eq!(
+    ///     err.to_string(),
+    ///     "Duplicated value `[1, 2, 3]` at line 1 column 41",
+    /// );
+    /// ```
+    pub fn debug_deserialize_failing_on_duplicates<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        Self::fail_on_duplicate_deserialize_impl(deserializer, |value| {
+            format!("Duplicated value `{value:?}`")
+        })
+    }
+}
+
+impl<'de, T: Display + PartialEq + Deserialize<'de>> UniqueVec<T> {
+    /// Deserialize [`UniqueVec`], failing on the first duplicate and printing its [`Display`]
+    /// representation.
+    ///
+    /// The default implementation of [`Deserialize`] for [`UniqueVec`] ignores duplicates.
+    ///
+    /// # Errors
+    ///
+    /// - If deserialization of `T` fails.
+    /// - If there are duplicates in the sequence.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use serde::{Deserialize, de::Error as _};
+    /// use iroha_primitives::unique_vec::UniqueVec;
+    ///
+    /// #[derive(Debug, PartialEq, Deserialize)]
+    /// pub struct Config {
+    ///     #[serde(deserialize_with = "UniqueVec::display_deserialize_failing_on_duplicates")]
+    ///     numbers: UniqueVec<u32>,
+    /// }
+    ///
+    /// let err = serde_json::from_str::<Config>(r#"{"numbers": [1, 2, 3, 2, 4, 5]}"#).unwrap_err();
+    /// assert_eq!(
+    ///     err.to_string(),
+    ///     "Duplicated value `2` at line 1 column 25",
+    /// );
+    /// ```
+    pub fn display_deserialize_failing_on_duplicates<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+    {
+        Self::fail_on_duplicate_deserialize_impl(deserializer, |value| {
+            format!("Duplicated value `{value}`")
+        })
+    }
+}
+
+impl<'de, T: PartialEq + Deserialize<'de>> UniqueVec<T> {
+    /// Deserialize [`UniqueVec`], calling `f` on the duplicated value to get the error message.
+    fn fail_on_duplicate_deserialize_impl<D, F>(deserializer: D, f: F) -> Result<Self, D::Error>
+    where
+        D: serde::Deserializer<'de>,
+        F: FnOnce(&T) -> String,
+    {
+        /// Helper for constructing a unique visitor that errors whenever
+        /// a duplicate entry is found.
+        struct UniqueVisitor<T, F: FnOnce(&T) -> String> {
+            _marker: core::marker::PhantomData<T>,
+            f: F,
+        }
+
+        impl<'de, T, F> serde::de::Visitor<'de> for UniqueVisitor<T, F>
+        where
+            T: Deserialize<'de> + PartialEq,
+            F: FnOnce(&T) -> String,
+        {
+            type Value = Vec<T>;
+
+            fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
+                formatter.write_str("a set of unique items.")
+            }
+
+            fn visit_seq<S>(self, mut seq: S) -> Result<Vec<T>, S::Error>
+            where
+                S: serde::de::SeqAccess<'de>,
+            {
+                let mut result = Vec::with_capacity(seq.size_hint().unwrap_or(0));
+
+                while let Some(value) = seq.next_element()? {
+                    if result.contains(&value) {
+                        return Err(serde::de::Error::custom((self.f)(&value)));
+                    }
+                    result.push(value);
+                }
+
+                Ok(result)
+            }
+        }
+
+        let inner = deserializer.deserialize_seq(UniqueVisitor::<T, F> {
+            _marker: core::marker::PhantomData,
+            f,
+        })?;
+        Ok(UniqueVec(inner))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn default_creates_empty_vec() {
+        let unique_vec = UniqueVec::<u32>::default();
+        assert!(unique_vec.is_empty());
+    }
+
+    #[test]
+    fn new_creates_empty_vec() {
+        let unique_vec = UniqueVec::<u32>::new();
+        assert!(unique_vec.is_empty());
+    }
+
+    #[test]
+    fn push_returns_true_if_value_is_unique() {
+        let mut unique_vec = unique_vec![1, 3, 4];
+        assert!(unique_vec.push(2));
+    }
+
+    #[test]
+    fn push_returns_false_if_value_is_not_unique() {
+        let mut unique_vec = unique_vec![1, 2, 3];
+        assert!(!unique_vec.push(1));
+    }
+
+    #[test]
+    fn remove_returns_value_at_index() {
+        let mut unique_vec = unique_vec![1, 2, 3];
+        assert_eq!(unique_vec.remove(1), 2);
+    }
+
+    #[test]
+    #[should_panic]
+    fn remove_out_of_bounds_panics() {
+        let mut unique_vec = unique_vec![1, 2, 3];
+        unique_vec.remove(3);
+    }
+
+    #[test]
+    fn clear_removes_all_values() {
+        let mut unique_vec = unique_vec![1, 2, 3];
+        unique_vec.clear();
+        assert!(unique_vec.is_empty());
+    }
+
+    #[test]
+    fn from_iter_creates_unique_vec() {
+        let unique_vec = UniqueVec::from_iter([1, 1, 2, 3, 2]);
+        assert_eq!(unique_vec, unique_vec![1, 2, 3]);
+    }
+
+    #[test]
+    fn extend_adds_unique_values() {
+        let mut unique_vec = unique_vec![1, 2, 3];
+        unique_vec.extend([1, 2, 3, 4, 5]);
+        assert_eq!(unique_vec, unique_vec![1, 2, 3, 4, 5]);
+    }
+}
diff --git a/schema/Cargo.toml b/schema/Cargo.toml
index d696b5c0426..3deae73c8c5 100644
--- a/schema/Cargo.toml
+++ b/schema/Cargo.toml
@@ -7,6 +7,9 @@
 authors.workspace = true

 license.workspace = true

+[lints]
+workspace = true
+
 [dependencies]
 iroha_schema_derive = { workspace = true }

@@ -15,5 +18,5 @@
 fixnum = { workspace = true, features = ["i64"] }

 [dev-dependencies]
 parity-scale-codec = { workspace = true, default-features = false, features = ["derive", "full"] }
+serde_json = { workspace = true, features = ["alloc"] }
 impls = { workspace = true }
-serde_json = { workspace = true, features = ["std"] }
diff --git a/schema/derive/Cargo.toml b/schema/derive/Cargo.toml
index fa2fea8a9bf..d17a56721ee 100644
--- a/schema/derive/Cargo.toml
+++ b/schema/derive/Cargo.toml
@@ -7,6 +7,9 @@
 authors.workspace = true

 license.workspace = true

+[lints]
+workspace = true
+
 [lib]
 proc-macro = true
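The derive crate below is what drives the `schemas!`/`types!` machinery in `schema/gen`. A minimal sketch of how a type opts into schema generation; it assumes `#[derive(IntoSchema)]` also provides the required `TypeId` impl, and that `IntoSchema` and `MetaMap` are re-exported through `iroha_schema::prelude` as the `schema/gen` code suggests:

```rust
use iroha_schema::prelude::*;

// Hypothetical user type; deriving `IntoSchema` records its layout
// (and that of every type it references) in a `MetaMap`.
#[derive(IntoSchema)]
struct Pair {
    left: u32,
    right: u64,
}

fn main() {
    // `schema()` starts from an empty `MetaMap` and fills it in.
    let _standalone = Pair::schema();

    // `update_schema_map` extends an existing map instead, which is how
    // `build_schemas` in `schema/gen` accumulates the full schema.
    let mut map = MetaMap::new();
    Pair::update_schema_map(&mut map);
}
```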
diff --git a/schema/derive/src/lib.rs b/schema/derive/src/lib.rs
index b3eb3795b53..233a901e5cb 100644
--- a/schema/derive/src/lib.rs
+++ b/schema/derive/src/lib.rs
@@ -1,6 +1,5 @@
 //! Crate with derive `IntoSchema` macro
-#![allow(clippy::arithmetic_side_effects)]

 // darling-generated code triggers this lint
 #![allow(clippy::option_if_let_else)]
@@ -26,7 +25,7 @@ fn impl_type_id(input: &mut syn2::DeriveInput) -> TokenStream {
     input.generics.type_params_mut().for_each(|ty_param| {
         ty_param
             .bounds
-            .push(syn2::parse_quote! {iroha_schema::TypeId})
+            .push(syn2::parse_quote! {iroha_schema::TypeId});
     });

     let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
@@ -177,7 +176,7 @@ pub fn schema_derive(input: TokenStream) -> Result {
     input.generics.type_params_mut().for_each(|ty_param| {
         ty_param
             .bounds
-            .push(parse_quote! {iroha_schema::IntoSchema})
+            .push(parse_quote! {iroha_schema::IntoSchema});
     });

     let mut emitter = Emitter::new();
diff --git a/schema/gen/Cargo.toml b/schema/gen/Cargo.toml
index f231c08239b..53e89aa94b1 100644
--- a/schema/gen/Cargo.toml
+++ b/schema/gen/Cargo.toml
@@ -7,6 +7,9 @@
 authors.workspace = true

 license.workspace = true

+[lints]
+workspace = true
+
 [dependencies]
 iroha_genesis = { workspace = true }
 iroha_primitives = { workspace = true }
diff --git a/schema/gen/src/lib.rs b/schema/gen/src/lib.rs
index 11712455494..ddac0fad272 100644
--- a/schema/gen/src/lib.rs
+++ b/schema/gen/src/lib.rs
@@ -1,11 +1,11 @@
 //! Iroha schema generation support library. Contains the
 //! `build_schemas` `fn`, which is the function which decides which
 //! types are included in the schema.
-#![allow(clippy::arithmetic_side_effects)]
-
 use iroha_crypto::MerkleTree;
 use iroha_data_model::{
-    block::stream::prelude::*, http::VersionedBatchedResponse, query::error::QueryExecutionFail,
+    block::stream::{BlockMessage, BlockSubscriptionRequest},
+    query::error::QueryExecutionFail,
+    BatchedResponse,
 };
 use iroha_genesis::RawGenesisBlock;
 use iroha_schema::prelude::*;
@@ -19,7 +19,7 @@ macro_rules! types {
         $( $callback!($t); )+
         #[cfg(target_arch = "aarch64")]
-        $callback!(Box);
+        $callback!(Box);
     }}
 }
 }
@@ -42,18 +42,18 @@ pub fn build_schemas() -> MetaMap {
     schemas! {
         QueryExecutionFail,

-        VersionedBlockMessage,
-        VersionedBlockSubscriptionRequest,
-        VersionedEventMessage,
-        VersionedEventSubscriptionRequest,
-        VersionedBatchedResponse<Value>,
-        VersionedBatchedResponse<Vec<Value>>,
-        VersionedSignedQuery,
+        BlockMessage,
+        BlockSubscriptionRequest,
+        EventMessage,
+        EventSubscriptionRequest,
+        BatchedResponse<Value>,
+        BatchedResponse<Vec<Value>>,
+        SignedQuery,

         // Never referenced, but present in type signature.
Like `PhantomData` - UpgradableBox, + MerkleTree, RegistrableBox, - MerkleTree, + UpgradableBox, // SDK devs want to know how to read serialized genesis block RawGenesisBlock, @@ -68,7 +68,7 @@ types!( AccountId, AccountPermissionChanged, AccountRoleChanged, - Action, + Action, Add, Algorithm, And, @@ -96,22 +96,21 @@ types!( BTreeMap, BTreeSet, BTreeSet, - BTreeSet, - BTreeSet>, BatchedResponse, - BatchedResponse>, + BatchedResponse>, + BatchedResponseV1, + BatchedResponseV1>, BlockHeader, BlockMessage, BlockRejectionReason, BlockSubscriptionRequest, Box, Box>, - Box, + Box, Box, Box, - BurnBox, - CommittedBlock, - Conditional, + BurnExpr, + ConditionalExpr, ConfigurationEvent, ConstString, Container, @@ -149,12 +148,12 @@ types!( EventMessage, EventSubscriptionRequest, Executable, - ExecuteTriggerBox, + ExecuteTriggerExpr, ExecuteTriggerEvent, ExecuteTriggerEventFilter, ExecutionTime, Expression, - FailBox, + Fail, FilterBox, FilterOpt, FilterOpt, @@ -191,7 +190,6 @@ types!( FindAllDomains, FindAllParameters, FindAllPeers, - FindPermissionTokenSchema, FindAllRoleIds, FindAllRoles, FindAllTransactions, @@ -209,6 +207,7 @@ types!( FindDomainById, FindDomainKeyValueByIdAndKey, FindError, + FindPermissionTokenSchema, FindPermissionTokensByAccountId, FindRoleByRoleId, FindRolesByAccountId, @@ -221,16 +220,16 @@ types!( FixNum, Fixed, ForwardCursor, - GrantBox, + GrantExpr, Greater, Hash, - HashOf>, - HashOf, - HashOf, + HashOf>, + HashOf, + HashOf, IdBox, IdentifiableBox, If, - InstructionBox, + InstructionExpr, InstructionExecutionFail, Interval, Interval, @@ -239,17 +238,16 @@ types!( Ipv4Predicate, Ipv6Addr, Ipv6Predicate, - IsAssetDefinitionOwner, LengthLimits, Less, - MerkleTree, + MerkleTree, Metadata, MetadataChanged, MetadataChanged, MetadataChanged, MetadataChanged, MetadataLimits, - MintBox, + MintExpr, Mintable, Mod, Multiply, @@ -257,7 +255,7 @@ types!( NewAccount, NewAssetDefinition, NewDomain, - NewParameterBox, + NewParameterExpr, NewRole, NonTrivial, NonZeroU64, @@ -267,9 +265,9 @@ types!( Option, Option, Option, - Option>>, - Option>, - Option, + Option>>, + Option>, + Option, Option, Option, Option, @@ -285,7 +283,7 @@ types!( OriginFilter, OriginFilter, OriginFilter, - Pair, + PairExpr, Parameter, ParameterId, Peer, @@ -309,11 +307,11 @@ types!( QueryExecutionFail, QueryPayload, RaiseTo, - RegisterBox, + RegisterExpr, RegistrableBox, - RemoveKeyValueBox, + RemoveKeyValueExpr, Repeats, - RevokeBox, + RevokeExpr, Role, RoleEvent, RoleEventFilter, @@ -323,20 +321,22 @@ types!( SemiInterval, SemiInterval, SemiRange, - SequenceBox, - SetKeyValueBox, - SetParameterBox, + SequenceExpr, + SetKeyValueExpr, + SetParameterExpr, Signature, SignatureCheckCondition, - SignatureOf, SignatureOf, SignatureOf, - SignatureWrapperOf, SignatureWrapperOf, - SignaturesOf, SignaturesOf, + SignedBlock, + SignedBlockV1, + SignedBlockWrapper, SignedQuery, + SignedQueryV1, SignedTransaction, + SignedTransactionV1, String, StringPredicate, Subtract, @@ -350,41 +350,31 @@ types!( TransactionQueryOutput, TransactionRejectionReason, TransactionValue, - TransferBox, - Trigger, + TransferExpr, + Trigger, + TriggerCompletedEventFilter, + TriggerCompletedOutcomeType, TriggerEvent, TriggerEventFilter, TriggerFilter, TriggerId, TriggerNumberOfExecutionsChanged, - TriggerCompletedEventFilter, - TriggerCompletedOutcomeType, TriggeringFilterBox, - UnregisterBox, + UnregisterExpr, UpgradableBox, ValidationFail, - Validator, - ValidatorEvent, + Executor, + ExecutorEvent, Value, ValueOfKey, ValuePredicate, Vec, - 
Vec, + Vec, Vec, Vec, + Vec, Vec, - Vec, Vec, - VersionedBatchedResponse, - VersionedBatchedResponse>, - VersionedBlockMessage, - VersionedBlockSubscriptionRequest, - VersionedCommittedBlock, - VersionedCommittedBlockWrapper, - VersionedEventMessage, - VersionedEventSubscriptionRequest, - VersionedSignedQuery, - VersionedSignedTransaction, WasmExecutionFail, WasmSmartContract, Where, @@ -416,14 +406,11 @@ mod tests { asset::NewAssetDefinition, block::{ error::BlockRejectionReason, - stream::{ - BlockMessage, BlockSubscriptionRequest, VersionedBlockMessage, - VersionedBlockSubscriptionRequest, - }, - BlockHeader, CommittedBlock, VersionedCommittedBlock, + stream::{BlockMessage, BlockSubscriptionRequest}, + BlockHeader, SignedBlock, SignedBlockV1, }, domain::NewDomain, - http::{BatchedResponse, VersionedBatchedResponse}, + executor::Executor, ipfs::IpfsPath, predicate::{ ip_addr::{Ipv4Predicate, Ipv6Predicate}, @@ -437,9 +424,8 @@ mod tests { error::{FindError, QueryExecutionFail}, ForwardCursor, }, - transaction::{error::TransactionLimitError, SignedTransaction, TransactionLimits}, - validator::Validator, - VersionedCommittedBlockWrapper, + transaction::{error::TransactionLimitError, SignedTransactionV1, TransactionLimits}, + BatchedResponse, BatchedResponseV1, SignedBlockWrapper, }; use iroha_genesis::RawGenesisBlock; use iroha_primitives::{ @@ -576,6 +562,6 @@ mod tests { fn no_schema_type_overlap() { let mut schemas = super::build_schemas(); >::update_schema_map(&mut schemas); - >::update_schema_map(&mut schemas); + >::update_schema_map(&mut schemas); } } diff --git a/schema/src/lib.rs b/schema/src/lib.rs index 3fed7592d9d..558dfca3796 100644 --- a/schema/src/lib.rs +++ b/schema/src/lib.rs @@ -40,6 +40,7 @@ impl MetaMap { } /// Create new [`Self`] + #[must_use] pub const fn new() -> MetaMap { Self(btree_map::BTreeMap::new()) } @@ -103,6 +104,7 @@ pub trait IntoSchema: TypeId { } /// Return schema map of types referenced by [`Self`] + #[must_use] fn schema() -> MetaMap { let mut map = MetaMap::new(); Self::update_schema_map(&mut map); diff --git a/schema/src/serialize.rs b/schema/src/serialize.rs index 2afe1abf1ff..de295b1fa81 100644 --- a/schema/src/serialize.rs +++ b/schema/src/serialize.rs @@ -72,7 +72,7 @@ impl Serialize for WithContext<'_, '_, NamedFieldsMeta> { let mut seq = serializer.serialize_seq(Some(self.data.declarations.len()))?; for declaration in &self.data.declarations { - seq.serialize_element(&declaration.add_ctx(self.context))? + seq.serialize_element(&declaration.add_ctx(self.context))?; } seq.end() @@ -116,7 +116,7 @@ impl Serialize for WithContext<'_, '_, EnumMeta> { let mut seq = serializer.serialize_seq(Some(self.data.variants.len()))?; for variant in &self.data.variants { - seq.serialize_element(&variant.add_ctx(self.context))? 
+ seq.serialize_element(&variant.add_ctx(self.context))?; } seq.end() diff --git a/schema/tests/enum_with_various_discriminants.rs b/schema/tests/enum_with_various_discriminants.rs index 3cb6bb89529..b3c5ca2cdeb 100644 --- a/schema/tests/enum_with_various_discriminants.rs +++ b/schema/tests/enum_with_various_discriminants.rs @@ -1,10 +1,4 @@ // Lint triggers somewhere in Encode/Decode -#![allow( - trivial_numeric_casts, - clippy::unnecessary_cast, - clippy::std_instead_of_alloc -)] - use iroha_schema::prelude::*; use parity_scale_codec::{Decode, Encode}; diff --git a/schema/tests/fieldless_enum.rs b/schema/tests/fieldless_enum.rs index c1f506a95ff..d7cab1439c6 100644 --- a/schema/tests/fieldless_enum.rs +++ b/schema/tests/fieldless_enum.rs @@ -1,10 +1,4 @@ // Lint triggers somewhere in Encode/Decode -#![allow( - trivial_numeric_casts, - clippy::unnecessary_cast, - clippy::std_instead_of_alloc -)] - use iroha_schema::prelude::*; use parity_scale_codec::{Decode, Encode}; diff --git a/schema/tests/schema_json.rs b/schema/tests/schema_json.rs index bd1208141d6..4a98f3844ce 100644 --- a/schema/tests/schema_json.rs +++ b/schema/tests/schema_json.rs @@ -194,7 +194,7 @@ fn test_enum() { {"discriminant": 0, "tag": "Variant1", "type": "u32"}, {"discriminant": 1, "tag": "Variant3", "type": "String"} ]} - ) + ); } #[test] @@ -231,5 +231,5 @@ fn test_enum_codec_attr() { {"Enum": [ {"discriminant": 42, "tag": "Variant2", "type": "u32"} ]} - ) + ); } diff --git a/scripts/test_env.py b/scripts/test_env.py index 93bee9cb17a..42af70517cf 100755 --- a/scripts/test_env.py +++ b/scripts/test_env.py @@ -18,19 +18,20 @@ import urllib.error import urllib.request - class Network: """ A network of bootstrapped peers to run on bare metal. """ def __init__(self, args: argparse.Namespace): logging.info("Setting up test environment...") + + self.out_dir = args.out_dir peers_dir = args.out_dir.joinpath("peers") - os.mkdir(peers_dir) + os.makedirs(peers_dir, exist_ok=True) try: shutil.copy2(f"{args.root_dir}/configs/peer/config.json", peers_dir) shutil.copy2(f"{args.root_dir}/configs/peer/genesis.json", peers_dir) - shutil.copy2(f"{args.root_dir}/configs/peer/validator.wasm", peers_dir) + shutil.copy2(f"{args.root_dir}/configs/peer/executor.wasm", peers_dir) except FileNotFoundError: logging.error(f"Some of the config files are missing. \ Please provide them in the `{args.root_dir}/configs/peer` directory") @@ -50,14 +51,19 @@ def __init__(self, args: argparse.Namespace): peer_entry = {"address": f"{peer.host_ip}:{peer.p2p_port}", "public_key": peer.public_key} self.trusted_peers.append(json.dumps(peer_entry)) os.environ["SUMERAGI_TRUSTED_PEERS"] = f"[{','.join(self.trusted_peers)}]" - + + self.is_startup_benchmark = False + if args.benchmark: + del self.peers[1:] + self.is_startup_benchmark = True + def wait_for_genesis(self, n_tries: int): for i in range(n_tries): logging.info(f"Waiting for genesis block to be created... Attempt {i+1}/{n_tries}") try: - with urllib.request.urlopen(f"http://{self.peers[0].host_ip}:{self.peers[0].telemetry_port}/status/blocks") as response: + with urllib.request.urlopen(f"http://{self.peers[0].host_ip}:{self.peers[0].api_port}/status/blocks") as response: block_count = int(response.read()) - if block_count == 1: + if block_count >= 1: logging.info(f"Genesis block created. Block count: {block_count}") return else: @@ -67,12 +73,13 @@ def wait_for_genesis(self, n_tries: int): logging.info(f"Error connecting to genesis peer: {e}. 
Sleeping 1 second...") time.sleep(1) logging.critical(f"Genesis block wasn't created within {n_tries} seconds. Aborting...") + cleanup(self.out_dir) sys.exit(2) def run(self): - self.peers[0].run(is_genesis=True) + self.peers[0].run(self.is_startup_benchmark, is_genesis=True) for peer in self.peers[1:]: - peer.run() + peer.run(self.is_startup_benchmark) self.wait_for_genesis(20) class _Peer: @@ -85,7 +92,6 @@ def __init__(self, args: argparse.Namespace, nth: int): self.name = f"iroha{nth}" self.p2p_port = 1337 + nth self.api_port = 8080 + nth - self.telemetry_port = 8180 + nth self.tokio_console_port = 5555 + nth self.out_dir = args.out_dir self.root_dir = args.root_dir @@ -93,8 +99,10 @@ def __init__(self, args: argparse.Namespace, nth: int): logging.info(f"Peer {self.name} generating key pair...") - kagami = subprocess.run([f"{self.out_dir}/kagami","crypto", "-j"], - capture_output=True) + command = [f"{self.out_dir}/kagami", "crypto", "-j"] + if args.peer_name_as_seed: + command.extend(["-s", self.name]) + kagami = subprocess.run(command, capture_output=True) if kagami.returncode: logging.error("Kagami failed to generate a key pair.") sys.exit(3) @@ -105,12 +113,12 @@ def __init__(self, args: argparse.Namespace, nth: int): self.private_key = json.dumps(json_keypair['private_key']) logging.info(f"Peer {self.name} initialized") - - def run(self, is_genesis: bool = False): + + def run(self, is_startup_benchmark, is_genesis: bool = False): logging.info(f"Running peer {self.name}...") peer_dir = self.out_dir.joinpath(f"peers/{self.name}") - os.mkdir(peer_dir) - os.mkdir(peer_dir.joinpath("storage")) + os.makedirs(peer_dir, exist_ok=True) + os.makedirs(peer_dir.joinpath("storage"), exist_ok=True) os.environ["KURA_BLOCK_STORE_PATH"] = str(peer_dir.joinpath("storage")) os.environ["SNAPSHOT_DIR_PATH"] = str(peer_dir.joinpath("storage")) @@ -121,15 +129,24 @@ def run(self, is_genesis: bool = False): os.environ["SUMERAGI_DEBUG_FORCE_SOFT_FORK"] = "false" os.environ["TORII_P2P_ADDR"] = f"{self.host_ip}:{self.p2p_port}" os.environ["TORII_API_URL"] = f"{self.host_ip}:{self.api_port}" - os.environ["TORII_TELEMETRY_URL"] = f"{self.host_ip}:{self.telemetry_port}" os.environ["TOKIO_CONSOLE_ADDR"] = f"{self.host_ip}:{self.tokio_console_port}" - genesis_arg = "--submit-genesis" if is_genesis else "" - # FD never gets closed - log_file = open(peer_dir.joinpath(".log"), "w") - # These processes are created detached from the parent process already - subprocess.Popen([self.name, genesis_arg], executable=f"{self.out_dir}/peers/iroha", - stdout=log_file, stderr=subprocess.STDOUT) + if is_startup_benchmark: + os.environ["IROHA_EXIT_AFTER_INIT"] = "true" + shutil.copy2(f"{self.root_dir}/benchmark_blockstore/blocks.data", f"{self.out_dir}/peers/{self.name}/storage/blocks.data") + shutil.copy2(f"{self.root_dir}/benchmark_blockstore/blocks.index", f"{self.out_dir}/peers/{self.name}/storage/blocks.index") + start_time = time.time_ns() + subprocess.run([f"{self.out_dir}/peers/iroha"]) + runtime = time.time_ns() - start_time + print("Took " + str(float(runtime) / 1000000.0) + " ms") + exit() + else: + genesis_arg = "--submit-genesis" if is_genesis else "" + # FD never gets closed + log_file = open(peer_dir.joinpath(".log"), "w") + # These processes are created detached from the parent process already + subprocess.Popen([self.name, genesis_arg], executable=f"{self.out_dir}/peers/iroha", + stdout=log_file, stderr=subprocess.STDOUT) def pos_int(arg): if int(arg) > 0: @@ -155,12 +172,12 @@ def 
copy_or_prompt_build_bin(bin_name: str, root_dir: pathlib.Path, target_dir: logging.critical("Can't launch the network without the binary. Aborting...") sys.exit(4) else: - logging.error("Please answer with either `y[es]` or `n[o])") + logging.error("Please answer with either `y[es]` or `n[o]`") -def main(args): +def main(args: argparse.Namespace): # Bold ASCII escape sequence logging.basicConfig(level=logging.INFO if args.verbose else logging.WARNING, - style="{", + style="{", format="{asctime} {levelname} \033[1m{funcName}:{lineno}\033[0m: {message}",) # ISO 8601 timestamps without timezone logging.Formatter.formatTime = (lambda self, record, datefmt=None: @@ -170,18 +187,15 @@ def main(args): logging.addLevelName(logging.INFO, f"\033[32m{logging.getLevelName(logging.INFO)}\033[0m") logging.addLevelName(logging.ERROR, f"\033[35m{logging.getLevelName(logging.ERROR)}\033[0m") logging.addLevelName(logging.CRITICAL, f"\033[31m{logging.getLevelName(logging.CRITICAL)}\033[0m") + if args.command == "setup": setup(args) elif args.command == "cleanup": - cleanup(args) + cleanup(args.out_dir) -def setup(args): +def setup(args: argparse.Namespace): logging.info(f"Starting iroha network with {args.n_peers} peers...") - try: - os.mkdir(args.out_dir) - except FileExistsError: - logging.error(f"Test directory `{args.out_dir}` already exists") - sys.exit(5) + os.makedirs(args.out_dir, exist_ok=True) copy_or_prompt_build_bin("iroha_client_cli", args.root_dir, args.out_dir) with open(os.path.join(args.out_dir, "metadata.json"), "w") as f: f.write('{"comment":{"String": "Hello Meta!"}}') @@ -190,12 +204,11 @@ def setup(args): Network(args).run() -def cleanup(args): - setup_dir = args.out_dir +def cleanup(out_dir: pathlib.Path): logging.info("Killing peer processes...") subprocess.run(["pkill", "-9", "iroha"]) - logging.info(f"Cleaning up test directory `{setup_dir}`...") - shutil.rmtree(setup_dir) + logging.info(f"Cleaning up test directory `{out_dir}`...") + shutil.rmtree(out_dir) @@ -227,9 +240,14 @@ def cleanup(args): help="Directory containing Iroha project root. \ Defaults to `.`, i.e. the directory script is being run from. \ This is used to locate the `iroha` binary and config files") + parser.add_argument("--peer-name-as-seed", action="store_true", + help="Use peer name as seed for key generation. 
\ + This option could be useful to preserve the same peer keys between script invocations") parser.add_argument("--verbose", "-v", action="store_true", help="Enable verbose output") + parser.add_argument("--benchmark", "-b", action="store_true", + help="Perform the startup time benchmark with the benchmark chain") args = parser.parse_args() main(args) diff --git a/scripts/check.sh b/scripts/tests/consistency.sh similarity index 88% rename from scripts/check.sh rename to scripts/tests/consistency.sh index 74dafbf2b88..dd5a5291a5c 100755 --- a/scripts/check.sh +++ b/scripts/tests/consistency.sh @@ -8,8 +8,8 @@ case $1 in exit 1 };; "genesis") - cargo run --release --bin kagami -- genesis --validator-path-in-genesis ./validator.wasm | diff - configs/peer/genesis.json || { - echo 'Please re-generate the genesis with `cargo run --release --bin kagami -- genesis --validator-path-in-genesis ./validator.wasm > configs/peer/genesis.json`' + cargo run --release --bin kagami -- genesis --executor-path-in-genesis ./executor.wasm | diff - configs/peer/genesis.json || { + echo 'Please re-generate the genesis with `cargo run --release --bin kagami -- genesis --executor-path-in-genesis ./executor.wasm > configs/peer/genesis.json`' exit 1 };; "client") @@ -39,7 +39,7 @@ case $1 in eval "$full_cmd" diff "$temp_file" "$target" || { - echo "Please re-generate \`$target\` with \`$full_cmd\`" + echo "Please re-generate \`$target\` with \`$cmd_base --outfile $target\`" exit 1 } } diff --git a/scripts/tests/genesis.sh b/scripts/tests/genesis.sh deleted file mode 100755 index 87c3a03c3d8..00000000000 --- a/scripts/tests/genesis.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -set -ex -TEST=${TEST:-"./test"} -CMD="$TEST/iroha_client_cli --config $TEST/config.json" -$CMD asset get --account alice@wonderland --asset 'rose#wonderland' | grep -q '"Quantity": 13' -sleep ${SLEEP:-10} diff --git a/scripts/tests/panic_on_invalid_genesis.sh b/scripts/tests/panic_on_invalid_genesis.sh index 21cdd103c2c..ed95926b645 100755 --- a/scripts/tests/panic_on_invalid_genesis.sh +++ b/scripts/tests/panic_on_invalid_genesis.sh @@ -3,7 +3,6 @@ set -ex # Setup env export TORII_P2P_ADDR='127.0.0.1:1341' export TORII_API_URL='127.0.0.1:8084' -export TORII_TELEMETRY_URL='127.0.0.1:8184' export IROHA_PUBLIC_KEY='ed01201C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B' export IROHA_PRIVATE_KEY='{"digest_function": "ed25519", "payload": "282ED9F3CF92811C3818DBC4AE594ED59DC1A2F78E4241E31924E101D6B1FB831C61FAF8FE94E253B93114240394F79A607B7FA55F9E5A41EBEC74B88055768B"}' export IROHA_GENESIS_ACCOUNT_PUBLIC_KEY='ed01203F4E3E98571B55514EDC5CCF7E53CA7509D89B2868E62921180A6F57C2F4E255' diff --git a/scripts/tests/register_mint_quantity.sh b/scripts/tests/register_mint_quantity.sh deleted file mode 100755 index d589c6307c5..00000000000 --- a/scripts/tests/register_mint_quantity.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -set -ex -TEST=${TEST:-"./test"} -CMD="$TEST/iroha_client_cli --config $TEST/config.json" -$CMD domain register --id="Soramitsu" --metadata="$TEST/metadata.json" -sleep 2 -$CMD account register --id="Alice@Soramitsu" --key="ed0120A753146E75B910AE5E2994DC8ADEA9E7D87E5D53024CFA310CE992F17106F92C" -sleep 2 -$CMD asset register --id="XOR#Soramitsu" --value-type=Quantity -sleep 2 -$CMD asset mint --account="Alice@Soramitsu" --asset="XOR#Soramitsu" --quantity="100" -sleep 2 -$CMD asset get --account="Alice@Soramitsu" --asset="XOR#Soramitsu" | grep -q '"Quantity": 100' diff --git a/scripts/update_configs.sh 
b/scripts/update_configs.sh index 876ad654ceb..ad20cd9f8b8 100755 --- a/scripts/update_configs.sh +++ b/scripts/update_configs.sh @@ -22,4 +22,4 @@ curl https://raw.githubusercontent.com/hyperledger/iroha/iroha2-lts/configs/peer curl https://raw.githubusercontent.com/hyperledger/iroha/iroha2-stable/configs/peer/config.json -o ./configs/peer/$1/config.json curl https://raw.githubusercontent.com/hyperledger/iroha/iroha2-stable/configs/peer/genesis.json -o ./configs/peer/$1/genesis.json -curl https://raw.githubusercontent.com/hyperledger/iroha/iroha2-stable/configs/peer/genesis.json -o ./configs/peer/$1/validator.wasm +curl https://raw.githubusercontent.com/hyperledger/iroha/iroha2-stable/configs/peer/executor.wasm -o ./configs/peer/$1/executor.wasm diff --git a/smart_contract/.cargo/config.toml b/smart_contract/.cargo/config.toml new file mode 100644 index 00000000000..10c68cf82ce --- /dev/null +++ b/smart_contract/.cargo/config.toml @@ -0,0 +1,2 @@ +[target.wasm32-unknown-unknown] +runner = "iroha_wasm_test_runner" diff --git a/smart_contract/Cargo.toml b/smart_contract/Cargo.toml new file mode 100644 index 00000000000..b0d9950600f --- /dev/null +++ b/smart_contract/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "iroha_smart_contract" + +edition.workspace = true +version.workspace = true +authors.workspace = true + +license.workspace = true + +[lints] +workspace = true + +[features] +# Enables debugging tools such as `dbg()` and `DebugUnwrapExt` +debug = [] + +[dependencies] +iroha_data_model.workspace = true +iroha_macro.workspace = true +iroha_smart_contract_utils.workspace = true +iroha_smart_contract_derive.workspace = true + +parity-scale-codec.workspace = true +derive_more.workspace = true + +[dev-dependencies] +webassembly-test = "0.1.0" + diff --git a/wasm/LICENSE b/smart_contract/LICENSE similarity index 100% rename from wasm/LICENSE rename to smart_contract/LICENSE diff --git a/wasm/README.md b/smart_contract/README.md similarity index 91% rename from wasm/README.md rename to smart_contract/README.md index 8629f9966fd..430c73f33ec 100644 --- a/wasm/README.md +++ b/smart_contract/README.md @@ -8,10 +8,10 @@ Check the [WASM section of our tutorial](https://hyperledger.github.io/iroha-2-d ## Running tests -To be able to run tests compiled for `wasm32-unknown-unknown` target install `webassembly-test-runner`: +To be able to run tests compiled for `wasm32-unknown-unknown` target install `iroha_wasm_test_runner` from the root of the iroha repository: ```bash -cargo install webassembly-test-runner +cargo install --path tools/wasm_test_runner ``` Then run tests: @@ -49,7 +49,7 @@ By following this list of optimization steps you can reduce the size of your bin [dependencies] iroha_data_model = { git = "https://github.com/hyperledger/iroha/", branch = "iroha2", default-features = false } - iroha_wasm = { git = "https://github.com/hyperledger/iroha/", branch = "iroha2" } + iroha_smart_contract = { git = "https://github.com/hyperledger/iroha/", branch = "iroha2" } panic-halt = "0.2.0" ``` diff --git a/wasm/derive/Cargo.toml b/smart_contract/derive/Cargo.toml similarity index 78% rename from wasm/derive/Cargo.toml rename to smart_contract/derive/Cargo.toml index aea05d5be96..72658aa9aa8 100644 --- a/wasm/derive/Cargo.toml +++ b/smart_contract/derive/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "iroha_wasm_derive" +name = "iroha_smart_contract_derive" version.workspace = true authors.workspace = true @@ -7,11 +7,13 @@ edition.workspace = true license.workspace = true +[lints] +workspace = true 
+ [lib] proc-macro = true [dependencies] -iroha_data_model.workspace = true syn.workspace = true quote.workspace = true proc-macro2.workspace = true diff --git a/wasm/derive/src/entrypoint.rs b/smart_contract/derive/src/entrypoint.rs similarity index 69% rename from wasm/derive/src/entrypoint.rs rename to smart_contract/derive/src/entrypoint.rs index 1a01dfc83cf..4970b406ea3 100644 --- a/wasm/derive/src/entrypoint.rs +++ b/smart_contract/derive/src/entrypoint.rs @@ -1,11 +1,13 @@ //! Macro for writing smart contract entrypoint -#![allow(clippy::str_to_string)] - use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, parse_quote}; +mod export { + pub const SMART_CONTRACT_MAIN: &str = "_iroha_smart_contract_main"; +} + #[allow(clippy::needless_pass_by_value)] pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { let syn::ItemFn { @@ -25,27 +27,27 @@ pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { block.stmts.insert( 0, parse_quote!( - use ::iroha_wasm::{debug::DebugExpectExt as _, ExecuteOnHost as _, QueryHost as _}; + use ::iroha_smart_contract::{ + debug::DebugExpectExt as _, ExecuteOnHost as _, ExecuteQueryOnHost as _, + }; ), ); - let main_fn_name = syn::Ident::new( - iroha_data_model::wasm::export::fn_names::SMART_CONTRACT_MAIN, - proc_macro2::Span::call_site(), - ); + let main_fn_name = syn::Ident::new(export::SMART_CONTRACT_MAIN, proc_macro2::Span::call_site()); quote! { /// Smart contract entrypoint #[no_mangle] #[doc(hidden)] unsafe extern "C" fn #main_fn_name() { - let payload = ::iroha_wasm::get_smart_contract_payload(); + let payload = ::iroha_smart_contract::get_smart_contract_payload(); #fn_name(payload.owner) } // NOTE: Host objects are always passed by value to wasm #[allow(clippy::needless_pass_by_value)] #(#attrs)* + #[inline] #vis #sig #block } diff --git a/wasm/derive/src/lib.rs b/smart_contract/derive/src/lib.rs similarity index 95% rename from wasm/derive/src/lib.rs rename to smart_contract/derive/src/lib.rs index f1390145b3f..af82cd24fbe 100644 --- a/wasm/derive/src/lib.rs +++ b/smart_contract/derive/src/lib.rs @@ -18,7 +18,7 @@ mod entrypoint; // /// Using without parameters: /// ```ignore -/// #[iroha_wasm::main] +/// #[iroha_smart_contract::main] /// fn main(owner: AccountId) { /// todo!() /// } diff --git a/smart_contract/executor/Cargo.toml b/smart_contract/executor/Cargo.toml new file mode 100644 index 00000000000..6e0eb5bcda7 --- /dev/null +++ b/smart_contract/executor/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "iroha_executor" + +version.workspace = true +authors.workspace = true +edition.workspace = true +license.workspace = true + +[lints] +workspace = true + +[features] +# Enables debugging tools such as `dbg()` and `DebugUnwrapExt` +debug = ["iroha_smart_contract/debug"] + +[dependencies] +iroha_smart_contract_utils.workspace = true +iroha_smart_contract.workspace = true +iroha_executor_derive.workspace = true +iroha_data_model.workspace = true +iroha_schema.workspace = true + +serde.workspace = true +serde_json.workspace = true diff --git a/smart_contract/executor/derive/Cargo.toml b/smart_contract/executor/derive/Cargo.toml new file mode 100644 index 00000000000..ce43975f33a --- /dev/null +++ b/smart_contract/executor/derive/Cargo.toml @@ -0,0 +1,25 @@ +[package] +name = "iroha_executor_derive" + +version.workspace = true +authors.workspace = true +edition.workspace = true + +license.workspace = true + +[lints] +workspace = true + + +[lib] +proc-macro = true + +[dependencies] 
+iroha_data_model.workspace = true
+iroha_macro_utils.workspace = true
+syn = { workspace = true, features = ["full", "derive"] }
+syn2 = { workspace = true, features = ["full", "derive"] }
+quote.workspace = true
+proc-macro2.workspace = true
+manyhow.workspace = true
+darling.workspace = true
diff --git a/wasm/validator/derive/src/conversion.rs b/smart_contract/executor/derive/src/conversion.rs
similarity index 77%
rename from wasm/validator/derive/src/conversion.rs
rename to smart_contract/executor/derive/src/conversion.rs
index f0dd6a648ff..87b27becbb5 100644
--- a/wasm/validator/derive/src/conversion.rs
+++ b/smart_contract/executor/derive/src/conversion.rs
@@ -9,7 +9,7 @@ pub fn impl_derive_ref_into_asset_owner(input: TokenStream) -> TokenStream {
     impl_from(
         &input.ident,
         &input.generics,
-        &syn::parse_quote!(::iroha_validator::permission::asset::Owner),
+        &syn::parse_quote!(::iroha_executor::permission::asset::Owner),
         &syn::parse_quote!(asset_id),
     )
     .into()
@@ -23,7 +23,7 @@ pub fn impl_derive_ref_into_asset_definition_owner(input: TokenStream) -> TokenStream {
     impl_from(
         &input.ident,
         &input.generics,
-        &syn::parse_quote!(::iroha_validator::permission::asset_definition::Owner),
+        &syn::parse_quote!(::iroha_executor::permission::asset_definition::Owner),
         &syn::parse_quote!(asset_definition_id),
     )
     .into()
@@ -36,12 +36,25 @@ pub fn impl_derive_ref_into_account_owner(input: TokenStream) -> TokenStream {
     impl_from(
         &input.ident,
         &input.generics,
-        &syn::parse_quote!(::iroha_validator::permission::account::Owner),
+        &syn::parse_quote!(::iroha_executor::permission::account::Owner),
         &syn::parse_quote!(account_id),
     )
     .into()
 }

+/// [`derive_ref_into_domain_owner`](crate::derive_ref_into_domain_owner) macro implementation
+pub fn impl_derive_ref_into_domain_owner(input: TokenStream) -> TokenStream {
+    let input = parse_macro_input!(input as DeriveInput);
+
+    impl_from(
+        &input.ident,
+        &input.generics,
+        &syn::parse_quote!(::iroha_executor::permission::domain::Owner),
+        &syn::parse_quote!(domain_id),
+    )
+    .into()
+}
+
 fn impl_from(
     ident: &syn::Ident,
     generics: &syn::Generics,
diff --git a/smart_contract/executor/derive/src/default.rs b/smart_contract/executor/derive/src/default.rs
new file mode 100644
index 00000000000..6045bd40b82
--- /dev/null
+++ b/smart_contract/executor/derive/src/default.rs
@@ -0,0 +1,366 @@
+use darling::{ast::NestedMeta, FromDeriveInput, FromMeta};
+use iroha_macro_utils::Emitter;
+use manyhow::emit;
+use proc_macro2::{Span, TokenStream as TokenStream2};
+use quote::{quote, ToTokens};
+use syn2::{parse_quote, Ident};
+
+type ExecutorData = darling::ast::Data<darling::util::Ignored, syn2::Field>;
+
+#[derive(Debug)]
+struct Custom(Vec<Ident>);
+
+impl FromMeta for Custom {
+    fn from_list(items: &[NestedMeta]) -> darling::Result<Self> {
+        let mut res = Vec::new();
+        for item in items {
+            if let NestedMeta::Meta(syn2::Meta::Path(p)) = item {
+                let fn_name = p.get_ident().expect("Path should be ident");
+                res.push(fn_name.clone());
+            } else {
+                return Err(darling::Error::custom(
+                    "Invalid path list supplied to `omit` attribute",
+                ));
+            }
+        }
+        Ok(Self(res))
+    }
+}
+
+#[derive(FromDeriveInput, Debug)]
+#[darling(supports(struct_named), attributes(visit, entrypoints))]
+struct ExecutorDeriveInput {
+    ident: Ident,
+    data: ExecutorData,
+    custom: Option<Custom>,
+}
+
+pub fn impl_derive_entrypoints(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream2 {
+    let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else {
+        return quote!();
+    };
+    let ExecutorDeriveInput {
+        ident,
+        data,
+        custom,
.. + } = &input; + check_required_fields(data, emitter); + + let (custom_idents, custom_args) = custom_field_idents_and_fn_args(data); + + let mut entrypoint_fns: Vec = vec![ + parse_quote! { + #[::iroha_executor::prelude::entrypoint] + pub fn validate_instruction( + authority: ::iroha_executor::prelude::AccountId, + instruction: ::iroha_executor::prelude::InstructionExpr, + block_height: u64, + #(#custom_args),* + ) -> ::iroha_executor::prelude::Result { + let mut executor = #ident::new(block_height, #(#custom_idents),*); + executor.visit_instruction(&authority, &instruction); + ::core::mem::forget(instruction); + executor.verdict + } + }, + parse_quote! { + #[::iroha_executor::prelude::entrypoint] + pub fn validate_transaction( + authority: ::iroha_executor::prelude::AccountId, + transaction: ::iroha_executor::prelude::SignedTransaction, + block_height: u64, + #(#custom_args),* + ) -> ::iroha_executor::prelude::Result { + let mut executor = #ident::new(block_height, #(#custom_idents),*); + executor.visit_transaction(&authority, &transaction); + ::core::mem::forget(transaction); + executor.verdict + } + }, + parse_quote! { + #[::iroha_executor::prelude::entrypoint] + pub fn validate_query( + authority: ::iroha_executor::prelude::AccountId, + query: ::iroha_executor::prelude::QueryBox, + block_height: u64, + #(#custom_args),* + ) -> ::iroha_executor::prelude::Result { + let mut executor = #ident::new(block_height, #(#custom_idents),*); + executor.visit_query(&authority, &query); + ::core::mem::forget(query); + executor.verdict + } + }, + ]; + if let Some(custom) = custom { + entrypoint_fns.retain(|entrypoint| { + !custom + .0 + .iter() + .any(|fn_name| fn_name == &entrypoint.sig.ident) + }); + } + + quote! { + #(#entrypoint_fns)* + } +} + +pub fn impl_derive_visit(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream2 { + let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { + return quote!(); + }; + let ExecutorDeriveInput { ident, custom, .. 
} = &input; + let default_visit_sigs: Vec = [ + "fn visit_unsupported(operation: T)", + "fn visit_transaction(operation: &SignedTransaction)", + "fn visit_instruction(operation: &InstructionExpr)", + "fn visit_expression(operation: &EvaluatesTo)", + "fn visit_sequence(operation: &SequenceExpr)", + "fn visit_if(operation: &ConditionalExpr)", + "fn visit_pair(operation: &PairExpr)", + "fn visit_unregister_peer(operation: Unregister)", + "fn visit_unregister_domain(operation: Unregister)", + "fn visit_transfer_domain(operation: Transfer)", + "fn visit_set_domain_key_value(operation: SetKeyValue)", + "fn visit_remove_domain_key_value(operation: RemoveKeyValue)", + "fn visit_unregister_account(operation: Unregister)", + "fn visit_mint_account_public_key(operation: Mint)", + "fn visit_burn_account_public_key(operation: Burn)", + "fn visit_mint_account_signature_check_condition(operation: Mint)", + "fn visit_set_account_key_value(operation: SetKeyValue)", + "fn visit_remove_account_key_value(operation: RemoveKeyValue)", + "fn visit_register_asset(operation: Register)", + "fn visit_unregister_asset(operation: Unregister)", + "fn visit_mint_asset(operation: Mint)", + "fn visit_burn_asset(operation: Burn)", + "fn visit_transfer_asset(operation: Transfer)", + "fn visit_set_asset_key_value(operation: SetKeyValue)", + "fn visit_remove_asset_key_value(operation: RemoveKeyValue)", + "fn visit_unregister_asset_definition(operation: Unregister)", + "fn visit_transfer_asset_definition(operation: Transfer)", + "fn visit_set_asset_definition_key_value(operation: SetKeyValue)", + "fn visit_remove_asset_definition_key_value(operation: RemoveKeyValue)", + "fn visit_grant_account_permission(operation: Grant)", + "fn visit_revoke_account_permission(operation: Revoke)", + "fn visit_register_role(operation: Register)", + "fn visit_unregister_role(operation: Unregister)", + "fn visit_grant_account_role(operation: Grant)", + "fn visit_revoke_account_role(operation: Revoke)", + "fn visit_unregister_trigger(operation: Unregister>)", + "fn visit_mint_trigger_repetitions(operation: Mint>)", + "fn visit_burn_trigger_repetitions(operation: Burn>)", + "fn visit_execute_trigger(operation: ExecuteTrigger)", + "fn visit_set_parameter(operation: SetParameter)", + "fn visit_new_parameter(operation: NewParameter)", + "fn visit_upgrade_executor(operation: Upgrade)", + ] + .into_iter() + .map(|item| { + let mut sig: syn2::Signature = + syn2::parse_str(item).expect("Function names and operation signatures should be valid"); + let recv_arg: syn2::Receiver = parse_quote!(&mut self); + let auth_arg: syn2::FnArg = parse_quote!(authority: &AccountId); + sig.inputs.insert(0, recv_arg.into()); + sig.inputs.insert(1, auth_arg); + sig + }) + .collect(); + + let visit_items = default_visit_sigs + .iter() + .map(|visit_sig| { + let curr_fn_name = &visit_sig.ident; + let local_override_fn = quote! { + #visit_sig { + #curr_fn_name(self, authority, operation) + } + }; + let default_override_fn = quote! { + #visit_sig { + ::iroha_executor::default::#curr_fn_name(self, authority, operation) + } + }; + if let Some(fns_to_exclude) = custom { + if fns_to_exclude + .0 + .iter() + .any(|fn_name| fn_name == &visit_sig.ident) + { + local_override_fn + } else { + default_override_fn + } + } else { + default_override_fn + } + }) + .collect::>(); + + quote! 
{ + impl ::iroha_executor::prelude::Visit for #ident { + #(#visit_items)* + } + } +} + +pub fn impl_derive_validate(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream2 { + let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { + return quote!(); + }; + let ExecutorDeriveInput { ident, data, .. } = &input; + check_required_fields(data, emitter); + quote! { + impl ::iroha_executor::Validate for #ident { + fn verdict(&self) -> &::iroha_executor::prelude::Result { + &self.verdict + } + + fn block_height(&self) -> u64 { + self.block_height + } + + fn deny(&mut self, reason: ::iroha_executor::prelude::ValidationFail) { + self.verdict = Err(reason); + } + } + } +} + +pub fn impl_derive_expression_evaluator( + emitter: &mut Emitter, + input: &syn2::DeriveInput, +) -> TokenStream2 { + let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { + return quote!(); + }; + let ExecutorDeriveInput { ident, data, .. } = &input; + check_required_fields(data, emitter); + quote! { + impl ::iroha_executor::data_model::evaluate::ExpressionEvaluator for #ident { + fn evaluate( + &self, + expression: &E, + ) -> ::core::result::Result + { + self.host.evaluate(expression) + } + } + + } +} + +pub fn impl_derive_constructor(emitter: &mut Emitter, input: &syn2::DeriveInput) -> TokenStream2 { + let Some(input) = emitter.handle(ExecutorDeriveInput::from_derive_input(input)) else { + return quote!(); + }; + let ExecutorDeriveInput { ident, data, .. } = &input; + + check_required_fields(data, emitter); + + let (custom_idents, custom_args) = custom_field_idents_and_fn_args(data); + + // Returning an inherent impl is okay here as there can be multiple + quote! { + impl #ident { + pub fn new(block_height: u64, #(#custom_args),*) -> Self { + Self { + verdict: Ok(()), + block_height, + host: ::iroha_executor::smart_contract::Host, + #(#custom_idents),* + } + } + } + + } +} + +fn check_required_fields(ast: &ExecutorData, emitter: &mut Emitter) { + let required_fields: syn2::FieldsNamed = parse_quote!({ verdict: ::iroha_executor::prelude::Result, block_height: u64, host: ::iroha_executor::smart_contract::Host }); + let struct_fields = ast + .as_ref() + .take_struct() + .expect("BUG: ExecutorDeriveInput is allowed to contain struct data only") + .fields; + required_fields.named.iter().for_each(|required_field| { + if !struct_fields.iter().any(|struct_field| { + struct_field.ident == required_field.ident + && check_type_equivalence(&required_field.ty, &struct_field.ty) + }) { + emit!( + emitter, + Span::call_site(), + "The struct didn't have the required field named `{}` of type `{}`", + required_field + .ident + .as_ref() + .expect("Required field should be named"), + required_field.ty.to_token_stream() + ) + } + }); +} + +/// Check that the required fields of an `Executor` are of the correct types. As +/// the types can be completely or partially unqualified, we need to go through the type path segments to +/// determine equivalence. 
We can't account for any aliases though +fn check_type_equivalence(full_ty: &syn2::Type, given_ty: &syn2::Type) -> bool { + match (full_ty, given_ty) { + (syn2::Type::Path(full_ty_path), syn2::Type::Path(given_ty_path)) => { + if full_ty_path.path.segments.len() == given_ty_path.path.segments.len() { + full_ty_path == given_ty_path + } else { + full_ty_path + .path + .segments + .iter() + .rev() + .zip(given_ty_path.path.segments.iter().rev()) + .all(|(full_seg, given_seg)| full_seg == given_seg) + } + } + _ => false, + } +} + +/// Processes an `Executor` by draining it of default fields and returning the idents of the +/// custom fields and the corresponding function arguments for use in the constructor +fn custom_field_idents_and_fn_args(ast: &ExecutorData) -> (Vec<&Ident>, Vec) { + let required_idents: Vec = ["verdict", "block_height", "host"] + .iter() + .map(|s| Ident::new(s, Span::call_site())) + .collect(); + let mut custom_fields = ast + .as_ref() + .take_struct() + .expect("BUG: ExecutorDeriveInput is allowed to contain struct data only") + .fields; + custom_fields.retain(|field| { + let curr_ident = field + .ident + .as_ref() + .expect("BUG: Struct should have named fields"); + !required_idents.iter().any(|ident| ident == curr_ident) + }); + let custom_idents = custom_fields + .iter() + .map(|field| { + field + .ident + .as_ref() + .expect("BUG: Struct should have named fields") + }) + .collect::>(); + let custom_args = custom_fields + .iter() + .map(|field| { + let ident = &field.ident; + let ty = &field.ty; + let field_arg: syn2::FnArg = parse_quote!(#ident: #ty); + field_arg + }) + .collect::>(); + (custom_idents, custom_args) +} diff --git a/wasm/validator/derive/src/entrypoint.rs b/smart_contract/executor/derive/src/entrypoint.rs similarity index 60% rename from wasm/validator/derive/src/entrypoint.rs rename to smart_contract/executor/derive/src/entrypoint.rs index 73ceb77538c..3e4e6daa601 100644 --- a/wasm/validator/derive/src/entrypoint.rs +++ b/smart_contract/executor/derive/src/entrypoint.rs @@ -1,15 +1,28 @@ -//! Module [`validator_entrypoint`](crate::validator_entrypoint) macro implementation +//! Module [`executor_entrypoint`](crate::executor_entrypoint) macro implementation use super::*; -/// [`validator_entrypoint`](crate::validator_entrypoint()) macro implementation +mod export { + pub const EXECUTOR_VALIDATE_TRANSACTION: &str = "_iroha_executor_validate_transaction"; + pub const EXECUTOR_VALIDATE_INSTRUCTION: &str = "_iroha_executor_validate_instruction"; + pub const EXECUTOR_VALIDATE_QUERY: &str = "_iroha_executor_validate_query"; + pub const EXECUTOR_MIGRATE: &str = "_iroha_executor_migrate"; +} + +mod import { + pub const GET_VALIDATE_TRANSACTION_PAYLOAD: &str = "get_validate_transaction_payload"; + pub const GET_VALIDATE_INSTRUCTION_PAYLOAD: &str = "get_validate_instruction_payload"; + pub const GET_VALIDATE_QUERY_PAYLOAD: &str = "get_validate_query_payload"; +} + +/// [`executor_entrypoint`](crate::executor_entrypoint()) macro implementation #[allow(clippy::needless_pass_by_value)] pub fn impl_entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { let fn_item = parse_macro_input!(item as syn::ItemFn); assert!( attr.is_empty(), - "`#[entrypoint]` macro for Validator entrypoints accepts no attributes" + "`#[entrypoint]` macro for Executor entrypoints accepts no attributes" ); macro_rules! 
match_entrypoints { @@ -25,13 +38,13 @@ pub fn impl_entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { impl_validate_entrypoint( fn_item, stringify!($user_entrypoint_name), - iroha_data_model::wasm::export::fn_names::$generated_entrypoint_name, - iroha_data_model::wasm::import::fn_names::$query_validating_object_fn_name, + export::$generated_entrypoint_name, + import::$query_validating_object_fn_name, ) })* $(fn_name if fn_name == stringify!($other_user_entrypoint_name) => $branch),* _ => panic!( - "Validator entrypoint name must be one of: {:?}", + "Executor entrypoint name must be one of: {:?}", [ $(stringify!($user_entrypoint_name),)* $(stringify!($other_user_entrypoint_name),)* @@ -43,9 +56,9 @@ pub fn impl_entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { match_entrypoints! { validate: { - validate_transaction => VALIDATOR_VALIDATE_TRANSACTION(GET_VALIDATE_TRANSACTION_PAYLOAD), - validate_instruction => VALIDATOR_VALIDATE_INSTRUCTION(GET_VALIDATE_INSTRUCTION_PAYLOAD), - validate_query => VALIDATOR_VALIDATE_QUERY(GET_VALIDATE_QUERY_PAYLOAD), + validate_transaction => EXECUTOR_VALIDATE_TRANSACTION(GET_VALIDATE_TRANSACTION_PAYLOAD), + validate_instruction => EXECUTOR_VALIDATE_INSTRUCTION(GET_VALIDATE_INSTRUCTION_PAYLOAD), + validate_query => EXECUTOR_VALIDATE_QUERY(GET_VALIDATE_QUERY_PAYLOAD), } other: { migrate => { impl_migrate_entrypoint(fn_item) } @@ -69,13 +82,13 @@ fn impl_validate_entrypoint( assert!( matches!(sig.output, syn::ReturnType::Type(_, _)), - "Validator `{user_entrypoint_name}` entrypoint must have `Result` return type" + "Executor `{user_entrypoint_name}` entrypoint must have `Result` return type" ); block.stmts.insert( 0, parse_quote!( - use ::iroha_validator::iroha_wasm::{ExecuteOnHost as _, QueryHost as _}; + use ::iroha_executor::smart_contract::{ExecuteOnHost as _, ExecuteQueryOnHost as _}; ), ); @@ -88,19 +101,19 @@ fn impl_validate_entrypoint( ); quote! 
{
-            /// Validator `validate` entrypoint
+            /// Executor `validate` entrypoint
             ///
             /// # Memory safety
             ///
             /// This function transfers the ownership of allocated
-            /// [`Result`](::iroha_validator::iroha_wasm::data_model::validator::Result)
+            /// [`Result`](::iroha_executor::data_model::executor::Result)
             #[no_mangle]
             #[doc(hidden)]
             unsafe extern "C" fn #generated_entrypoint_ident() -> *const u8 {
-                let payload = ::iroha_validator::iroha_wasm::#get_validation_payload_fn_ident();
-                let verdict: ::iroha_validator::iroha_wasm::data_model::validator::Result =
+                let payload = ::iroha_executor::#get_validation_payload_fn_ident();
+                let verdict: ::iroha_executor::data_model::executor::Result =
                     #fn_name(payload.authority, payload.to_validate, payload.block_height);
-                let bytes_box = ::core::mem::ManuallyDrop::new(::iroha_validator::iroha_wasm::encode_with_length_prefix(&verdict));
+                let bytes_box = ::core::mem::ManuallyDrop::new(::iroha_executor::utils::encode_with_length_prefix(&verdict));

                 bytes_box.as_ptr()
             }
@@ -108,6 +121,7 @@ fn impl_validate_entrypoint(
         // NOTE: Host objects are always passed by value to wasm
         #[allow(clippy::needless_pass_by_value)]
         #(#attrs)*
+        #[inline]
         #vis #sig
         #block
     }
@@ -125,16 +139,13 @@ fn impl_migrate_entrypoint(fn_item: syn::ItemFn) -> TokenStream {
     assert!(
         matches!(sig.output, syn::ReturnType::Type(_, _)),
-        "Validator `migrate()` entrypoint must have `MigrationResult` return type"
+        "Executor `migrate()` entrypoint must have `MigrationResult` return type"
     );

-    let migrate_fn_name = syn::Ident::new(
-        iroha_data_model::wasm::export::fn_names::VALIDATOR_MIGRATE,
-        proc_macro2::Span::call_site(),
-    );
+    let migrate_fn_name = syn::Ident::new(export::EXECUTOR_MIGRATE, proc_macro2::Span::call_site());

     quote! {
-        /// Validator `permission_token_schema` entrypoint
+        /// Executor `migrate` entrypoint
         ///
         /// # Memory safety
         ///
@@ -142,9 +153,9 @@ fn impl_migrate_entrypoint(fn_item: syn::ItemFn) -> TokenStream {
         #[no_mangle]
         #[doc(hidden)]
         unsafe extern "C" fn #migrate_fn_name() -> *const u8 {
-            let payload = ::iroha_validator::iroha_wasm::get_migrate_payload();
-            let res: ::iroha_validator::data_model::validator::MigrationResult = #fn_name(payload.block_height);
-            let bytes = ::core::mem::ManuallyDrop::new(::iroha_validator::iroha_wasm::encode_with_length_prefix(&res));
+            let payload = ::iroha_executor::get_migrate_payload();
+            let res: ::iroha_executor::data_model::executor::MigrationResult = #fn_name(payload.block_height);
+            let bytes = ::core::mem::ManuallyDrop::new(::iroha_executor::utils::encode_with_length_prefix(&res));

             ::core::mem::ManuallyDrop::new(bytes).as_ptr()
         }
@@ -152,6 +163,7 @@
         // NOTE: False positive
         #[allow(clippy::unnecessary_wraps)]
         #(#attrs)*
+        #[inline]
         #vis #sig
         #block
     }
diff --git a/smart_contract/executor/derive/src/lib.rs b/smart_contract/executor/derive/src/lib.rs
new file mode 100644
index 00000000000..d822a1887e1
--- /dev/null
+++ b/smart_contract/executor/derive/src/lib.rs
@@ -0,0 +1,353 @@
+//! Crate with executor-related derive macros.
+
+use iroha_macro_utils::Emitter;
+use manyhow::manyhow;
+use proc_macro::TokenStream;
+use proc_macro2::TokenStream as TokenStream2;
+use quote::quote;
+use syn::{parse_macro_input, parse_quote, DeriveInput};
+
+mod conversion;
+mod default;
+mod entrypoint;
+mod token;
+mod validate;
+
+/// Annotate the user-defined function that starts the execution of an executor.
+///
+/// There are 4 acceptable forms of this macro usage.
See examples. +/// +/// # Examples +/// +/// ```ignore +/// use iroha_executor::prelude::*; +/// +/// #[entrypoint] +/// pub fn migrate(block_height: u64) -> MigrationResult { +/// todo!() +/// } +/// +/// #[entrypoint] +/// pub fn validate_transaction( +/// authority: AccountId, +/// transaction: SignedTransaction, +/// block_height: u64, +/// ) -> Result { +/// todo!() +/// } +/// +/// #[entrypoint] +/// pub fn validate_instruction(authority: AccountId, instruction: InstructionExpr, block_height: u64) -> Result { +/// todo!() +/// } +/// +/// #[entrypoint] +/// pub fn validate_query(authority: AccountId, query: QueryBox, block_height: u64) -> Result { +/// todo!() +/// } +/// ``` +#[proc_macro_attribute] +pub fn entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream { + entrypoint::impl_entrypoint(attr, item) +} + +/// Derive macro for `Token` trait. +/// +/// # Example +/// +/// ```ignore +/// use iroha_executor::{permission, prelude::*}; +/// +/// #[derive(Token, ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] +/// #[validate(permission::asset::Owner)] +/// struct CanDoSomethingWithAsset { +/// some_data: String, +/// asset_id: AssetId, +/// } +/// +/// #[entrypoint(params = "[authority, operation]")] +/// fn validate(authority: AccountId, operation: NeedsValidationBox) -> Result { +/// let NeedsValidationBox::Instruction(instruction) = operation else { +/// pass!(); +/// }; +/// +/// validate_grant_revoke!(, (authority, instruction)); +/// +/// CanDoSomethingWithAsset { +/// some_data: "some data".to_owned(), +/// asset_id: parse!("rose#wonderland" as AssetId), +/// }.is_owned_by(&authority) +/// } +/// ``` +#[proc_macro_derive(Token)] +pub fn derive_token(input: TokenStream) -> TokenStream { + token::impl_derive_token(input) +} + +/// Derive macro for `ValidateGrantRevoke` trait. +/// +/// # Attributes +/// +/// This macro requires `validate` or a group of `validate_grant` and `validate_revoke` attributes. +/// +/// ## `validate` attribute +/// +/// Use `validate` to specify [*Pass Condition*](#permission) for both `Grant` and `Revoke` +/// instructions validation. +/// +/// ## `validate_grant` and `validate_revoke` attributes +/// +/// Use `validate_grant` together with `validate_revoke` to specify *pass condition* for +/// `Grant` and `Revoke` instructions validation separately. +/// +/// # Pass conditions +/// +/// You can pass any type implementing `iroha_executor::permission::PassCondition` +/// and `From<&YourToken>` traits. +/// +/// ## Builtin +/// +/// There are some builtin pass conditions: +/// +/// - `asset_definition::Owner` - checks if the authority is the asset definition owner; +/// - `asset::Owner` - checks if the authority is the asset owner; +/// - `account::Owner` - checks if the authority is the account owner. +/// - `AlwaysPass` - checks nothing and always passes. +/// - `OnlyGenesis` - checks that block height is 0. +/// +/// +/// Also check out `iroha_executor::permission::derive_conversion` module +/// for conversion derive macros from your token to this *Pass Conditions*. +/// +/// ## Why *Pass Conditions*? +/// +/// With that you can easily derive one of most popular implementations to remove boilerplate code. +/// +/// ## Manual `ValidateGrantRevoke` implementation VS Custom *Pass Condition* +/// +/// General advice is to use custom *Pass Condition* if you need this custom validation +/// multiple times in different tokens. Otherwise, you can implement `ValidateGrantRevoke` trait manually. 
+/// +/// In the future, there will be combinators like `&&` and `||` to combine multiple *Pass Conditions*. +/// +/// # Example +/// +/// See the [`Token`] derive macro example. +// +// TODO: Add combinators (#3255). +// Example: +// +// ``` +// #[derive(Token, ValidateGrantRevoke)] +// #[validate(Creator || Admin)] +// pub struct CanDoSomethingWithAsset { +// ... +// } +// ``` +#[proc_macro_derive( + ValidateGrantRevoke, + attributes(validate, validate_grant, validate_revoke) +)] +pub fn derive_validate_grant_revoke(input: TokenStream) -> TokenStream { + validate::impl_derive_validate_grant_revoke(input) +} + +/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion +/// from your token to a `permission::asset_definition::Owner` type. +/// +/// Requires an `asset_definition_id` field in the token. +/// +/// Implements [`From`] for `permission::asset_definition::Owner` +/// rather than [`Into`] for your type; [`Into`] will be implemented automatically. +#[proc_macro_derive(RefIntoAssetDefinitionOwner)] +pub fn derive_ref_into_asset_definition_owner(input: TokenStream) -> TokenStream { + conversion::impl_derive_ref_into_asset_definition_owner(input) +} + +/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion +/// from your token to a `permission::asset::Owner` type. +/// +/// Requires an `asset_id` field in the token. +/// +/// Implements [`From`] for `permission::asset::Owner` +/// rather than [`Into`] for your type; [`Into`] will be implemented automatically. +#[proc_macro_derive(RefIntoAssetOwner)] +pub fn derive_ref_into_asset_owner(input: TokenStream) -> TokenStream { + conversion::impl_derive_ref_into_asset_owner(input) +} + +/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion +/// from your token to a `permission::account::Owner` type. +/// +/// Requires an `account_id` field in the token. +/// +/// Implements [`From`] for `permission::account::Owner` +/// rather than [`Into`] for your type; [`Into`] will be implemented automatically. +#[proc_macro_derive(RefIntoAccountOwner)] +pub fn derive_ref_into_account_owner(input: TokenStream) -> TokenStream { + conversion::impl_derive_ref_into_account_owner(input) +} + +/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion +/// from your token to a `permission::domain::Owner` type. +/// +/// Requires a `domain_id` field in the token. +/// +/// Implements [`From`] for `permission::domain::Owner` +/// rather than [`Into`] for your type; [`Into`] will be implemented automatically. +#[proc_macro_derive(RefIntoDomainOwner)] +pub fn derive_ref_into_domain_owner(input: TokenStream) -> TokenStream { + conversion::impl_derive_ref_into_domain_owner(input) +} + +/// Implements the `iroha_executor::Validate` trait for the given `Executor` struct. As +/// this trait has an `iroha_executor::prelude::Visit` bound, and the latter has an +/// `iroha_executor::iroha_data_model::evaluate::ExpressionEvaluator` +/// bound, at least these two should be implemented as well. +/// +/// Emits a compile error if the struct doesn't have all the expected fields with corresponding +/// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and +/// `host`: `iroha_executor::smart_contract::Host`, though technically only `verdict` and +/// `block_height` are needed. The types can be unqualified, but not aliased. A hand-written +/// sketch of an equivalent implementation is shown below.
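+///
+/// For orientation, here is a rough hand-written equivalent of what this derive
+/// provides, based on the `Validate` trait declared in
+/// `smart_contract/executor/src/lib.rs`. This is a sketch, not the macro's exact
+/// expansion; note that `Validate` also requires the `Visit` supertrait, which is
+/// omitted here and normally supplied by `#[derive(Visit)]`.
+///
+/// ```ignore
+/// use iroha_executor::{data_model::ValidationFail, prelude::Result, smart_contract};
+///
+/// pub struct Executor {
+///     verdict: Result,
+///     block_height: u64,
+///     host: smart_contract::Host,
+/// }
+///
+/// impl iroha_executor::Validate for Executor {
+///     fn verdict(&self) -> &Result {
+///         &self.verdict
+///     }
+///
+///     fn block_height(&self) -> u64 {
+///         self.block_height
+///     }
+///
+///     fn deny(&mut self, reason: ValidationFail) {
+///         self.verdict = Err(reason);
+///     }
+/// }
+/// ```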
+#[manyhow] +#[proc_macro_derive(Validate)] +pub fn derive_validate(input: TokenStream2) -> TokenStream2 { + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = default::impl_derive_validate(&mut emitter, &input); + + emitter.finish_token_stream_with(result) +} + +/// Implements the `iroha_executor::prelude::Visit` trait on a given `Executor` struct. +/// Users can supply custom overrides for any of the visit functions as freestanding functions +/// in the same module via the `#[visit(custom(...))]` attribute by +/// supplying the corresponding visit function names inside of it; otherwise, a default +/// implementation from the `iroha_executor::default` module is used. +/// +/// Emits a compile error if the struct doesn't have all the expected fields with corresponding +/// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and +/// `host`: `iroha_executor::smart_contract::Host`, though technically only `verdict` +/// is needed. The types can be unqualified, but not aliased. +/// +/// # Example +/// +/// ```ignore +/// use iroha_executor::{smart_contract, prelude::*}; +/// +/// #[derive(Constructor, Entrypoints, ExpressionEvaluator, Validate, Visit)] +/// #[visit(custom(visit_query))] +/// pub struct Executor { +/// verdict: Result, +/// block_height: u64, +/// host: smart_contract::Host, +/// } +/// +/// // A custom visit function should take a `&mut Executor` as its first argument +/// fn visit_query(executor: &mut Executor, _authority: &AccountId, _query: &QueryBox) { +/// executor.deny(ValidationFail::NotPermitted( +/// "All queries are forbidden".to_owned(), +/// )); +/// } +/// ``` +#[manyhow] +#[proc_macro_derive(Visit, attributes(visit))] +pub fn derive_visit(input: TokenStream2) -> TokenStream2 { + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = default::impl_derive_visit(&mut emitter, &input); + + emitter.finish_token_stream_with(result) +} + +/// Implements three default entrypoints on a given `Executor` struct: `validate_transaction`, +/// `validate_query` and `validate_instruction`. The `migrate` entrypoint is always expected to be +/// implemented manually by the user. +/// +/// Users can supply custom overrides for any of the entrypoint functions as freestanding functions +/// in the same module via the `#[entrypoints(custom(...))]` attribute by +/// supplying the corresponding entrypoint function names inside of it. +/// +/// Emits a compile error if the struct doesn't have all the expected fields with corresponding +/// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and +/// `host`: `iroha_executor::smart_contract::Host`, though technically only `verdict` +/// is needed. The types can be unqualified, but not aliased.
+/// +/// # Example +/// +/// ```ignore +/// use iroha_executor::{smart_contract, prelude::*}; +/// +/// #[derive(Constructor, Entrypoints, ExpressionEvaluator, Validate, Visit)] +/// #[entrypoints(custom(validate_query))] +/// pub struct Executor { +/// verdict: Result, +/// block_height: u64, +/// host: smart_contract::Host, +/// } +/// ``` +#[manyhow] +#[proc_macro_derive(ValidateEntrypoints, attributes(entrypoints))] +pub fn derive_entrypoints(input: TokenStream2) -> TokenStream2 { + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = default::impl_derive_entrypoints(&mut emitter, &input); + + emitter.finish_token_stream_with(result) +} + +/// Implements the `iroha_executor::iroha_data_model::evaluate::ExpressionEvaluator` trait +/// for the given `Executor` struct. +/// +/// Emits a compile error if the struct doesn't have all the expected fields with corresponding +/// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and +/// `host`: `iroha_executor::smart_contract::Host`, though technically only `host` is needed. +/// The types can be unqualified, but not aliased. +#[manyhow] +#[proc_macro_derive(ExpressionEvaluator)] +pub fn derive_expression_evaluator(input: TokenStream2) -> TokenStream2 { + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = default::impl_derive_expression_evaluator(&mut emitter, &input); + + emitter.finish_token_stream_with(result) +} + +/// Implements a constructor for the given `Executor` struct. If the `Executor` has any custom fields +/// (i.e. different from the expected fields listed below), they will be included into the constructor +/// automatically and will need to be passed into the `new()` function explicitly. In the default case, +/// only the `block_height` needs to be supplied manually. +/// +/// Emits a compile error if the struct doesn't have all the expected fields with corresponding +/// types, i.e. `verdict`: `iroha_executor::prelude::Result`, `block_height`: `u64` and +/// `host`: `iroha_executor::smart_contract::Host`. The types can be unqualified, but not aliased. +#[manyhow] +#[proc_macro_derive(Constructor)] +pub fn derive_constructor(input: TokenStream2) -> TokenStream2 { + let mut emitter = Emitter::new(); + + let Some(input) = emitter.handle(syn2::parse2(input)) else { + return emitter.finish_token_stream(); + }; + + let result = default::impl_derive_constructor(&mut emitter, &input); + + emitter.finish_token_stream_with(result) +} diff --git a/wasm/validator/derive/src/token.rs b/smart_contract/executor/derive/src/token.rs similarity index 50% rename from wasm/validator/derive/src/token.rs rename to smart_contract/executor/derive/src/token.rs index 38cc336641a..69f7915d65e 100644 --- a/wasm/validator/derive/src/token.rs +++ b/smart_contract/executor/derive/src/token.rs @@ -1,7 +1,5 @@ //! Module with [`derive_token`](crate::derive_token) macro implementation -#![allow(clippy::arithmetic_side_effects)] // Triggers on quote! side - use super::*; /// [`derive_token`](crate::derive_token()) macro implementation @@ -24,19 +22,23 @@ fn impl_token(ident: &syn::Ident, generics: &syn::Generics) -> proc_macro2::Toke let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); quote!
{ - impl #impl_generics ::iroha_validator::permission::Token for #ident #ty_generics #where_clause { - fn is_owned_by(&self, account_id: &::iroha_validator::data_model::prelude::AccountId) -> bool { - let all_account_tokens: Vec = ::iroha_validator::iroha_wasm::debug::DebugExpectExt::dbg_expect( - ::iroha_validator::iroha_wasm::QueryHost::execute( - &::iroha_validator::iroha_wasm::data_model::prelude::FindPermissionTokensByAccountId::new( + impl #impl_generics ::iroha_executor::permission::Token for #ident #ty_generics #where_clause { + fn is_owned_by(&self, account_id: &::iroha_executor::data_model::account::AccountId) -> bool { + let account_tokens_cursor = ::iroha_executor::smart_contract::debug::DebugExpectExt::dbg_expect( + ::iroha_executor::smart_contract::ExecuteQueryOnHost::execute( + ::iroha_executor::data_model::query::permission::FindPermissionTokensByAccountId::new( account_id.clone(), ) ), "Failed to execute `FindPermissionTokensByAccountId` query" - ).try_into().unwrap(); + ); - all_account_tokens + account_tokens_cursor .into_iter() + .map(|res| ::iroha_executor::smart_contract::debug::DebugExpectExt::dbg_expect( + res, + "Failed to get permission token from cursor" + )) .filter_map(|token| Self::try_from(token).ok()) .any(|token| self == &token) } @@ -49,20 +51,20 @@ fn impl_try_from_permission_token( generics: &syn::Generics, ) -> proc_macro2::TokenStream { let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); - let token_id = quote! { ::name() }; + let token_id = quote! { ::name() }; quote! { - impl #impl_generics ::core::convert::TryFrom<::iroha_validator::data_model::permission::PermissionToken> for #ident #ty_generics #where_clause { - type Error = ::iroha_validator::permission::PermissionTokenConversionError; + impl #impl_generics ::core::convert::TryFrom<::iroha_executor::data_model::permission::PermissionToken> for #ident #ty_generics #where_clause { + type Error = ::iroha_executor::permission::PermissionTokenConversionError; - fn try_from(token: ::iroha_validator::data_model::permission::PermissionToken) -> ::core::result::Result { + fn try_from(token: ::iroha_executor::data_model::permission::PermissionToken) -> ::core::result::Result { if #token_id != *token.definition_id() { - return Err(::iroha_validator::permission::PermissionTokenConversionError::Id( - ::alloc::borrow::ToOwned::to_owned(token.definition_id()) + return Err(::iroha_executor::permission::PermissionTokenConversionError::Id( + ToOwned::to_owned(token.definition_id()) )); } ::serde_json::from_str::(token.payload()) - .map_err(::iroha_validator::permission::PermissionTokenConversionError::Deserialize) + .map_err(::iroha_executor::permission::PermissionTokenConversionError::Deserialize) } } } diff --git a/wasm/validator/derive/src/validate.rs b/smart_contract/executor/derive/src/validate.rs similarity index 90% rename from wasm/validator/derive/src/validate.rs rename to smart_contract/executor/derive/src/validate.rs index 0ffc97afad8..de6cc982cf1 100644 --- a/wasm/validator/derive/src/validate.rs +++ b/smart_contract/executor/derive/src/validate.rs @@ -6,7 +6,7 @@ use syn::{Attribute, Ident, Path, Type}; use super::*; /// [`derive_validate`](crate::derive_validate()) macro implementation -pub fn impl_derive_validate(input: TokenStream) -> TokenStream { +pub fn impl_derive_validate_grant_revoke(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let ident = input.ident; @@ -15,7 +15,7 @@ pub fn impl_derive_validate(input: TokenStream) -> 
TokenStream { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); quote! { - impl #impl_generics ::iroha_validator::permission::ValidateGrantRevoke for #ident #ty_generics + impl #impl_generics ::iroha_executor::permission::ValidateGrantRevoke for #ident #ty_generics #where_clause { #validate_grant_impl @@ -136,15 +136,14 @@ impl ValidateAttribute { fn gen_validate_impls( attributes: &[Attribute], ) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) { - use ValidateAttribute::*; - let validate_attribute = ValidateAttribute::from_attributes(attributes); + match validate_attribute { - General(pass_condition) => ( + ValidateAttribute::General(pass_condition) => ( gen_validate_impl(IsiName::Grant, &pass_condition), gen_validate_impl(IsiName::Revoke, &pass_condition), ), - Separate { + ValidateAttribute::Separate { grant_condition, revoke_condition, } => ( @@ -181,10 +180,10 @@ fn gen_validate_impl(isi_name: IsiName, pass_condition: &Type) -> proc_macro2::T let doc_intro = match isi_name { IsiName::Grant => { - "Validate [`Grant`](::iroha_validator::data_model::prelude::Grant) instruction.\n" + "Validate [`Grant`](::iroha_executor::data_model::prelude::Grant) instruction.\n" } IsiName::Revoke => { - "Validate [`Revoke`](::iroha_validator::data_model::prelude::Revoke) instruction.\n" + "Validate [`Revoke`](::iroha_executor::data_model::prelude::Revoke) instruction.\n" } }; @@ -196,12 +195,12 @@ fn gen_validate_impl(isi_name: IsiName, pass_condition: &Type) -> proc_macro2::T #[doc = #pass_condition_str] #[doc = "`]"] #[inline] - fn #fn_name(&self, authority: &::iroha_validator::data_model::account::AccountId, block_height: u64) -> ::iroha_validator::data_model::validator::Result { + fn #fn_name(&self, authority: &::iroha_executor::data_model::account::AccountId, block_height: u64) -> ::iroha_executor::data_model::executor::Result { let condition = <#pass_condition as ::core::convert::From<&Self>>::from(&self); < #pass_condition as - ::iroha_validator::permission::PassCondition + ::iroha_executor::permission::PassCondition >::validate(&condition, authority, block_height) } } diff --git a/smart_contract/executor/src/default.rs b/smart_contract/executor/src/default.rs new file mode 100644 index 00000000000..78e46240d7f --- /dev/null +++ b/smart_contract/executor/src/default.rs @@ -0,0 +1,1240 @@ +//! 
Definition of Iroha default executor and accompanying validation functions +#![allow(missing_docs, clippy::missing_errors_doc)] + +pub mod tokens; + +use alloc::format; + +pub use account::{ + visit_burn_account_public_key, visit_mint_account_public_key, + visit_mint_account_signature_check_condition, visit_remove_account_key_value, + visit_set_account_key_value, visit_unregister_account, +}; +pub use asset::{ + visit_burn_asset, visit_mint_asset, visit_register_asset, visit_remove_asset_key_value, + visit_set_asset_key_value, visit_transfer_asset, visit_unregister_asset, +}; +pub use asset_definition::{ + visit_remove_asset_definition_key_value, visit_set_asset_definition_key_value, + visit_transfer_asset_definition, visit_unregister_asset_definition, +}; +pub use domain::{ + visit_remove_domain_key_value, visit_set_domain_key_value, visit_transfer_domain, + visit_unregister_domain, +}; +pub use executor::visit_upgrade_executor; +use iroha_smart_contract::debug::DebugExpectExt as _; +pub use parameter::{visit_new_parameter, visit_set_parameter}; +pub use peer::visit_unregister_peer; +pub use permission_token::{visit_grant_account_permission, visit_revoke_account_permission}; +pub use role::{ + visit_grant_account_role, visit_register_role, visit_revoke_account_role, visit_unregister_role, +}; +pub use trigger::{ + visit_burn_trigger_repetitions, visit_execute_trigger, visit_mint_trigger_repetitions, + visit_unregister_trigger, +}; + +use crate::{permission, permission::Token as _, prelude::*}; + +macro_rules! evaluate_expr { + ($visitor:ident, $authority:ident, <$isi:ident as $isi_type:ty>::$field:ident()) => {{ + $visitor.visit_expression($authority, $isi.$field()); + + $visitor.evaluate($isi.$field()).dbg_expect(&alloc::format!( + "Failed to evaluate field '{}::{}'", + stringify!($isi_type), + stringify!($field), + )) + }}; +} + +pub fn default_permission_token_schema() -> PermissionTokenSchema { + let mut schema = iroha_executor::PermissionTokenSchema::default(); + + macro_rules! add_to_schema { + ($token_ty:ty) => { + schema.insert::<$token_ty>(); + }; + } + + tokens::map_token_type!(add_to_schema); + + schema +} + +/// Default validation for [`SignedTransaction`]. +/// +/// # Warning +/// +/// Each instruction is executed in sequence following successful validation. +/// [`Executable::Wasm`] is not executed because it is validated on the host side. +pub fn visit_transaction( + executor: &mut V, + authority: &AccountId, + transaction: &SignedTransaction, +) { + match transaction.payload().instructions() { + Executable::Wasm(wasm) => executor.visit_wasm(authority, wasm), + Executable::Instructions(instructions) => { + for isi in instructions { + if executor.verdict().is_ok() { + executor.visit_instruction(authority, isi); + } + } + } + } +} + +/// Default validation for [`InstructionExpr`]. +/// +/// # Warning +/// +/// Instruction is executed following successful validation +pub fn visit_instruction( + executor: &mut V, + authority: &AccountId, + isi: &InstructionExpr, +) { + macro_rules! 
isi_executors { + ( + single {$( + $executor:ident($isi:ident) + ),+ $(,)?} + composite {$( + $composite_executor:ident($composite_isi:ident) + ),+ $(,)?} + ) => { + match isi { + InstructionExpr::NewParameter(isi) => { + let parameter = evaluate_expr!(executor, authority, ::parameter()); + executor.visit_new_parameter(authority, NewParameter{parameter}); + + if executor.verdict().is_ok() { + isi_executors!(@execute isi); + } + } + InstructionExpr::SetParameter(isi) => { + let parameter = evaluate_expr!(executor, authority, ::parameter()); + executor.visit_set_parameter(authority, SetParameter{parameter}); + + if executor.verdict().is_ok() { + isi_executors!(@execute isi); + } + } + InstructionExpr::ExecuteTrigger(isi) => { + let trigger_id = evaluate_expr!(executor, authority, ::trigger_id()); + executor.visit_execute_trigger(authority, ExecuteTrigger{trigger_id}); + + if executor.verdict().is_ok() { + isi_executors!(@execute isi); + } + } + InstructionExpr::Log(isi) => { + let msg = evaluate_expr!(executor, authority, ::msg()); + let level = evaluate_expr!(executor, authority, ::level()); + executor.visit_log(authority, Log{level, msg}); + + if executor.verdict().is_ok() { + isi_executors!(@execute isi); + } + } $( + InstructionExpr::$isi(isi) => { + executor.$executor(authority, isi); + + if executor.verdict().is_ok() { + isi_executors!(@execute isi); + } + } )+ $( + // NOTE: `visit_and_execute_instructions` is reentrant, so don't execute composite instructions + InstructionExpr::$composite_isi(isi) => executor.$composite_executor(authority, isi), )+ + } + }; + (@execute $isi:ident) => { + // TODO: Execution should be infallible after successful validation + if let Err(err) = isi.execute() { + executor.deny(err); + } + } + } + + isi_executors! { + single { + visit_burn(Burn), + visit_fail(Fail), + visit_grant(Grant), + visit_mint(Mint), + visit_register(Register), + visit_remove_key_value(RemoveKeyValue), + visit_revoke(Revoke), + visit_set_key_value(SetKeyValue), + visit_transfer(Transfer), + visit_unregister(Unregister), + visit_upgrade(Upgrade), + } + + composite { + visit_sequence(Sequence), + visit_pair(Pair), + visit_if(If), + } + } +} + +pub fn visit_unsupported( + executor: &mut V, + _authority: &AccountId, + isi: T, +) { + deny!(executor, "{isi:?}: Unsupported operation"); +} + +pub fn visit_expression( + executor: &mut V, + authority: &AccountId, + expression: &EvaluatesTo, +) { + macro_rules! 
visit_binary_expression { + ($e:ident) => {{ + executor.visit_expression(authority, $e.left()); + + if executor.verdict().is_ok() { + executor.visit_expression(authority, $e.right()); + } + }}; + } + + match expression.expression() { + Expression::Add(expr) => visit_binary_expression!(expr), + Expression::Subtract(expr) => visit_binary_expression!(expr), + Expression::Multiply(expr) => visit_binary_expression!(expr), + Expression::Divide(expr) => visit_binary_expression!(expr), + Expression::Mod(expr) => visit_binary_expression!(expr), + Expression::RaiseTo(expr) => visit_binary_expression!(expr), + Expression::Greater(expr) => visit_binary_expression!(expr), + Expression::Less(expr) => visit_binary_expression!(expr), + Expression::Equal(expr) => visit_binary_expression!(expr), + Expression::Not(expr) => executor.visit_expression(authority, expr.expression()), + Expression::And(expr) => visit_binary_expression!(expr), + Expression::Or(expr) => visit_binary_expression!(expr), + Expression::If(expr) => { + executor.visit_expression(authority, expr.condition()); + + if executor.verdict().is_ok() { + executor.visit_expression(authority, expr.then()); + } + + if executor.verdict().is_ok() { + executor.visit_expression(authority, expr.otherwise()); + } + } + Expression::Contains(expr) => { + executor.visit_expression(authority, expr.collection()); + + if executor.verdict().is_ok() { + executor.visit_expression(authority, expr.element()); + } + } + Expression::ContainsAll(expr) => { + executor.visit_expression(authority, expr.collection()); + + if executor.verdict().is_ok() { + executor.visit_expression(authority, expr.elements()); + } + } + Expression::ContainsAny(expr) => { + executor.visit_expression(authority, expr.collection()); + + if executor.verdict().is_ok() { + executor.visit_expression(authority, expr.elements()); + } + } + Expression::Where(expr) => executor.visit_expression(authority, expr.expression()), + Expression::Query(query) => executor.visit_query(authority, query), + Expression::ContextValue(_) | Expression::Raw(_) => (), + } +} + +pub fn visit_if( + executor: &mut V, + authority: &AccountId, + isi: &ConditionalExpr, +) { + let condition = evaluate_expr!(executor, authority, ::condition()); + + // TODO: Do we have to make sure both branches are syntactically valid? 
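+ // NOTE: Only the branch that is actually taken is visited below, so the
+ // verdict never depends on instructions in the branch that is skipped.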
+ if condition { + executor.visit_instruction(authority, isi.then()); + } else if let Some(otherwise) = isi.otherwise() { + executor.visit_instruction(authority, otherwise); + } +} + +pub fn visit_pair(executor: &mut V, authority: &AccountId, isi: &PairExpr) { + executor.visit_instruction(authority, isi.left_instruction()); + + if executor.verdict().is_ok() { + executor.visit_instruction(authority, isi.right_instruction()) + } +} + +pub fn visit_sequence( + executor: &mut V, + authority: &AccountId, + sequence: &SequenceExpr, +) { + for isi in sequence.instructions() { + if executor.verdict().is_ok() { + executor.visit_instruction(authority, isi); + } + } +} + +pub mod peer { + use super::*; + + #[allow(clippy::needless_pass_by_value)] + pub fn visit_unregister_peer( + executor: &mut V, + authority: &AccountId, + _isi: Unregister, + ) { + if is_genesis(executor) { + pass!(executor); + } + if tokens::peer::CanUnregisterAnyPeer.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't unregister peer"); + } +} + +pub mod domain { + use permission::domain::is_domain_owner; + + use super::*; + + pub fn visit_unregister_domain( + executor: &mut V, + authority: &AccountId, + isi: Unregister, + ) { + let domain_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_domain_owner(&domain_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_unregister_domain_token = tokens::domain::CanUnregisterDomain { domain_id }; + if can_unregister_domain_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't unregister domain"); + } + + pub fn visit_transfer_domain( + executor: &mut V, + authority: &AccountId, + isi: Transfer, + ) { + let destination_id = isi.object; + + if is_genesis(executor) { + pass!(executor); + } + match is_domain_owner(&destination_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + + deny!(executor, "Can't transfer domain of another account"); + } + + pub fn visit_set_domain_key_value( + executor: &mut V, + authority: &AccountId, + isi: SetKeyValue, + ) { + let domain_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_domain_owner(&domain_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_set_key_value_in_domain_token = + tokens::domain::CanSetKeyValueInDomain { domain_id }; + if can_set_key_value_in_domain_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't set key value in domain metadata"); + } + + pub fn visit_remove_domain_key_value( + executor: &mut V, + authority: &AccountId, + isi: RemoveKeyValue, + ) { + let domain_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_domain_owner(&domain_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_remove_key_value_in_domain_token = + tokens::domain::CanRemoveKeyValueInDomain { domain_id }; + if can_remove_key_value_in_domain_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't remove key value in domain metadata"); + } +} + +pub mod account { + use permission::account::is_account_owner; + + use super::*; + + pub fn visit_unregister_account( + executor: &mut V, + authority: &AccountId, + isi: Unregister, + ) { + let account_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match 
is_account_owner(&account_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_unregister_user_account = tokens::account::CanUnregisterAccount { account_id }; + if can_unregister_user_account.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't unregister another account"); + } + + pub fn visit_mint_account_public_key( + executor: &mut V, + authority: &AccountId, + isi: Mint, + ) { + let account_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_account_owner(&account_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_mint_user_public_keys = tokens::account::CanMintUserPublicKeys { account_id }; + if can_mint_user_public_keys.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't mint public keys of another account"); + } + + pub fn visit_burn_account_public_key( + executor: &mut V, + authority: &AccountId, + isi: Burn, + ) { + let account_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_account_owner(&account_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_burn_user_public_keys = tokens::account::CanBurnUserPublicKeys { account_id }; + if can_burn_user_public_keys.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't burn public keys of another account"); + } + + pub fn visit_mint_account_signature_check_condition( + executor: &mut V, + authority: &AccountId, + isi: Mint, + ) { + let account_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_account_owner(&account_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_mint_user_signature_check_conditions_token = + tokens::account::CanMintUserSignatureCheckConditions { account_id }; + if can_mint_user_signature_check_conditions_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't mint signature check conditions of another account" + ); + } + + pub fn visit_set_account_key_value( + executor: &mut V, + authority: &AccountId, + isi: SetKeyValue, + ) { + let account_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_account_owner(&account_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_set_key_value_in_user_account_token = + tokens::account::CanSetKeyValueInUserAccount { account_id }; + if can_set_key_value_in_user_account_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't set value to the metadata of another account" + ); + } + + pub fn visit_remove_account_key_value( + executor: &mut V, + authority: &AccountId, + isi: RemoveKeyValue, + ) { + let account_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_account_owner(&account_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_remove_key_value_in_user_account_token = + tokens::account::CanRemoveKeyValueInUserAccount { account_id }; + if can_remove_key_value_in_user_account_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't remove value from the metadata of another account" + ); + } +} + +pub mod asset_definition { + use permission::{account::is_account_owner, 
asset_definition::is_asset_definition_owner}; + + use super::*; + + pub fn visit_unregister_asset_definition( + executor: &mut V, + authority: &AccountId, + isi: Unregister, + ) { + let asset_definition_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_definition_owner(&asset_definition_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_unregister_asset_definition_token = + tokens::asset_definition::CanUnregisterAssetDefinition { + asset_definition_id, + }; + if can_unregister_asset_definition_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't unregister assets registered by other accounts" + ); + } + + pub fn visit_transfer_asset_definition( + executor: &mut V, + authority: &AccountId, + isi: Transfer, + ) { + let source_id = isi.source_id; + let destination_id = isi.object; + + if is_genesis(executor) { + pass!(executor); + } + match is_account_owner(&source_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + match is_asset_definition_owner(&destination_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + + deny!( + executor, + "Can't transfer asset definition of another account" + ); + } + + pub fn visit_set_asset_definition_key_value( + executor: &mut V, + authority: &AccountId, + isi: SetKeyValue, + ) { + let asset_definition_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_definition_owner(&asset_definition_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_set_key_value_in_asset_definition_token = + tokens::asset_definition::CanSetKeyValueInAssetDefinition { + asset_definition_id, + }; + if can_set_key_value_in_asset_definition_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't set value to the asset definition metadata created by another account" + ); + } + + pub fn visit_remove_asset_definition_key_value( + executor: &mut V, + authority: &AccountId, + isi: RemoveKeyValue, + ) { + let asset_definition_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_definition_owner(&asset_definition_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_remove_key_value_in_asset_definition_token = + tokens::asset_definition::CanRemoveKeyValueInAssetDefinition { + asset_definition_id, + }; + if can_remove_key_value_in_asset_definition_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't remove value from the asset definition metadata created by another account" + ); + } +} + +pub mod asset { + use permission::{asset::is_asset_owner, asset_definition::is_asset_definition_owner}; + + use super::*; + + pub fn visit_register_asset( + executor: &mut V, + authority: &AccountId, + isi: Register, + ) { + let asset = isi.object; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_definition_owner(asset.id().definition_id(), authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_register_assets_with_definition_token = + tokens::asset::CanRegisterAssetsWithDefinition { + asset_definition_id: asset.id().definition_id().clone(), + }; + if can_register_assets_with_definition_token.is_owned_by(authority) { + 
pass!(executor); + } + + deny!( + executor, + "Can't register assets with definitions registered by other accounts" + ); + } + + pub fn visit_unregister_asset( + executor: &mut V, + authority: &AccountId, + isi: Unregister, + ) { + let asset_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_owner(&asset_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + match is_asset_definition_owner(asset_id.definition_id(), authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_unregister_assets_with_definition_token = + tokens::asset::CanUnregisterAssetsWithDefinition { + asset_definition_id: asset_id.definition_id().clone(), + }; + if can_unregister_assets_with_definition_token.is_owned_by(authority) { + pass!(executor); + } + let can_unregister_user_asset_token = tokens::asset::CanUnregisterUserAsset { asset_id }; + if can_unregister_user_asset_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't unregister asset from another account"); + } + + pub fn visit_mint_asset( + executor: &mut V, + authority: &AccountId, + isi: Mint, + ) { + let asset_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_definition_owner(asset_id.definition_id(), authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_mint_assets_with_definition_token = tokens::asset::CanMintAssetsWithDefinition { + asset_definition_id: asset_id.definition_id().clone(), + }; + if can_mint_assets_with_definition_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't mint assets with definitions registered by other accounts" + ); + } + + pub fn visit_burn_asset( + executor: &mut V, + authority: &AccountId, + isi: Burn, + ) { + let asset_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_owner(&asset_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + match is_asset_definition_owner(asset_id.definition_id(), authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_burn_assets_with_definition_token = tokens::asset::CanBurnAssetsWithDefinition { + asset_definition_id: asset_id.definition_id().clone(), + }; + if can_burn_assets_with_definition_token.is_owned_by(authority) { + pass!(executor); + } + let can_burn_user_asset_token = tokens::asset::CanBurnUserAsset { asset_id }; + if can_burn_user_asset_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't burn assets from another account"); + } + + pub fn visit_transfer_asset( + executor: &mut V, + authority: &AccountId, + isi: Transfer, + ) { + let asset_id = isi.source_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_owner(&asset_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + match is_asset_definition_owner(asset_id.definition_id(), authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_transfer_assets_with_definition_token = + tokens::asset::CanTransferAssetsWithDefinition { + asset_definition_id: asset_id.definition_id().clone(), + }; + if can_transfer_assets_with_definition_token.is_owned_by(authority) { + pass!(executor); + } + let can_transfer_user_asset_token = 
tokens::asset::CanTransferUserAsset { asset_id }; + if can_transfer_user_asset_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't transfer assets of another account"); + } + + pub fn visit_set_asset_key_value( + executor: &mut V, + authority: &AccountId, + isi: SetKeyValue, + ) { + let asset_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_owner(&asset_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + + let can_set_key_value_in_user_asset_token = + tokens::asset::CanSetKeyValueInUserAsset { asset_id }; + if can_set_key_value_in_user_asset_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't set value to the asset metadata of another account" + ); + } + + pub fn visit_remove_asset_key_value( + executor: &mut V, + authority: &AccountId, + isi: RemoveKeyValue, + ) { + let asset_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_asset_owner(&asset_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_remove_key_value_in_user_asset_token = + tokens::asset::CanRemoveKeyValueInUserAsset { asset_id }; + if can_remove_key_value_in_user_asset_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't remove value from the asset metadata of another account" + ); + } +} + +pub mod parameter { + use super::*; + + #[allow(clippy::needless_pass_by_value)] + pub fn visit_new_parameter( + executor: &mut V, + authority: &AccountId, + _isi: NewParameter, + ) { + if is_genesis(executor) { + pass!(executor); + } + if tokens::parameter::CanCreateParameters.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't create new configuration parameters outside genesis without permission" + ); + } + + #[allow(clippy::needless_pass_by_value)] + pub fn visit_set_parameter( + executor: &mut V, + authority: &AccountId, + _isi: SetParameter, + ) { + if is_genesis(executor) { + pass!(executor); + } + if tokens::parameter::CanSetParameters.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't set configuration parameters without permission" + ); + } +} + +pub mod role { + use super::*; + + macro_rules! impl_validate { + ($executor:ident, $isi:ident, $authority:ident, $method:ident) => { + let role_id = $isi.object; + + let find_role_query_res = match FindRoleByRoleId::new(role_id).execute() { + Ok(res) => res.into_raw_parts().0, + Err(error) => { + deny!($executor, error); + } + }; + let role = Role::try_from(find_role_query_res).unwrap(); + + let mut unknown_tokens = Vec::new(); + for token in role.permissions() { + macro_rules! 
visit_internal { + ($token:ident) => { + if !is_genesis($executor) { + if let Err(error) = permission::ValidateGrantRevoke::$method( + &$token, + $authority, + $executor.block_height(), + ) + { + deny!($executor, error); + } + } + + continue; + }; + } + + tokens::map_token!(token => visit_internal); + unknown_tokens.push(token); + } + + assert!(unknown_tokens.is_empty(), "Role contains unknown permission tokens: {unknown_tokens:?}"); + }; + } + + #[allow(clippy::needless_pass_by_value)] + pub fn visit_register_role( + executor: &mut V, + _authority: &AccountId, + isi: Register, + ) { + let role = isi.object.inner(); + + let mut unknown_tokens = Vec::new(); + for token in role.permissions() { + iroha_smart_contract::debug!(&format!("Checking `{token:?}`")); + + macro_rules! try_from_token { + ($token:ident) => { + let _token = $token; + continue; + }; + } + + tokens::map_token!(token => try_from_token); + unknown_tokens.push(token); + } + + if !unknown_tokens.is_empty() { + deny!( + executor, + ValidationFail::NotPermitted(format!( + "{unknown_tokens:?}: Unrecognised permission tokens" + )) + ); + } + + pass!(executor); + } + + #[allow(clippy::needless_pass_by_value)] + pub fn visit_unregister_role( + executor: &mut V, + authority: &AccountId, + _isi: Unregister, + ) { + if is_genesis(executor) { + pass!(executor); + } + if tokens::role::CanUnregisterAnyRole.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't unregister role"); + } + + pub fn visit_grant_account_role( + executor: &mut V, + authority: &AccountId, + isi: Grant, + ) { + impl_validate!(executor, isi, authority, validate_grant); + } + + pub fn visit_revoke_account_role( + executor: &mut V, + authority: &AccountId, + isi: Revoke, + ) { + impl_validate!(executor, isi, authority, validate_revoke); + } +} + +pub mod trigger { + use permission::trigger::is_trigger_owner; + + use super::*; + + pub fn visit_unregister_trigger( + executor: &mut V, + authority: &AccountId, + isi: Unregister>, + ) { + let trigger_id = isi.object_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_trigger_owner(&trigger_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_unregister_user_trigger_token = + tokens::trigger::CanUnregisterUserTrigger { trigger_id }; + if can_unregister_user_trigger_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't unregister trigger owned by another account" + ); + } + + pub fn visit_mint_trigger_repetitions( + executor: &mut V, + authority: &AccountId, + isi: Mint>, + ) { + let trigger_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_trigger_owner(&trigger_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_mint_user_trigger_token = tokens::trigger::CanMintUserTrigger { trigger_id }; + if can_mint_user_trigger_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't mint execution count for trigger owned by another account" + ); + } + + pub fn visit_burn_trigger_repetitions( + executor: &mut V, + authority: &AccountId, + isi: Burn>, + ) { + let trigger_id = isi.destination_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_trigger_owner(&trigger_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_mint_user_trigger_token = tokens::trigger::CanBurnUserTrigger { trigger_id }; + if 
can_mint_user_trigger_token.is_owned_by(authority) { + pass!(executor); + } + + deny!( + executor, + "Can't burn execution count for trigger owned by another account" + ); + } + + pub fn visit_execute_trigger( + executor: &mut V, + authority: &AccountId, + isi: ExecuteTrigger, + ) { + let trigger_id = isi.trigger_id; + + if is_genesis(executor) { + pass!(executor); + } + match is_trigger_owner(&trigger_id, authority) { + Err(err) => deny!(executor, err), + Ok(true) => pass!(executor), + Ok(false) => {} + } + let can_execute_trigger_token = tokens::trigger::CanExecuteUserTrigger { trigger_id }; + if can_execute_trigger_token.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't execute trigger owned by another account"); + } +} + +pub mod permission_token { + use super::*; + + macro_rules! impl_validate { + ($executor:ident, $authority:ident, $self:ident, $method:ident) => { + let token = $self.object; + + macro_rules! visit_internal { + ($token:ident) => { + if is_genesis($executor) { + pass!($executor); + } + if let Err(error) = permission::ValidateGrantRevoke::$method( + &$token, + $authority, + $executor.block_height(), + ) { + deny!($executor, error); + } + + pass!($executor); + }; + } + + tokens::map_token!(token => visit_internal); + + deny!( + $executor, + ValidationFail::NotPermitted(format!("{token:?}: Unknown permission token")) + ); + }; + } + + pub fn visit_grant_account_permission( + executor: &mut V, + authority: &AccountId, + isi: Grant, + ) { + impl_validate!(executor, authority, isi, validate_grant); + } + + pub fn visit_revoke_account_permission( + executor: &mut V, + authority: &AccountId, + isi: Revoke, + ) { + impl_validate!(executor, authority, isi, validate_revoke); + } +} + +pub mod executor { + use super::*; + + #[allow(clippy::needless_pass_by_value)] + pub fn visit_upgrade_executor( + executor: &mut V, + authority: &AccountId, + _isi: Upgrade, + ) { + if is_genesis(executor) { + pass!(executor); + } + if tokens::executor::CanUpgradeExecutor.is_owned_by(authority) { + pass!(executor); + } + + deny!(executor, "Can't upgrade executor"); + } +} + +fn is_genesis(executor: &V) -> bool { + executor.block_height() == 0 +} diff --git a/smart_contract/executor/src/default/tokens.rs b/smart_contract/executor/src/default/tokens.rs new file mode 100644 index 00000000000..df79280ba40 --- /dev/null +++ b/smart_contract/executor/src/default/tokens.rs @@ -0,0 +1,470 @@ +//! Definition of Iroha default permission tokens +#![allow(missing_docs, clippy::missing_errors_doc)] + +use alloc::{borrow::ToOwned, format, string::String, vec::Vec}; + +use iroha_executor_derive::ValidateGrantRevoke; +use iroha_smart_contract::data_model::{executor::Result, prelude::*}; + +use crate::permission::{self, Token as _}; + +/// Declare token types of current module. Use it with a full path to the token. +/// Used to iterate over tokens to validate `Grant` and `Revoke` instructions. +/// +/// +/// TODO: Replace with procedural macro. Example: +/// ``` +/// mod tokens { +/// use std::borrow::ToOwned; +/// +/// use iroha_schema::IntoSchema; +/// use iroha_executor_derive::{Token, ValidateGrantRevoke}; +/// use serde::{Deserialize, Serialize}; +/// +/// #[derive(Clone, PartialEq, Deserialize, Serialize, IntoSchema, Token, ValidateGrantRevoke)] +/// #[validate(iroha_executor::permission::OnlyGenesis)] +/// pub struct MyToken; +/// } +/// ``` +macro_rules! declare_tokens { + ($($($token_path:ident ::)+ { $token_ty:ident }),+ $(,)?) => { + macro_rules! 
map_token { + ($token:ident => $callback:ident) => { + match $token.definition_id().as_ref() { $( + stringify!($token_ty) => { + if let Ok(token) = <$($token_path::)+$token_ty>::try_from($token.clone()) { + $callback!(token); + } + } )+ + _ => {} + } + + }; + } + + macro_rules! map_token_type { + ($callback:ident) => { $( + $callback!($($token_path::)+$token_ty); )+ + }; + } + + pub(crate) use map_token; + pub(crate) use map_token_type; + }; +} + +macro_rules! token { + ($($meta:meta)* $item:item) => { + #[derive(PartialEq, Eq, serde::Serialize, serde::Deserialize)] + #[derive(Clone, iroha_executor_derive::Token)] + #[derive(iroha_schema::IntoSchema)] + $($meta)* + $item + }; +} + +declare_tokens! { + crate::default::tokens::peer::{CanUnregisterAnyPeer}, + + crate::default::tokens::domain::{CanUnregisterDomain}, + crate::default::tokens::domain::{CanSetKeyValueInDomain}, + crate::default::tokens::domain::{CanRemoveKeyValueInDomain}, + + crate::default::tokens::account::{CanUnregisterAccount}, + crate::default::tokens::account::{CanMintUserPublicKeys}, + crate::default::tokens::account::{CanBurnUserPublicKeys}, + crate::default::tokens::account::{CanMintUserSignatureCheckConditions}, + crate::default::tokens::account::{CanSetKeyValueInUserAccount}, + crate::default::tokens::account::{CanRemoveKeyValueInUserAccount}, + + crate::default::tokens::asset_definition::{CanUnregisterAssetDefinition}, + crate::default::tokens::asset_definition::{CanSetKeyValueInAssetDefinition}, + crate::default::tokens::asset_definition::{CanRemoveKeyValueInAssetDefinition}, + + crate::default::tokens::asset::{CanRegisterAssetsWithDefinition}, + crate::default::tokens::asset::{CanUnregisterAssetsWithDefinition}, + crate::default::tokens::asset::{CanUnregisterUserAsset}, + crate::default::tokens::asset::{CanBurnAssetsWithDefinition}, + crate::default::tokens::asset::{CanBurnUserAsset}, + crate::default::tokens::asset::{CanMintAssetsWithDefinition}, + crate::default::tokens::asset::{CanTransferAssetsWithDefinition}, + crate::default::tokens::asset::{CanTransferUserAsset}, + crate::default::tokens::asset::{CanSetKeyValueInUserAsset}, + crate::default::tokens::asset::{CanRemoveKeyValueInUserAsset}, + + crate::default::tokens::parameter::{CanGrantPermissionToCreateParameters}, + crate::default::tokens::parameter::{CanRevokePermissionToCreateParameters}, + crate::default::tokens::parameter::{CanCreateParameters}, + crate::default::tokens::parameter::{CanGrantPermissionToSetParameters}, + crate::default::tokens::parameter::{CanRevokePermissionToSetParameters}, + crate::default::tokens::parameter::{CanSetParameters}, + + crate::default::tokens::role::{CanUnregisterAnyRole}, + + crate::default::tokens::trigger::{CanExecuteUserTrigger}, + crate::default::tokens::trigger::{CanUnregisterUserTrigger}, + crate::default::tokens::trigger::{CanMintUserTrigger}, + crate::default::tokens::trigger::{CanBurnUserTrigger}, + + crate::default::tokens::executor::{CanUpgradeExecutor}, +} + +pub mod peer { + use super::*; + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanUnregisterAnyPeer; + } +} + +pub mod domain { + use super::*; + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::domain::Owner)] + #[validate(permission::domain::Owner)] + pub struct CanUnregisterDomain { + pub domain_id: DomainId, + } + } + + token! 
{ + #[derive(ValidateGrantRevoke, permission::derive_conversions::domain::Owner)] + #[validate(permission::domain::Owner)] + pub struct CanSetKeyValueInDomain { + pub domain_id: DomainId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::domain::Owner)] + #[validate(permission::domain::Owner)] + pub struct CanRemoveKeyValueInDomain { + pub domain_id: DomainId, + } + } +} + +pub mod account { + use super::*; + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] + #[validate(permission::account::Owner)] + pub struct CanUnregisterAccount { + pub account_id: AccountId, + } + } + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] + #[validate(permission::account::Owner)] + pub struct CanMintUserPublicKeys { + pub account_id: AccountId, + } + } + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] + #[validate(permission::account::Owner)] + pub struct CanBurnUserPublicKeys { + pub account_id: AccountId, + } + } + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] + #[validate(permission::account::Owner)] + pub struct CanMintUserSignatureCheckConditions { + pub account_id: AccountId, + } + } + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] + #[validate(permission::account::Owner)] + pub struct CanSetKeyValueInUserAccount { + pub account_id: AccountId, + } + } + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] + #[validate(permission::account::Owner)] + pub struct CanRemoveKeyValueInUserAccount { + pub account_id: AccountId, + } + } +} + +pub mod asset_definition { + use super::*; + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanUnregisterAssetDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanSetKeyValueInAssetDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanRemoveKeyValueInAssetDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } +} + +pub mod asset { + use super::*; + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanRegisterAssetsWithDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanUnregisterAssetsWithDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] + #[validate(permission::asset::Owner)] + pub struct CanUnregisterUserAsset { + pub asset_id: AssetId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanBurnAssetsWithDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! 
{ + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] + #[validate(permission::asset::Owner)] + pub struct CanBurnUserAsset { + pub asset_id: AssetId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanMintAssetsWithDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] + #[validate(permission::asset_definition::Owner)] + pub struct CanTransferAssetsWithDefinition { + pub asset_definition_id: AssetDefinitionId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] + #[validate(permission::asset::Owner)] + pub struct CanTransferUserAsset { + pub asset_id: AssetId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] + #[validate(permission::asset::Owner)] + pub struct CanSetKeyValueInUserAsset { + pub asset_id: AssetId, + } + } + + token! { + #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] + #[validate(permission::asset::Owner)] + pub struct CanRemoveKeyValueInUserAsset { + pub asset_id: AssetId, + } + } +} + +pub mod parameter { + use permission::ValidateGrantRevoke; + + use super::*; + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanGrantPermissionToCreateParameters; + } + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanRevokePermissionToCreateParameters; + } + + token! { + #[derive(Copy)] + pub struct CanCreateParameters; + } + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanGrantPermissionToSetParameters; + } + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanRevokePermissionToSetParameters; + } + + token! 
{ + #[derive(Copy)] + pub struct CanSetParameters; + } + + impl ValidateGrantRevoke for CanCreateParameters { + fn validate_grant(&self, authority: &AccountId, _block_height: u64) -> Result { + if CanGrantPermissionToCreateParameters.is_owned_by(authority) { + return Ok(()); + } + + Err(ValidationFail::NotPermitted( + "Can't grant permission to create new configuration parameters outside genesis without permission from genesis" + .to_owned() + )) + } + + fn validate_revoke(&self, authority: &AccountId, _block_height: u64) -> Result { + if CanGrantPermissionToCreateParameters.is_owned_by(authority) { + return Ok(()); + } + + Err(ValidationFail::NotPermitted( + "Can't revoke permission to create new configuration parameters outside genesis without permission from genesis" + .to_owned() + )) + } + } + + impl ValidateGrantRevoke for CanSetParameters { + fn validate_grant(&self, authority: &AccountId, _block_height: u64) -> Result { + if CanGrantPermissionToSetParameters.is_owned_by(authority) { + return Ok(()); + } + + Err(ValidationFail::NotPermitted( + "Can't grant permission to set configuration parameters outside genesis without permission from genesis" + .to_owned() + )) + } + + fn validate_revoke(&self, authority: &AccountId, _block_height: u64) -> Result { + if CanRevokePermissionToSetParameters.is_owned_by(authority) { + return Ok(()); + } + + Err(ValidationFail::NotPermitted( + "Can't revoke permission to set configuration parameters outside genesis without permission from genesis" + .to_owned() + )) + } + } +} + +pub mod role { + use super::*; + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanUnregisterAnyRole; + } +} + +pub mod trigger { + use super::*; + + macro_rules! impl_froms { + ($($name:path),+ $(,)?) => {$( + impl<'token> From<&'token $name> for permission::trigger::Owner<'token> { + fn from(value: &'token $name) -> Self { + Self { + trigger_id: &value.trigger_id, + } + } + } + )+}; + } + + token! { + #[derive(ValidateGrantRevoke)] + #[validate(permission::trigger::Owner)] + pub struct CanExecuteUserTrigger { + pub trigger_id: TriggerId, + } + } + + token! { + #[derive(ValidateGrantRevoke)] + #[validate(permission::trigger::Owner)] + pub struct CanUnregisterUserTrigger { + pub trigger_id: TriggerId, + } + } + + token! { + #[derive(ValidateGrantRevoke)] + #[validate(permission::trigger::Owner)] + pub struct CanMintUserTrigger { + pub trigger_id: TriggerId, + } + } + + token! { + #[derive(ValidateGrantRevoke)] + #[validate(permission::trigger::Owner)] + pub struct CanBurnUserTrigger { + pub trigger_id: TriggerId, + } + } + + impl_froms!( + CanExecuteUserTrigger, + CanUnregisterUserTrigger, + CanMintUserTrigger, + CanBurnUserTrigger, + ); +} + +pub mod executor { + use super::*; + + token! { + #[derive(Copy, ValidateGrantRevoke)] + #[validate(permission::OnlyGenesis)] + pub struct CanUpgradeExecutor; + } +} diff --git a/smart_contract/executor/src/lib.rs b/smart_contract/executor/src/lib.rs new file mode 100644 index 00000000000..a8d9dea11e0 --- /dev/null +++ b/smart_contract/executor/src/lib.rs @@ -0,0 +1,237 @@ +//! API for *Runtime Executors*. 
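+//!
+//! A minimal, hypothetical executor built on this API might look as follows.
+//! This is a sketch only: the derive names and the expected field shape are the
+//! ones documented in `smart_contract/executor/derive/src/lib.rs`, and a real
+//! `migrate()` would typically also set the permission token schema.
+//!
+//! ```ignore
+//! use iroha_executor::prelude::*;
+//!
+//! #[derive(Constructor, ValidateEntrypoints, ExpressionEvaluator, Validate, Visit)]
+//! pub struct Executor {
+//!     verdict: Result,
+//!     block_height: u64,
+//!     host: iroha_executor::smart_contract::Host,
+//! }
+//!
+//! #[entrypoint]
+//! pub fn migrate(_block_height: u64) -> MigrationResult {
+//!     Ok(())
+//! }
+//! ```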
+#![no_std] +#![allow(unsafe_code)] + +extern crate alloc; +extern crate self as iroha_executor; + +use alloc::vec::Vec; + +pub use iroha_data_model as data_model; +use iroha_data_model::{ + executor::Result, permission::PermissionTokenId, visit::Visit, ValidationFail, +}; +#[cfg(not(test))] +use iroha_data_model::{prelude::*, smart_contract::payloads}; +pub use iroha_schema::MetaMap; +pub use iroha_smart_contract as smart_contract; +pub use iroha_smart_contract_utils::{debug, encode_with_length_prefix}; +#[cfg(not(test))] +use iroha_smart_contract_utils::{decode_with_length_prefix_from_raw, encode_and_execute}; +pub use smart_contract::parse; + +pub mod default; +pub mod permission; + +pub mod utils { + //! Utilities for implementing smart contract FFI + pub use iroha_smart_contract_utils::encode_with_length_prefix; +} + +pub mod log { + //! WASM logging utilities + pub use iroha_smart_contract_utils::{debug, error, event, info, log::*, trace, warn}; +} + +/// Get payload for `validate_transaction()` entrypoint. +/// +/// # Traps +/// +/// Host side will generate a trap if this function is not called from the +/// executor's `validate_transaction()` entrypoint. +#[cfg(not(test))] +pub fn get_validate_transaction_payload() -> payloads::Validate { + // Safety: ownership of the returned result is transferred into `_decode_from_raw` + unsafe { decode_with_length_prefix_from_raw(host::get_validate_transaction_payload()) } +} + +/// Get payload for `validate_instruction()` entrypoint. +/// +/// # Traps +/// +/// Host side will generate a trap if this function is not called from the +/// executor's `validate_instruction()` entrypoint. +#[cfg(not(test))] +pub fn get_validate_instruction_payload() -> payloads::Validate { + // Safety: ownership of the returned result is transferred into `_decode_from_raw` + unsafe { decode_with_length_prefix_from_raw(host::get_validate_instruction_payload()) } +} + +/// Get payload for `validate_query()` entrypoint. +/// +/// # Traps +/// +/// Host side will generate a trap if this function is not called from the +/// executor's `validate_query()` entrypoint. +#[cfg(not(test))] +pub fn get_validate_query_payload() -> payloads::Validate { + // Safety: ownership of the returned result is transferred into `_decode_from_raw` + unsafe { decode_with_length_prefix_from_raw(host::get_validate_query_payload()) } +} + +/// Get payload for `migrate()` entrypoint. +/// +/// # Traps +/// +/// Host side will generate a trap if this function is not called from the +/// executor's `migrate()` entrypoint. +#[cfg(not(test))] +pub fn get_migrate_payload() -> payloads::Migrate { + // Safety: ownership of the returned result is transferred into `_decode_from_raw` + unsafe { decode_with_length_prefix_from_raw(host::get_migrate_payload()) } +} + +/// Set new [`PermissionTokenSchema`]. +/// +/// # Errors +/// +/// - If execution on Iroha side failed +/// +/// # Traps +/// +/// Host side will generate a trap if this function is not called from the +/// executor's `migrate()` entrypoint. +#[cfg(not(test))] +pub fn set_permission_token_schema(schema: &data_model::permission::PermissionTokenSchema) { + // Safety: `host::set_permission_token_schema` doesn't take ownership of its pointer parameter + unsafe { encode_and_execute(&schema, host::set_permission_token_schema) } +} + +#[cfg(not(test))] +mod host { + #[link(wasm_import_module = "iroha")] + extern "C" { + /// Get payload for `validate_transaction()` entrypoint.
+ /// + /// # Warning + /// + /// This function does transfer ownership of the result to the caller + pub(super) fn get_validate_transaction_payload() -> *const u8; + + /// Get payload for `validate_instruction()` entrypoint. + /// + /// # Warning + /// + /// This function does transfer ownership of the result to the caller + pub(super) fn get_validate_instruction_payload() -> *const u8; + + /// Get payload for `validate_query()` entrypoint. + /// + /// # Warning + /// + /// This function does transfer ownership of the result to the caller + pub(super) fn get_validate_query_payload() -> *const u8; + + /// Get payload for `migrate()` entrypoint. + /// + /// # Warning + /// + /// This function does transfer ownership of the result to the caller + pub(super) fn get_migrate_payload() -> *const u8; + + /// Set new [`PermissionTokenSchema`]. + pub(super) fn set_permission_token_schema(ptr: *const u8, len: usize); + } +} + +/// Shortcut for `return Ok(())`. +#[macro_export] +macro_rules! pass { + ($executor:ident) => {{ + #[cfg(debug_assertions)] + if let Err(_error) = $executor.verdict() { + unreachable!("Executor already denied"); + } + + return; + }}; +} + +/// Shortcut for `return Err(ValidationFail)`. +/// +/// Supports [`format!`](alloc::fmt::format) syntax as well as any expression returning [`String`](alloc::string::String). +#[macro_export] +macro_rules! deny { + ($executor:ident, $l:literal $(,)?) => {{ + #[cfg(debug_assertions)] + if let Err(_error) = $executor.verdict() { + unreachable!("Executor already denied"); + } + $executor.deny($crate::data_model::ValidationFail::NotPermitted( + ::alloc::fmt::format(::core::format_args!($l)), + )); + return; + }}; + ($executor:ident, $e:expr $(,)?) => {{ + #[cfg(debug_assertions)] + if let Err(_error) = $executor.verdict() { + unreachable!("Executor already denied"); + } + $executor.deny($e); + return; + }}; +} + +/// Collection of all permission tokens defined by the executor +#[derive(Debug, Clone, Default)] +pub struct PermissionTokenSchema(Vec<PermissionTokenId>, MetaMap); + +impl PermissionTokenSchema { + /// Remove permission token from this collection + pub fn remove<T: permission::Token>(&mut self) { + let to_remove = <T as permission::Token>::name(); + + if let Some(pos) = self.0.iter().position(|token_id| *token_id == to_remove) { + self.0.remove(pos); + <T as iroha_schema::IntoSchema>::remove_from_schema(&mut self.1); + } + } + + /// Insert new permission token into this collection + pub fn insert<T: permission::Token>(&mut self) { + <T as iroha_schema::IntoSchema>::update_schema_map(&mut self.1); + self.0.push(<T as permission::Token>::name()); + } + + /// Serializes schema into a JSON string representation + pub fn serialize(mut self) -> (Vec<PermissionTokenId>, alloc::string::String) { + self.0.sort(); + + ( + self.0, + serde_json::to_string(&self.1).expect("schema serialization must not fail"), + ) + } +} + +/// Executor of Iroha operations +pub trait Validate: Visit { + /// Executor verdict. + fn verdict(&self) -> &Result; + + /// Current block height. + fn block_height(&self) -> u64; + + /// Set executor verdict to deny + fn deny(&mut self, reason: ValidationFail); +} + +pub mod prelude { + //!
Contains useful re-exports + + pub use alloc::vec::Vec; + + pub use iroha_data_model::{ + executor::{MigrationError, MigrationResult, Result}, + prelude::*, + visit::Visit, + ValidationFail, + }; + pub use iroha_executor_derive::{ + entrypoint, Constructor, ExpressionEvaluator, Token, Validate, ValidateEntrypoints, + ValidateGrantRevoke, Visit, + }; + pub use iroha_smart_contract::{prelude::*, Context}; + + pub use super::{deny, pass, PermissionTokenSchema, Validate}; +} diff --git a/wasm/validator/src/permission.rs b/smart_contract/executor/src/permission.rs similarity index 56% rename from wasm/validator/src/permission.rs rename to smart_contract/executor/src/permission.rs index 300e7c8e5d2..e08040fe76a 100644 --- a/wasm/validator/src/permission.rs +++ b/smart_contract/executor/src/permission.rs @@ -3,6 +3,8 @@ use alloc::borrow::ToOwned as _; use iroha_schema::IntoSchema; +use iroha_smart_contract::QueryOutputCursor; +use iroha_smart_contract_utils::debug::DebugExpectExt as _; use serde::{de::DeserializeOwned, Serialize}; use crate::{data_model::prelude::*, prelude::*}; @@ -55,24 +57,30 @@ pub enum PermissionTokenConversionError { pub mod derive_conversions { //! Module with derive macros to generate conversion from custom strongly-typed token - //! to some pass condition to successfully derive [`ValidateGrantRevoke`](iroha_validator_derive::ValidateGrantRevoke) + //! to some pass condition to successfully derive [`ValidateGrantRevoke`](iroha_executor_derive::ValidateGrantRevoke) pub mod asset { //! Module with derives related to asset tokens - pub use iroha_validator_derive::RefIntoAssetOwner as Owner; + pub use iroha_executor_derive::RefIntoAssetOwner as Owner; } pub mod asset_definition { //! Module with derives related to asset definition tokens - pub use iroha_validator_derive::RefIntoAssetDefinitionOwner as Owner; + pub use iroha_executor_derive::RefIntoAssetDefinitionOwner as Owner; } pub mod account { //! Module with derives related to account tokens - pub use iroha_validator_derive::RefIntoAccountOwner as Owner; + pub use iroha_executor_derive::RefIntoAccountOwner as Owner; + } + + pub mod domain { + //! Module with derives related to domain tokens + + pub use iroha_executor_derive::RefIntoDomainOwner as Owner; } } @@ -81,15 +89,29 @@ pub mod asset { use super::*; + /// Check if `authority` is the owner of `asset_id`. + /// + /// `authority` is owner of `asset_id` if: + /// - `asset_id.account_id` is `authority` + /// - `asset_id.account_id.domain_id` domain is owned by `authority` + /// + /// # Errors + /// + /// Fails if `is_account_owner` fails + pub fn is_asset_owner(asset_id: &AssetId, authority: &AccountId) -> Result<bool> { + crate::permission::account::is_account_owner(asset_id.account_id(), authority) + } + /// Pass condition that checks if `authority` is the owner of `asset_id`. #[derive(Debug, Clone)] pub struct Owner<'asset> { + /// Asset id to check against pub asset_id: &'asset AssetId, } impl PassCondition for Owner<'_> { fn validate(&self, authority: &AccountId, _block_height: u64) -> Result { - if self.asset_id.account_id() == authority { + if is_asset_owner(self.asset_id, authority)?
{ + return Ok(()); } @@ -105,16 +127,34 @@ pub mod asset_definition { use super::*; - fn is_asset_definition_owner( + /// Check if `authority` is the owner of `asset_definition_id`. + /// + /// `authority` is owner of `asset_definition_id` if: + /// - `asset_definition.owned_by` is `authority` + /// - `asset_definition.domain_id` domain is owned by `authority` + /// + /// # Errors + /// - if `FindAssetDefinitionById` fails + /// - if `is_domain_owner` fails + pub fn is_asset_definition_owner( asset_definition_id: &AssetDefinitionId, authority: &AccountId, ) -> Result<bool> { - IsAssetDefinitionOwner::new(asset_definition_id.clone(), authority.clone()).execute() + let asset_definition = FindAssetDefinitionById::new(asset_definition_id.clone()) + .execute() + .map(QueryOutputCursor::into_raw_parts) + .map(|(batch, _cursor)| batch)?; + if asset_definition.owned_by() == authority { + Ok(true) + } else { + crate::permission::domain::is_domain_owner(asset_definition_id.domain_id(), authority) + } } /// Pass condition that checks if `authority` is the owner of `asset_definition_id`. #[derive(Debug, Clone)] pub struct Owner<'asset_definition> { + /// Asset definition id to check against pub asset_definition_id: &'asset_definition AssetDefinitionId, } @@ -136,15 +176,33 @@ pub mod account { use super::*; + /// Check if `authority` is the owner of `account_id`. + /// + /// `authority` is owner of `account_id` if: + /// - `account_id` is `authority` + /// - `account_id.domain_id` is owned by `authority` + /// + /// # Errors + /// + /// Fails if `is_domain_owner` fails + pub fn is_account_owner(account_id: &AccountId, authority: &AccountId) -> Result<bool> { + if account_id == authority { + Ok(true) + } else { + crate::permission::domain::is_domain_owner(account_id.domain_id(), authority) + } + } + /// Pass condition that checks if `authority` is the owner of `account_id`. #[derive(Debug, Clone)] pub struct Owner<'asset> { + /// Account id to check against pub account_id: &'asset AccountId, } impl PassCondition for Owner<'_> { fn validate(&self, authority: &AccountId, _block_height: u64) -> Result { - if self.account_id == authority { + if is_account_owner(self.account_id, authority)? { return Ok(()); } @@ -161,26 +219,40 @@ pub mod trigger { /// Check if `authority` is the owner of `trigger_id`. /// - /// Wrapper around [`FindTriggerById`](crate::data_model::prelude::FindTriggerById) query. + /// `authority` is owner of `trigger_id` if: + /// - `trigger.action.authority` is `authority` + /// - `trigger.domain_id` is not `None` and the domain is owned by `authority` /// /// # Errors - /// - /// Fails if query fails - pub fn is_trigger_owner(trigger_id: TriggerId, authority: &AccountId) -> Result<bool> { - FindTriggerById::new(trigger_id) + /// - `FindTriggerById` fails + /// - `is_domain_owner` fails + pub fn is_trigger_owner(trigger_id: &TriggerId, authority: &AccountId) -> Result<bool> { + let trigger = FindTriggerById::new(trigger_id.clone()) .execute() - .map(|trigger| trigger.action().authority() == authority) + .map(QueryOutputCursor::into_raw_parts) + .map(|(batch, _cursor)| batch)?; + if trigger.action().authority() == authority { + Ok(true) + } else { + trigger_id + .domain_id() + .as_ref() + .map_or(Ok(false), |domain_id| { + crate::permission::domain::is_domain_owner(domain_id, authority) + }) + } } /// Pass condition that checks if `authority` is the owner of `trigger_id`.
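+ /// + /// A hedged usage sketch (not from upstream): `trigger_id`, `authority` and `block_height` are assumed bindings, and `validate` is the `PassCondition` method shown below. + /// + /// ```ignore + /// // Ok(()) if `authority` owns the trigger, ValidationFail::NotPermitted otherwise. + /// let passed = Owner { trigger_id: &trigger_id }.validate(&authority, block_height); + /// ```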
#[derive(Debug, Clone)] pub struct Owner<'trigger> { + /// Trigger id to check against pub trigger_id: &'trigger TriggerId, } impl PassCondition for Owner<'_> { fn validate(&self, authority: &AccountId, _block_height: u64) -> Result { - if is_trigger_owner(self.trigger_id.clone(), authority)? { + if is_trigger_owner(self.trigger_id, authority)? { return Ok(()); } @@ -191,6 +263,42 @@ pub mod trigger { } } +pub mod domain { + //! Module with pass conditions for domain related tokens + use super::*; + + /// Check if `authority` is the owner of `domain_id` + /// + /// # Errors + /// Fails if query fails + pub fn is_domain_owner(domain_id: &DomainId, authority: &AccountId) -> Result<bool> { + FindDomainById::new(domain_id.clone()) + .execute() + .map(QueryOutputCursor::into_raw_parts) + .map(|(batch, _cursor)| batch) + .map(|domain| domain.owned_by() == authority) + } + + /// Pass condition that checks if `authority` is the owner of `domain_id`. + #[derive(Debug, Clone)] + pub struct Owner<'domain> { + /// Domain id to check against + pub domain_id: &'domain DomainId, + } + + impl PassCondition for Owner<'_> { + fn validate(&self, authority: &AccountId, _block_height: u64) -> Result { + if is_domain_owner(self.domain_id, authority)? { + return Ok(()); + } + + Err(ValidationFail::NotPermitted( + "Can't access domain owned by another account".to_owned(), + )) + } + } +} + /// Pass condition that always passes. #[derive(Debug, Default, Copy, Clone)] pub struct AlwaysPass; diff --git a/smart_contract/src/lib.rs b/smart_contract/src/lib.rs new file mode 100644 index 00000000000..5ec0acd2083 --- /dev/null +++ b/smart_contract/src/lib.rs @@ -0,0 +1,549 @@ +//! API which simplifies writing of smart contracts +#![no_std] +#![allow(unsafe_code)] + +extern crate alloc; + +use alloc::{boxed::Box, collections::BTreeMap, vec::Vec}; + +#[cfg(not(test))] +use data_model::smart_contract::payloads; +use data_model::{ + isi::Instruction, + prelude::*, + query::{cursor::ForwardCursor, sorting::Sorting, Pagination, Query, QueryBox}, + smart_contract::SmartContractQueryRequest, + BatchedResponse, }; +use derive_more::Display; +pub use iroha_data_model as data_model; +use iroha_macro::error::ErrorTryFromEnum; +pub use iroha_smart_contract_derive::main; +pub use iroha_smart_contract_utils::{debug, log}; +use iroha_smart_contract_utils::{ + debug::DebugExpectExt as _, decode_with_length_prefix_from_raw, encode_and_execute, +}; +use parity_scale_codec::{DecodeAll, Encode}; + +#[no_mangle] +extern "C" fn _iroha_smart_contract_alloc(len: usize) -> *const u8 { + if len == 0 { + iroha_smart_contract_utils::debug::dbg_panic("Cannot allocate 0 bytes"); + } + let layout = core::alloc::Layout::array::<u8>(len).dbg_expect("Cannot allocate layout"); + // Safety: safe because `layout` is guaranteed to have non-zero size + unsafe { alloc::alloc::alloc_zeroed(layout) } +} + +/// # Safety +/// - `offset` is a pointer to a `[u8; len]` which is allocated in the WASM memory. +/// - This function can't call destructor of the encoded object. +#[no_mangle] +unsafe extern "C" fn _iroha_smart_contract_dealloc(offset: *mut u8, len: usize) { + let _box = Box::from_raw(core::slice::from_raw_parts_mut(offset, len)); +} + +/// Macro to parse literal as a type. Panics if parsing fails. +/// +/// # Example +/// +/// ``` +/// use iroha_smart_contract::{prelude::*, parse}; +/// +/// let account_id = parse!("alice@wonderland" as AccountId); +/// ``` +#[macro_export] +macro_rules!
parse { + ($l:literal as _) => { + compile_error!( + "Don't use `_` as a type in this macro, \ + otherwise panic message would be less informative" + ) + }; + ($l:literal as $t:ty) => { + $crate::debug::DebugExpectExt::dbg_expect( + $l.parse::<$t>(), + concat!("Failed to parse `", $l, "` as `", stringify!($t), "`"), + ) + }; +} + +/// Implementing instructions can be executed on the host +pub trait ExecuteOnHost: Instruction { + /// Execute instruction on the host + /// + /// # Errors + /// + /// - If instruction validation failed + /// - If instruction execution failed + fn execute(&self) -> Result<(), ValidationFail>; +} + +// TODO: Remove the Clone bound. It can be done by custom serialization to InstructionExpr +impl ExecuteOnHost for I { + fn execute(&self) -> Result<(), ValidationFail> { + #[cfg(not(test))] + use host::execute_instruction as host_execute_instruction; + #[cfg(test)] + use tests::_iroha_smart_contract_execute_instruction_mock as host_execute_instruction; + + // TODO: Redundant conversion into `InstructionExpr` + let isi_box: InstructionExpr = self.clone().into(); + // Safety: `host_execute_instruction` doesn't take ownership of it's pointer parameter + unsafe { + decode_with_length_prefix_from_raw(encode_and_execute( + &isi_box, + host_execute_instruction, + )) + } + } +} + +/// Generic query request containing additional parameters. +#[derive(Debug)] +pub struct QueryRequest { + query: Q, + sorting: Sorting, + pagination: Pagination, + fetch_size: FetchSize, +} + +impl From> for SmartContractQueryRequest { + fn from(query_request: QueryRequest) -> Self { + SmartContractQueryRequest::query( + query_request.query.into(), + query_request.sorting, + query_request.pagination, + query_request.fetch_size, + ) + } +} + +/// Implementing queries can be executed on the host +/// +/// TODO: `&self` should be enough +pub trait ExecuteQueryOnHost: Sized { + /// Query output type. + type Output; + + /// Type of [`QueryRequest`]. + type QueryRequest; + + /// Apply sorting to a query + fn sort(self, sorting: Sorting) -> Self::QueryRequest; + + /// Apply pagination to a query + fn paginate(self, pagination: Pagination) -> Self::QueryRequest; + + /// Set fetch size for a query. 
Default is [`DEFAULT_FETCH_SIZE`] + fn fetch_size(self, fetch_size: FetchSize) -> Self::QueryRequest; + + /// Execute query on the host + /// + /// # Errors + /// + /// - If query validation failed + /// - If query execution failed + fn execute(self) -> Result<QueryOutputCursor<Self::Output>, ValidationFail>; +} + +impl<Q: Query> ExecuteQueryOnHost for Q +where + Q::Output: DecodeAll, + <Q::Output as TryFrom<Value>>::Error: core::fmt::Debug, +{ + type Output = Q::Output; + type QueryRequest = QueryRequest<Self>; + + fn sort(self, sorting: Sorting) -> Self::QueryRequest { + QueryRequest { + query: self, + sorting, + pagination: Pagination::default(), + fetch_size: FetchSize::default(), + } + } + + fn paginate(self, pagination: Pagination) -> Self::QueryRequest { + QueryRequest { + query: self, + sorting: Sorting::default(), + pagination, + fetch_size: FetchSize::default(), + } + } + + fn fetch_size(self, fetch_size: FetchSize) -> Self::QueryRequest { + QueryRequest { + query: self, + sorting: Sorting::default(), + pagination: Pagination::default(), + fetch_size, + } + } + + fn execute(self) -> Result<QueryOutputCursor<Self::Output>, ValidationFail> { + QueryRequest { + query: self, + sorting: Sorting::default(), + pagination: Pagination::default(), + fetch_size: FetchSize::default(), + } + .execute() + } +} + +impl<Q: Query> ExecuteQueryOnHost for QueryRequest<Q> +where + Q::Output: DecodeAll, + <Q::Output as TryFrom<Value>>::Error: core::fmt::Debug, +{ + type Output = Q::Output; + type QueryRequest = Self; + + fn sort(mut self, sorting: Sorting) -> Self { + self.sorting = sorting; + self + } + + fn paginate(mut self, pagination: Pagination) -> Self { + self.pagination = pagination; + self + } + + fn fetch_size(mut self, fetch_size: FetchSize) -> Self::QueryRequest { + self.fetch_size = fetch_size; + self + } + + #[allow(irrefutable_let_patterns)] + fn execute(self) -> Result<QueryOutputCursor<Self::Output>, ValidationFail> { + #[cfg(not(test))] + use host::execute_query as host_execute_query; + #[cfg(test)] + use tests::_iroha_smart_contract_execute_query_mock as host_execute_query; + + let wasm_query_request = SmartContractQueryRequest::from(self); + + // Safety: - `host_execute_query` doesn't take ownership of its pointer parameter + // - ownership of the returned result is transferred into `_decode_from_raw` + let res: Result<BatchedResponse<Value>, ValidationFail> = unsafe { + decode_with_length_prefix_from_raw(encode_and_execute( + &wasm_query_request, + host_execute_query, + )) + }; + + let (value, cursor) = res?.into(); + let typed_value = Self::Output::try_from(value).expect("Query output has incorrect type"); + Ok(QueryOutputCursor { + batch: typed_value, + cursor, + }) + } +} + +/// Cursor over query results implementing [`IntoIterator`]. +/// +/// If you execute [`QueryBox`], you probably want to use the [`collect()`](Self::collect) method +/// instead of [`into_iter()`](Self::into_iter) to ensure that all results were consumed. +#[derive(Debug, Encode, PartialEq, Eq)] +pub struct QueryOutputCursor<T> { + batch: T, + cursor: ForwardCursor, +} + +impl<T> QueryOutputCursor<T> { + /// Get inner values of batch and cursor, consuming [`Self`]. + pub fn into_raw_parts(self) -> (T, ForwardCursor) { + (self.batch, self.cursor) + } +} + +impl QueryOutputCursor<Value> { + /// Same as [`into_raw_parts()`](Self::into_raw_parts) but collects all values of [`Value::Vec`] + /// in case there are some cached results left on the host side. + /// + /// # Errors + /// + /// May fail due to the same reasons [`QueryOutputCursorIterator`] can fail to iterate.
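+ /// + /// A hedged usage sketch: `FindAllAccounts` stands in for any concrete query wrapped into a [`QueryBox`]; `collect()` then drains any batches still cached on the host. + /// + /// ```ignore + /// let query: QueryBox = FindAllAccounts.into(); + /// let all_accounts: Value = query.execute()?.collect()?; + /// ```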
+ pub fn collect(self) -> Result<Value, QueryOutputCursorError<Vec<Value>>> { + let Value::Vec(v) = self.batch else { + return Ok(self.batch) + }; + + // Making sure we received all values + let cursor = QueryOutputCursor { + batch: v, + cursor: self.cursor, + }; + cursor + .into_iter() + .collect::<Result<Vec<Value>, _>>() + .map(Value::Vec) + } +} + +impl<T: TryFrom<Value>> IntoIterator for QueryOutputCursor<Vec<T>> { + type Item = Result<T, QueryOutputCursorError<Vec<T>>>; + type IntoIter = QueryOutputCursorIterator<T>; + + fn into_iter(self) -> Self::IntoIter { + QueryOutputCursorIterator { + iter: self.batch.into_iter(), + cursor: self.cursor, + } + } +} + +/// Iterator over query results. +/// +/// # Errors +/// +/// Iteration may fail due to the following reasons: +/// +/// - Failed to get next batch of results from the host +/// - Failed to convert batch of results into the requested type +/// +/// # Panics +/// +/// Panics if response from host is not [`BatchedResponse::V1`]. +pub struct QueryOutputCursorIterator<T> { + iter: <Vec<T> as IntoIterator>::IntoIter, + cursor: ForwardCursor, +} + +impl<T: TryFrom<Value>> QueryOutputCursorIterator<T> { + #[allow(irrefutable_let_patterns)] + fn next_batch(&self) -> Result<Self, QueryOutputCursorError<Vec<T>>> { + #[cfg(not(test))] + use host::execute_query as host_execute_query; + #[cfg(test)] + use tests::_iroha_smart_contract_execute_query_mock as host_execute_query; + + let wasm_query_request = SmartContractQueryRequest::cursor(self.cursor.clone()); + + // Safety: - `host_execute_query` doesn't take ownership of its pointer parameter + // - ownership of the returned result is transferred into `_decode_from_raw` + let res: Result<BatchedResponse<Value>, ValidationFail> = unsafe { + decode_with_length_prefix_from_raw(encode_and_execute( + &wasm_query_request, + host_execute_query, + )) + }; + let (value, cursor) = res?.into(); + let vec = Vec::<T>::try_from(value)?; + Ok(Self { + iter: vec.into_iter(), + cursor, + }) + } +} + +impl<T: TryFrom<Value>> Iterator for QueryOutputCursorIterator<T> { + type Item = Result<T, QueryOutputCursorError<Vec<T>>>; + + fn next(&mut self) -> Option<Self::Item> { + if let Some(item) = self.iter.next() { + return Some(Ok(item)); + } + + let mut next_iter = match self.next_batch() { + Ok(next_iter) => next_iter, + Err(QueryOutputCursorError::Validation(ValidationFail::QueryFailed( + iroha_data_model::query::error::QueryExecutionFail::UnknownCursor, + ))) => return None, + Err(err) => return Some(Err(err)), + }; + + core::mem::swap(self, &mut next_iter); + self.iter.next().map(Ok) + } +} + +/// Error iterating over query results. +#[derive(Debug, Display, iroha_macro::FromVariant)] +pub enum QueryOutputCursorError<T> { + /// Validation error on the host side during next batch retrieval. + Validation(ValidationFail), + /// Host returned unexpected output type.
+ Conversion(ErrorTryFromEnum<Value, T>), +} + +/// World state view of the host +#[derive(Debug, Clone, Copy)] +pub struct Host; + +impl iroha_data_model::evaluate::ExpressionEvaluator for Host { + fn evaluate<E: Evaluate>( + &self, + expression: &E, + ) -> Result<E::Value, iroha_data_model::evaluate::EvaluationError> { + expression.evaluate(&Context::new()) + } +} + +/// Context of expression evaluation +#[derive(Clone, Default)] +#[repr(transparent)] +pub struct Context { + values: BTreeMap<Name, Value>, +} + +impl Context { + /// Create new [`Self`] + pub fn new() -> Self { + Self { + values: BTreeMap::new(), + } + } +} + +impl iroha_data_model::evaluate::Context for Context { + fn query(&self, query: &QueryBox) -> Result<Value, ValidationFail> { + let value_cursor = query.clone().execute()?; + match value_cursor.collect() { + Ok(value) => Ok(value), + Err(QueryOutputCursorError::Validation(err)) => Err(err), + Err(QueryOutputCursorError::Conversion(err)) => { + panic!("Conversion error during collecting query result: {err:?}") + } + } + } + + fn get(&self, name: &Name) -> Option<&Value> { + self.values.get(name) + } + + fn update(&mut self, other: impl IntoIterator<Item = (Name, Value)>) { + self.values.extend(other) + } +} + +/// Get payload for smart contract `main()` entrypoint. +#[cfg(not(test))] +pub fn get_smart_contract_payload() -> payloads::SmartContract { + // Safety: ownership of the returned result is transferred into `_decode_from_raw` + unsafe { decode_with_length_prefix_from_raw(host::get_smart_contract_payload()) } +} + +#[cfg(not(test))] +mod host { + #[link(wasm_import_module = "iroha")] + extern "C" { + /// Execute encoded query by providing offset and length + /// into WebAssembly's linear memory where query is stored + /// + /// # Warning + /// + /// This function doesn't take ownership of the provided allocation + /// but it does transfer ownership of the result to the caller + pub(super) fn execute_query(ptr: *const u8, len: usize) -> *const u8; + + /// Execute encoded instruction by providing offset and length + /// into WebAssembly's linear memory where instruction is stored + /// + /// # Warning + /// + /// This function doesn't take ownership of the provided allocation + /// but it does transfer ownership of the result to the caller + pub(super) fn execute_instruction(ptr: *const u8, len: usize) -> *const u8; + + /// Get payload for smart contract `main()` entrypoint.
+ /// + /// # Warning + /// + /// This function does transfer ownership of the result to the caller + pub(super) fn get_smart_contract_payload() -> *const u8; + } +} + +/// Most used items +pub mod prelude { + pub use iroha_smart_contract_derive::main; + pub use iroha_smart_contract_utils::debug::DebugUnwrapExt; + + pub use crate::{data_model::prelude::*, ExecuteOnHost, ExecuteQueryOnHost}; +} + +#[cfg(test)] +mod tests { + use core::{mem::ManuallyDrop, slice}; + + use data_model::{query::asset::FindAssetQuantityById, BatchedResponseV1}; + use iroha_smart_contract_utils::encode_with_length_prefix; + use webassembly_test::webassembly_test; + + use super::*; + + const QUERY_RESULT: Result<QueryOutputCursor<Value>, ValidationFail> = Ok(QueryOutputCursor { + batch: Value::Numeric(NumericValue::U32(1234_u32)), + cursor: ForwardCursor::new(None, None), + }); + const ISI_RESULT: Result<(), ValidationFail> = Ok(()); + const EXPRESSION_RESULT: NumericValue = NumericValue::U32(5_u32); + + fn get_test_instruction() -> InstructionExpr { + let new_account_id = "mad_hatter@wonderland".parse().expect("Valid"); + let register_isi = RegisterExpr::new(Account::new(new_account_id, [])); + + register_isi.into() + } + + fn get_test_query() -> QueryBox { + let asset_id: AssetId = "rose##alice@wonderland".parse().expect("Valid"); + FindAssetQuantityById::new(asset_id).into() + } + + fn get_test_expression() -> EvaluatesTo<NumericValue> { + Add::new(2_u32, 3_u32).into() + } + + #[no_mangle] + pub unsafe extern "C" fn _iroha_smart_contract_execute_instruction_mock( + ptr: *const u8, + len: usize, + ) -> *const u8 { + let bytes = slice::from_raw_parts(ptr, len); + let instruction = InstructionExpr::decode_all(&mut &*bytes); + assert_eq!(get_test_instruction(), instruction.unwrap()); + + ManuallyDrop::new(encode_with_length_prefix(&ISI_RESULT)).as_ptr() + } + + #[no_mangle] + pub unsafe extern "C" fn _iroha_smart_contract_execute_query_mock( + ptr: *const u8, + len: usize, + ) -> *const u8 { + let bytes = slice::from_raw_parts(ptr, len); + let query_request = SmartContractQueryRequest::decode_all(&mut &*bytes).unwrap(); + let query = query_request.unwrap_query().0; + assert_eq!(query, get_test_query()); + + let response: Result<BatchedResponse<Value>, ValidationFail> = Ok(BatchedResponseV1::new( + QUERY_RESULT.unwrap().into_raw_parts().0, + ForwardCursor::new(None, None), + ) + .into()); + ManuallyDrop::new(encode_with_length_prefix(&response)).as_ptr() + } + + #[webassembly_test] + fn execute_instruction() { + get_test_instruction().execute().unwrap(); + } + + #[webassembly_test] + fn execute_query() { + assert_eq!(get_test_query().execute(), QUERY_RESULT); + } + + #[webassembly_test] + fn evaluate_expression() { + assert_eq!( + get_test_expression().evaluate(&Context::new()), + Ok(EXPRESSION_RESULT) + ); + } +} diff --git a/wasm/trigger/Cargo.toml b/smart_contract/trigger/Cargo.toml similarity index 50% rename from wasm/trigger/Cargo.toml rename to smart_contract/trigger/Cargo.toml index 117b4585933..72da4d0677f 100644 --- a/wasm/trigger/Cargo.toml +++ b/smart_contract/trigger/Cargo.toml @@ -6,13 +6,15 @@ authors.workspace = true edition.workspace = true license.workspace = true +[lints] +workspace = true + [features] # Enables debugging tools such as `dbg()` and `DebugUnwrapExt` -debug = ["iroha_wasm/debug"] +debug = ["iroha_smart_contract/debug"] [dependencies] -iroha_wasm = { version = "2.0.0-pre-rc.18", path = ".."
} -iroha_trigger_derive = { version = "2.0.0-pre-rc.18", path = "derive" } - -[dev-dependencies] -webassembly-test.workspace = true +iroha_smart_contract_utils.workspace = true +iroha_smart_contract.workspace = true +iroha_trigger_derive.workspace = true +iroha_data_model.workspace = true diff --git a/wasm/trigger/derive/Cargo.toml b/smart_contract/trigger/derive/Cargo.toml similarity index 88% rename from wasm/trigger/derive/Cargo.toml rename to smart_contract/trigger/derive/Cargo.toml index 2dedb97d33c..486eaa75ad7 100644 --- a/wasm/trigger/derive/Cargo.toml +++ b/smart_contract/trigger/derive/Cargo.toml @@ -7,12 +7,14 @@ edition.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true [dependencies] -iroha_data_model.workspace = true syn.workspace = true quote.workspace = true proc-macro2.workspace = true diff --git a/wasm/trigger/derive/src/entrypoint.rs b/smart_contract/trigger/derive/src/entrypoint.rs similarity index 76% rename from wasm/trigger/derive/src/entrypoint.rs rename to smart_contract/trigger/derive/src/entrypoint.rs index 1cecaba0f58..e045d262b5a 100644 --- a/wasm/trigger/derive/src/entrypoint.rs +++ b/smart_contract/trigger/derive/src/entrypoint.rs @@ -2,6 +2,10 @@ use super::*; +mod export { + pub const TRIGGER_MAIN: &str = "_iroha_trigger_main"; +} + /// [`main`](super::main()) macro implementation #[allow(clippy::needless_pass_by_value)] pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { @@ -22,29 +26,27 @@ pub fn impl_entrypoint(_attr: TokenStream, item: TokenStream) -> TokenStream { block.stmts.insert( 0, parse_quote!( - use ::iroha_trigger::iroha_wasm::{ - debug::DebugExpectExt as _, ExecuteOnHost as _, QueryHost as _, + use ::iroha_trigger::smart_contract::{ + debug::DebugExpectExt as _, ExecuteOnHost as _, ExecuteQueryOnHost as _, }; ), ); - let main_fn_name = syn::Ident::new( - iroha_data_model::wasm::export::fn_names::TRIGGER_MAIN, - proc_macro2::Span::call_site(), - ); + let main_fn_name = syn::Ident::new(export::TRIGGER_MAIN, proc_macro2::Span::call_site()); quote! { /// Smart contract entrypoint #[no_mangle] #[doc(hidden)] unsafe extern "C" fn #main_fn_name() { - let payload = ::iroha_trigger::iroha_wasm::get_trigger_payload(); + let payload = ::iroha_trigger::get_trigger_payload(); #fn_name(payload.owner, payload.event) } // NOTE: Host objects are always passed by value to wasm #[allow(clippy::needless_pass_by_value)] #(#attrs)* + #[inline] #vis #sig #block } diff --git a/wasm/trigger/derive/src/lib.rs b/smart_contract/trigger/derive/src/lib.rs similarity index 96% rename from wasm/trigger/derive/src/lib.rs rename to smart_contract/trigger/derive/src/lib.rs index 5c5a30e0397..71a0fb417c6 100644 --- a/wasm/trigger/derive/src/lib.rs +++ b/smart_contract/trigger/derive/src/lib.rs @@ -1,7 +1,5 @@ //! Crate with trigger procedural macros. -#![allow(clippy::panic)] - use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, parse_quote}; diff --git a/smart_contract/trigger/src/lib.rs b/smart_contract/trigger/src/lib.rs new file mode 100644 index 00000000000..84bb1d1f76a --- /dev/null +++ b/smart_contract/trigger/src/lib.rs @@ -0,0 +1,45 @@ +//! 
Iroha Trigger Rust SDK +#![no_std] +#![allow(unsafe_code)] + +#[cfg(not(test))] +use data_model::smart_contract::payloads; +pub use iroha_data_model as data_model; +pub use iroha_smart_contract as smart_contract; +pub use iroha_smart_contract_utils::debug; +#[cfg(not(test))] +use iroha_smart_contract_utils::decode_with_length_prefix_from_raw; +pub use iroha_trigger_derive::main; + +pub mod log { + //! WASM logging utilities + pub use iroha_smart_contract_utils::{debug, error, event, info, log::*, trace, warn}; +} + +#[cfg(not(test))] +mod host { + #[link(wasm_import_module = "iroha")] + extern "C" { + /// Get payload for trigger `main()` entrypoint. + /// + /// # Warning + /// + /// This function does transfer ownership of the result to the caller + pub(super) fn get_trigger_payload() -> *const u8; + } +} + +/// Get payload for trigger `main()` entrypoint. +#[cfg(not(test))] +pub fn get_trigger_payload() -> payloads::Trigger { + // Safety: ownership of the returned result is transferred into `_decode_from_raw` + unsafe { decode_with_length_prefix_from_raw(host::get_trigger_payload()) } +} + +pub mod prelude { + //! Common imports used by triggers + + pub use iroha_smart_contract::{data_model::prelude::*, prelude::*}; + pub use iroha_smart_contract_utils::debug::DebugUnwrapExt; + pub use iroha_trigger_derive::main; +} diff --git a/wasm/validator/derive/Cargo.toml b/smart_contract/utils/Cargo.toml similarity index 53% rename from wasm/validator/derive/Cargo.toml rename to smart_contract/utils/Cargo.toml index 19eb1ea27da..97f8a59d60d 100644 --- a/wasm/validator/derive/Cargo.toml +++ b/smart_contract/utils/Cargo.toml @@ -1,18 +1,19 @@ [package] -name = "iroha_validator_derive" +name = "iroha_smart_contract_utils" +edition.workspace = true version.workspace = true authors.workspace = true -edition.workspace = true license.workspace = true - -[lib] -proc-macro = true +[lints] +workspace = true [dependencies] iroha_data_model.workspace = true -syn.workspace = true -quote.workspace = true -proc-macro2.workspace = true + +parity-scale-codec.workspace = true + +[dev-dependencies] +webassembly-test = "0.1.0" diff --git a/wasm/src/debug.rs b/smart_contract/utils/src/debug.rs similarity index 91% rename from wasm/src/debug.rs rename to smart_contract/utils/src/debug.rs index 0f81f10336d..be59f7c0a2e 100644 --- a/wasm/src/debug.rs +++ b/smart_contract/utils/src/debug.rs @@ -2,9 +2,6 @@ use core::fmt::Debug; -#[cfg(feature = "debug")] -use super::*; - #[cfg(not(test))] mod host { #[cfg(feature = "debug")] @@ -35,7 +32,7 @@ pub fn dbg(_obj: &T) { #[allow(clippy::used_underscore_binding)] let s = format!("{:?}", _obj); // Safety: `host_dbg` doesn't take ownership of it's pointer parameter - unsafe { encode_and_execute(&s, host_dbg) } + unsafe { iroha_smart_contract_utils::encode_and_execute(&s, host_dbg) } } } @@ -45,7 +42,6 @@ pub fn dbg(_obj: &T) { /// /// # Panics /// Always -#[allow(clippy::panic)] pub fn dbg_panic(msg: &str) -> ! 
{ dbg(msg); panic!() } @@ -66,10 +62,8 @@ pub trait DebugUnwrapExt { impl<T: Debug, E: Debug> DebugUnwrapExt for Result<T, E> { type Output = T; - #[allow(clippy::panic)] fn dbg_unwrap(self) -> Self::Output { #[cfg(not(feature = "debug"))] - #[allow(clippy::unwrap_used)] return self.unwrap(); #[cfg(feature = "debug")] @@ -90,10 +84,8 @@ impl<T: Debug, E: Debug> DebugUnwrapExt for Result<T, E> { impl<T: Debug> DebugUnwrapExt for Option<T> { type Output = T; - #[allow(clippy::panic, clippy::single_match_else, clippy::option_if_let_else)] fn dbg_unwrap(self) -> Self::Output { #[cfg(not(feature = "debug"))] - #[allow(clippy::unwrap_used)] return self.unwrap(); #[cfg(feature = "debug")] @@ -169,15 +161,17 @@ mod tests { use webassembly_test::webassembly_test; - use crate::_decode_from_raw; - fn get_dbg_message() -> &'static str { "dbg_message" } #[no_mangle] pub unsafe extern "C" fn _dbg_mock(ptr: *const u8, len: usize) { - assert_eq!(_decode_from_raw::<String>(ptr, len), get_dbg_message()); + use parity_scale_codec::DecodeAll; + + // can't use _decode_from_raw here, because we must NOT take the ownership + let bytes = core::slice::from_raw_parts(ptr, len); + assert_eq!(String::decode_all(&mut &*bytes).unwrap(), get_dbg_message()); } #[webassembly_test] diff --git a/smart_contract/utils/src/lib.rs b/smart_contract/utils/src/lib.rs new file mode 100644 index 00000000000..ec9f70a242e --- /dev/null +++ b/smart_contract/utils/src/lib.rs @@ -0,0 +1,120 @@ +//! Crate with utilities for implementing smart contract FFI +#![no_std] +#![allow(unsafe_code)] + +extern crate alloc; + +use alloc::{boxed::Box, format, vec::Vec}; +use core::ops::RangeFrom; + +use parity_scale_codec::{DecodeAll, Encode}; + +pub mod debug; +pub mod log; + +/// Decode the object from given pointer and length +/// +/// # Warning +/// +/// This method takes ownership of the given pointer +/// +/// # Safety +/// +/// It's safe to call this function as long as it's safe to construct, from the given +/// pointer, `Box<[u8]>` containing the encoded object +unsafe fn _decode_from_raw<T: DecodeAll>(ptr: *const u8, len: usize) -> T { + _decode_from_raw_in_range(ptr, len, 0..) +} + +/// Decode the object from given pointer and length in the given range +/// +/// # Warning +/// +/// This method takes ownership of the given pointer +/// +/// # Safety +/// +/// It's safe to call this function as long as it's safe to construct, from the given +/// pointer, `Box<[u8]>` containing the encoded object +unsafe fn _decode_from_raw_in_range<T: DecodeAll>( + ptr: *const u8, + len: usize, + range: RangeFrom<usize>, +) -> T { + let bytes = Box::from_raw(core::slice::from_raw_parts_mut(ptr.cast_mut(), len)); + + #[allow(clippy::expect_fun_call)] + T::decode_all(&mut &bytes[range]).expect( + format!( + "Decoding of {} failed. This is a bug", + core::any::type_name::<T>() + ) + .as_str(), + ) +} + +/// Decode the object from given pointer where first element is the size of the object +/// following it. This can be considered a custom encoding format. +/// +/// # Warning +/// +/// This method takes ownership of the given pointer +/// +/// # Safety +/// +/// It's safe to call this function as long as it's safe to construct, from the given +/// pointer, byte array of prefix length and `Box<[u8]>` containing the encoded object +pub unsafe fn decode_with_length_prefix_from_raw<T: DecodeAll>(ptr: *const u8) -> T { + let len_size_bytes = core::mem::size_of::<usize>(); + + let len = usize::from_le_bytes( + core::slice::from_raw_parts(ptr, len_size_bytes) + .try_into() + .expect("Prefix length size(bytes) incorrect. This is a bug."), + ); + + _decode_from_raw_in_range(ptr, len, len_size_bytes..)
+} + +/// Encode the given object and call the given function with the pointer and length of the allocation +/// +/// # Warning +/// +/// Ownership of the returned allocation is transferred to the caller +/// +/// # Safety +/// +/// The given function must not take ownership of the pointer argument +pub unsafe fn encode_and_execute<T: Encode, O>( + obj: &T, + fun: unsafe extern "C" fn(*const u8, usize) -> O, +) -> O { + // NOTE: It's imperative that encoded object is stored on the heap + // because heap corresponds to linear memory when compiled to wasm + let bytes = obj.encode(); + + fun(bytes.as_ptr(), bytes.len()) +} + +/// Encode the given `val` as a vector of bytes with the size of the object at the beginning +// +// TODO: Write a separate crate for codec/protocol between Iroha and smartcontract +pub fn encode_with_length_prefix<T: Encode>(val: &T) -> Box<[u8]> { + let len_size_bytes = core::mem::size_of::<usize>(); + + let mut r = Vec::with_capacity( + len_size_bytes + .checked_add(val.size_hint()) + .expect("Overflow during length computation"), + ); + + // Reserve space for length + r.resize(len_size_bytes, 0); + val.encode_to(&mut r); + + // Store length of the whole vector as byte array at the beginning of the vec + let len = r.len(); + r[..len_size_bytes].copy_from_slice(&len.to_le_bytes()); + + r.into_boxed_slice() +} diff --git a/wasm/src/log.rs b/smart_contract/utils/src/log.rs similarity index 91% rename from wasm/src/log.rs rename to smart_contract/utils/src/log.rs index 30cf22d3c58..bc1372ed86c 100644 --- a/wasm/src/log.rs +++ b/smart_contract/utils/src/log.rs @@ -3,6 +3,7 @@ pub use iroha_data_model::Level; use super::*; + #[cfg(not(test))] mod host { #[link(wasm_import_module = "iroha")] @@ -88,7 +89,6 @@ mod tests { use webassembly_test::webassembly_test; use super::*; - use crate::_decode_from_raw; fn get_log_message() -> &'static str { "log_message" } #[no_mangle] pub unsafe extern "C" fn _log_mock(ptr: *const u8, len: usize) { - let (log_level, msg) = _decode_from_raw::<(u8, String)>(ptr, len); + // can't use _decode_from_raw here, because we must NOT take the ownership + let bytes = core::slice::from_raw_parts(ptr, len); + let (log_level, msg) = <(u8, String)>::decode_all(&mut &*bytes).unwrap(); assert_eq!(log_level, 3); assert_eq!(msg, get_log_message()); } diff --git a/substrate/Cargo.toml b/substrate/Cargo.toml index f1ce840ced3..184b9e572ac 100644 --- a/substrate/Cargo.toml +++ b/substrate/Cargo.toml @@ -7,4 +7,7 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [dependencies] diff --git a/telemetry/Cargo.toml b/telemetry/Cargo.toml index d63dd4f830f..4289c80e87d 100644 --- a/telemetry/Cargo.toml +++ b/telemetry/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [features] # Support developer-specific telemetry. # Should not be enabled on production builds.
@@ -21,7 +24,7 @@ iroha_futures = { workspace = true, features = ["telemetry"] } iroha_telemetry_derive = { workspace = true } async-trait = { workspace = true } -chrono = "0.4.23" +chrono = "0.4.31" eyre = { workspace = true } futures = { workspace = true, features = ["std", "async-await"] } serde_json = { workspace = true } @@ -32,9 +35,13 @@ tokio-stream = { workspace = true, features = ["fs"] } tokio-tungstenite = { workspace = true } url = { workspace = true, features = ["serde"] } prometheus = { workspace = true } - +parity-scale-codec = { workspace = true } [build-dependencies] eyre = { workspace = true } vergen = { workspace = true, features = ["cargo", "git", "gitoxide"] } +[dev-dependencies] +expect-test = { workspace = true } +hex = { workspace = true } + diff --git a/telemetry/derive/Cargo.toml b/telemetry/derive/Cargo.toml index 6348a5683a7..b2c53970a87 100644 --- a/telemetry/derive/Cargo.toml +++ b/telemetry/derive/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true diff --git a/telemetry/derive/src/lib.rs b/telemetry/derive/src/lib.rs index 44e50792061..779b3cdba0b 100644 --- a/telemetry/derive/src/lib.rs +++ b/telemetry/derive/src/lib.rs @@ -1,8 +1,6 @@ //! Attribute-like macro for instrumenting `isi` for `prometheus` //! metrics. See [`macro@metrics`] for more details. -#![allow(clippy::std_instead_of_core)] - use proc_macro::TokenStream; #[cfg(feature = "metric-instrumentation")] use proc_macro2::TokenStream as TokenStream2; @@ -30,7 +28,6 @@ fn type_has_metrics_field(ty: &Type) -> bool { // more than one way. Type::Path(pth) => { let Path { segments, .. } = pth.path.clone(); - #[allow(clippy::expect_used)] let type_name = &segments .last() .expect("Should have at least one segment") @@ -114,7 +111,7 @@ impl Parse for MetricSpec { impl ToTokens for MetricSpec { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { - self.metric_name.to_tokens(tokens) + self.metric_name.to_tokens(tokens); } } @@ -150,7 +147,6 @@ impl ToTokens for MetricSpec { /// ``` #[proc_macro_error] #[proc_macro_attribute] -#[allow(clippy::str_to_string)] pub fn metrics(attr: TokenStream, item: TokenStream) -> TokenStream { let ItemFn { attrs, @@ -172,7 +168,6 @@ pub fn metrics(attr: TokenStream, item: TokenStream) -> TokenStream { syn::ReturnType::Type(_, typ) => match *typ { Type::Path(pth) => { let Path { segments, .. } = pth.path; - #[allow(clippy::expect_used)] let type_name = &segments.last().expect("non-empty path").ident; if *type_name != "Result" { abort!( diff --git a/telemetry/derive/tests/ui_fail/args_no_wsv.rs b/telemetry/derive/tests/ui_fail/args_no_wsv.rs index 388231c5a8b..d85e46478e6 100644 --- a/telemetry/derive/tests/ui_fail/args_no_wsv.rs +++ b/telemetry/derive/tests/ui_fail/args_no_wsv.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] @@ -7,6 +6,5 @@ fn execute(_wsv: &World) -> Result<(), ()> { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _world = WorldStateView::new(iroha_core::prelude::World::default(), kura); + } diff --git a/telemetry/derive/tests/ui_fail/args_no_wsv.stderr b/telemetry/derive/tests/ui_fail/args_no_wsv.stderr index 4e229217d34..4aa2e1da1c3 100644 --- a/telemetry/derive/tests/ui_fail/args_no_wsv.stderr +++ b/telemetry/derive/tests/ui_fail/args_no_wsv.stderr @@ -1,5 +1,5 @@ error: At least one argument must be a `WorldStateView`. 
- --> tests/ui_fail/args_no_wsv.rs:5:12 + --> tests/ui_fail/args_no_wsv.rs:4:12 | -5 | fn execute(_wsv: &World) -> Result<(), ()> { +4 | fn execute(_wsv: &World) -> Result<(), ()> { | ^^^^^^^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/bare_spec.rs b/telemetry/derive/tests/ui_fail/bare_spec.rs index 5ea32d7fb4e..bb6029fddf1 100644 --- a/telemetry/derive/tests/ui_fail/bare_spec.rs +++ b/telemetry/derive/tests/ui_fail/bare_spec.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(test_query, "another_test_query_without_timing")] @@ -7,6 +6,4 @@ fn execute(wsv: &WorldStateView) -> Result<(), ()> { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _world = WorldStateView::new(iroha_core::prelude::World::default(), kura); } diff --git a/telemetry/derive/tests/ui_fail/bare_spec.stderr b/telemetry/derive/tests/ui_fail/bare_spec.stderr index 72c5f11a848..0bb06d93ac6 100644 --- a/telemetry/derive/tests/ui_fail/bare_spec.stderr +++ b/telemetry/derive/tests/ui_fail/bare_spec.stderr @@ -1,5 +1,5 @@ error: expected literal - --> tests/ui_fail/bare_spec.rs:4:11 + --> tests/ui_fail/bare_spec.rs:3:11 | -4 | #[metrics(test_query, "another_test_query_without_timing")] +3 | #[metrics(test_query, "another_test_query_without_timing")] | ^^^^^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/doubled_plus.rs b/telemetry/derive/tests/ui_fail/doubled_plus.rs index 58fd7eae068..61db9e0dda1 100644 --- a/telemetry/derive/tests/ui_fail/doubled_plus.rs +++ b/telemetry/derive/tests/ui_fail/doubled_plus.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", ++"another_test_query_without_timing")] @@ -7,6 +6,5 @@ fn execute(wsv: &WorldStateView) -> Result<(), ()> { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _world = WorldStateView::new(iroha_core::prelude::World::default(), kura); + } diff --git a/telemetry/derive/tests/ui_fail/doubled_plus.stderr b/telemetry/derive/tests/ui_fail/doubled_plus.stderr index becb8bf32c5..751d4f27b17 100644 --- a/telemetry/derive/tests/ui_fail/doubled_plus.stderr +++ b/telemetry/derive/tests/ui_fail/doubled_plus.stderr @@ -1,5 +1,5 @@ error: expected literal - --> tests/ui_fail/doubled_plus.rs:4:27 + --> tests/ui_fail/doubled_plus.rs:3:27 | -4 | #[metrics(+"test_query", ++"another_test_query_without_timing")] +3 | #[metrics(+"test_query", ++"another_test_query_without_timing")] | ^ diff --git a/telemetry/derive/tests/ui_fail/no_args.rs b/telemetry/derive/tests/ui_fail/no_args.rs index 00a0f60dd1f..73c27db3bab 100644 --- a/telemetry/derive/tests/ui_fail/no_args.rs +++ b/telemetry/derive/tests/ui_fail/no_args.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] @@ -7,6 +6,4 @@ fn execute() -> Result<(), ()> { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _world = WorldStateView::new(iroha_core::prelude::World::default(), kura); } diff --git a/telemetry/derive/tests/ui_fail/no_args.stderr b/telemetry/derive/tests/ui_fail/no_args.stderr index 5b1e88c34d2..bf2d6e9b557 100644 --- a/telemetry/derive/tests/ui_fail/no_args.stderr +++ b/telemetry/derive/tests/ui_fail/no_args.stderr @@ -1,5 +1,5 @@ error: Function must have at least one argument of type `WorldStateView`. 
- --> tests/ui_fail/no_args.rs:5:1 + --> tests/ui_fail/no_args.rs:4:1 | -5 | fn execute() -> Result<(), ()> { +4 | fn execute() -> Result<(), ()> { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/non_snake_case_name.rs b/telemetry/derive/tests/ui_fail/non_snake_case_name.rs index 065cf621c56..97c83ab152f 100644 --- a/telemetry/derive/tests/ui_fail/non_snake_case_name.rs +++ b/telemetry/derive/tests/ui_fail/non_snake_case_name.rs @@ -1,5 +1,3 @@ -#![allow(unused_imports)] // Unused because macro will no generate anything -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(+"test query", "another_test_query_without_timing")] diff --git a/telemetry/derive/tests/ui_fail/non_snake_case_name.stderr b/telemetry/derive/tests/ui_fail/non_snake_case_name.stderr index 2b2d9f0ee51..6bb8fe44028 100644 --- a/telemetry/derive/tests/ui_fail/non_snake_case_name.stderr +++ b/telemetry/derive/tests/ui_fail/non_snake_case_name.stderr @@ -1,7 +1,7 @@ error: Spaces are not allowed. Use underscores '_' - --> tests/ui_fail/non_snake_case_name.rs:5:1 + --> tests/ui_fail/non_snake_case_name.rs:3:1 | -5 | #[metrics(+"test query", "another_test_query_without_timing")] +3 | #[metrics(+"test query", "another_test_query_without_timing")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: this error originates in the attribute macro `metrics` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/telemetry/derive/tests/ui_fail/not_execute.rs b/telemetry/derive/tests/ui_fail/not_execute.rs index 4d300ed88fa..7a63c17d08f 100644 --- a/telemetry/derive/tests/ui_fail/not_execute.rs +++ b/telemetry/derive/tests/ui_fail/not_execute.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::{World, WorldStateView}; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] @@ -7,7 +6,5 @@ fn exequte(wsv: &WorldStateView) -> Result<(), ()> { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _something: World = World::default(); - let _world = WorldStateView::new(_something, kura); + } diff --git a/telemetry/derive/tests/ui_fail/not_execute.stderr b/telemetry/derive/tests/ui_fail/not_execute.stderr index 4146a4ab62b..7f4ab728dae 100644 --- a/telemetry/derive/tests/ui_fail/not_execute.stderr +++ b/telemetry/derive/tests/ui_fail/not_execute.stderr @@ -1,5 +1,5 @@ error: Function should be an `impl execute` - --> tests/ui_fail/not_execute.rs:5:4 + --> tests/ui_fail/not_execute.rs:4:4 | -5 | fn exequte(wsv: &WorldStateView) -> Result<(), ()> { +4 | fn exequte(wsv: &WorldStateView) -> Result<(), ()> { | ^^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/not_return_result.rs b/telemetry/derive/tests/ui_fail/not_return_result.rs index 18fbf19d0ca..ca779d8e5ec 100644 --- a/telemetry/derive/tests/ui_fail/not_return_result.rs +++ b/telemetry/derive/tests/ui_fail/not_return_result.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::{World, WorldStateView}; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] @@ -7,8 +6,6 @@ fn execute(_wsv: &WorldStateView) -> iroha_core::RESULT { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _something: World = World::default(); - let _world = WorldStateView::new(_something, kura); + } diff --git a/telemetry/derive/tests/ui_fail/not_return_result.stderr b/telemetry/derive/tests/ui_fail/not_return_result.stderr index 37ef12869a2..6652f72014d 100644 --- 
a/telemetry/derive/tests/ui_fail/not_return_result.stderr +++ b/telemetry/derive/tests/ui_fail/not_return_result.stderr @@ -1,5 +1,5 @@ error: Should return `Result`. Found RESULT - --> tests/ui_fail/not_return_result.rs:5:50 + --> tests/ui_fail/not_return_result.rs:4:50 | -5 | fn execute(_wsv: &WorldStateView) -> iroha_core::RESULT { +4 | fn execute(_wsv: &WorldStateView) -> iroha_core::RESULT { | ^^^^^^ diff --git a/telemetry/derive/tests/ui_fail/return_nothing.rs b/telemetry/derive/tests/ui_fail/return_nothing.rs index 759960622a6..419325ac0ba 100644 --- a/telemetry/derive/tests/ui_fail/return_nothing.rs +++ b/telemetry/derive/tests/ui_fail/return_nothing.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing")] @@ -7,7 +6,5 @@ fn execute(wsv: &WorldStateView) { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _something: iroha_core::wsv::World = iroha_core::wsv::World::default(); - let _world = WorldStateView::new(_something, kura); + } diff --git a/telemetry/derive/tests/ui_fail/return_nothing.stderr b/telemetry/derive/tests/ui_fail/return_nothing.stderr index f92d23ee205..93385e20c54 100644 --- a/telemetry/derive/tests/ui_fail/return_nothing.stderr +++ b/telemetry/derive/tests/ui_fail/return_nothing.stderr @@ -1,7 +1,7 @@ error: `Fn` must return `Result`. Returns nothing instead. - --> tests/ui_fail/return_nothing.rs:4:1 + --> tests/ui_fail/return_nothing.rs:3:1 | -4 | #[metrics(+"test_query", "another_test_query_without_timing")] +3 | #[metrics(+"test_query", "another_test_query_without_timing")] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: this error originates in the attribute macro `metrics` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/telemetry/derive/tests/ui_fail/trailing_plus.rs b/telemetry/derive/tests/ui_fail/trailing_plus.rs index c692d625ae1..3034f0c7f1d 100644 --- a/telemetry/derive/tests/ui_fail/trailing_plus.rs +++ b/telemetry/derive/tests/ui_fail/trailing_plus.rs @@ -1,4 +1,3 @@ -use iroha_core::wsv::WorldStateView; use iroha_telemetry_derive::metrics; #[metrics(+"test_query", "another_test_query_without_timing"+)] @@ -7,7 +6,5 @@ fn execute(wsv: &WorldStateView) -> Result<(), ()> { } fn main() { - let kura = iroha_core::kura::Kura::blank_kura_for_testing(); - let _something: iroha_core::wsv::World = iroha_core::wsv::World::default(); - let _world = WorldStateView::new(_something, kura); + } diff --git a/telemetry/derive/tests/ui_fail/trailing_plus.stderr b/telemetry/derive/tests/ui_fail/trailing_plus.stderr index f613692ad1c..6378d3ff10d 100644 --- a/telemetry/derive/tests/ui_fail/trailing_plus.stderr +++ b/telemetry/derive/tests/ui_fail/trailing_plus.stderr @@ -1,5 +1,5 @@ error: expected `,` - --> tests/ui_fail/trailing_plus.rs:4:61 + --> tests/ui_fail/trailing_plus.rs:3:61 | -4 | #[metrics(+"test_query", "another_test_query_without_timing"+)] +3 | #[metrics(+"test_query", "another_test_query_without_timing"+)] | ^ diff --git a/telemetry/src/futures.rs b/telemetry/src/futures.rs index a18b9abec24..df723f38406 100644 --- a/telemetry/src/futures.rs +++ b/telemetry/src/futures.rs @@ -1,5 +1,4 @@ //! Module with telemetry future telemetry processing -#![allow(clippy::std_instead_of_core)] use std::{collections::HashMap, marker::Unpin, time::Duration}; use iroha_futures::FuturePollTelemetry; @@ -11,8 +10,6 @@ use tokio_stream::{Stream, StreamExt}; pub mod post_process { //! 
Module with telemetry post processing - #![allow(clippy::unwrap_used, clippy::fallible_impl_from)] - use super::*; /// Post processed info of function @@ -34,6 +31,7 @@ pub mod post_process { pub max: f64, } + #[allow(clippy::fallible_impl_from)] impl From<(String, HashMap>)> for PostProcessedInfo { fn from((name, entries): (String, HashMap>)) -> Self { let iter = entries diff --git a/telemetry/src/metrics.rs b/telemetry/src/metrics.rs index a1fa50b8a4e..1043cbb9954 100644 --- a/telemetry/src/metrics.rs +++ b/telemetry/src/metrics.rs @@ -1,11 +1,11 @@ //! [`Metrics`] and [`Status`]-related logic and functions. -#![allow(clippy::std_instead_of_core, clippy::arithmetic_side_effects)] use std::{ ops::Deref, time::{Duration, SystemTime}, }; +use parity_scale_codec::{Compact, Encode}; use prometheus::{ core::{AtomicU64, GenericGauge, GenericGaugeVec}, Encoder, Histogram, HistogramOpts, HistogramVec, IntCounter, IntCounterVec, Opts, Registry, @@ -22,22 +22,38 @@ impl Default for Uptime { } } +impl Encode for Uptime { + fn encode(&self) -> Vec<u8> { + let secs = self.0.as_secs(); + let nanos = self.0.subsec_nanos(); + // Seconds are usually small, but nanos can be anywhere between zero and one billion, + // which eliminates the benefit of Compact encoding + (Compact(secs), nanos).encode() + } +} + /// Response body for GET status request -#[derive(Clone, Copy, Debug, Default, Deserialize, Serialize)] +#[derive(Clone, Copy, Debug, Default, Deserialize, Serialize, Encode)] pub struct Status { /// Number of connected peers, except for the reporting peer itself + #[codec(compact)] pub peers: u64, /// Number of committed blocks + #[codec(compact)] pub blocks: u64, /// Number of accepted transactions + #[codec(compact)] pub txs_accepted: u64, /// Number of rejected transactions + #[codec(compact)] pub txs_rejected: u64, /// Uptime since genesis block creation pub uptime: Uptime, /// Number of view changes in the current round + #[codec(compact)] pub view_changes: u64, /// Number of the transactions in the queue + #[codec(compact)] pub queue_size: u64, } @@ -165,7 +181,6 @@ impl Default for Metrics { block_height, connected_peers, uptime_since_genesis_ms, - registry, domains, accounts, tx_amounts, @@ -174,6 +189,7 @@ impl Default for Metrics { view_changes, queue_size, dropped_messages, + registry, } } } @@ -207,6 +223,7 @@ impl Metrics { #[cfg(test)] mod test { #![allow(clippy::restriction)] + use super::*; #[test] @@ -221,4 +238,51 @@ mod test { println!("{:?}", Status::from(&Box::new(metrics))); println!("{:?}", Status::default()); } + + fn sample_status() -> Status { + Status { + peers: 4, + blocks: 5, + txs_accepted: 31, + txs_rejected: 3, + uptime: Uptime(Duration::new(5, 937_000_000)), + view_changes: 2, + queue_size: 18, + } + } + + #[test] + fn serialize_status_json() { + let value = sample_status(); + + let actual = serde_json::to_string_pretty(&value).expect("Sample is valid"); + // CAUTION: if this is outdated, make sure to update the documentation: + // https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints#status + let expected = expect_test::expect![[r#" + { + "peers": 4, + "blocks": 5, + "txs_accepted": 31, + "txs_rejected": 3, + "uptime": { + "secs": 5, + "nanos": 937000000 + }, + "view_changes": 2, + "queue_size": 18 + }"#]]; + expected.assert_eq(&actual); + } + + #[test] + fn serialize_status_scale() { + let value = sample_status(); + let bytes = value.encode(); + + let actual = hex::encode_upper(bytes); + // CAUTION: if this is outdated, make sure to update the documentation: + //
https://hyperledger.github.io/iroha-2-docs/api/torii-endpoints#status + let expected = expect_test::expect!["10147C0C14407CD9370848"]; + expected.assert_eq(&actual); + } } diff --git a/telemetry/src/retry_period.rs b/telemetry/src/retry_period.rs index 16b834c2bd1..b27d1b7d7fa 100644 --- a/telemetry/src/retry_period.rs +++ b/telemetry/src/retry_period.rs @@ -39,8 +39,6 @@ impl RetryPeriod { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] - #[test] fn increase_exponent_saturates() { let mut period = super::RetryPeriod { diff --git a/telemetry/src/ws.rs b/telemetry/src/ws.rs index d72179c4174..67579104b91 100644 --- a/telemetry/src/ws.rs +++ b/telemetry/src/ws.rs @@ -1,5 +1,4 @@ //! Telemetry sent to a server -#![allow(clippy::std_instead_of_core, clippy::std_instead_of_alloc)] use std::time::Duration; use chrono::Local; @@ -85,7 +84,6 @@ where ) { let mut stream = ReceiverStream::new(receiver).fuse(); let mut internal_stream = ReceiverStream::new(internal_receiver).fuse(); - #[allow(clippy::restriction)] loop { tokio::select! { msg = stream.next() => { @@ -260,7 +258,6 @@ impl SinkFactory for WebsocketSinkFactory { } } -#[allow(clippy::unwrap_used, clippy::expect_used, clippy::panic)] #[cfg(test)] mod tests { use std::{ diff --git a/tools/kagami/Cargo.toml b/tools/kagami/Cargo.toml index 8d3830559c9..7a4d4145ecf 100644 --- a/tools/kagami/Cargo.toml +++ b/tools/kagami/Cargo.toml @@ -9,6 +9,9 @@ description = "A tool used to generate cryptographic keys, docs, the schema and license.workspace = true +[lints] +workspace = true + [dependencies] iroha_crypto = { workspace = true } iroha_config = { workspace = true } diff --git a/tools/kagami/src/config.rs b/tools/kagami/src/config.rs index 2479ffc7994..2d33d5b4a4b 100644 --- a/tools/kagami/src/config.rs +++ b/tools/kagami/src/config.rs @@ -30,7 +30,7 @@ impl RunArgs for Args { mod client { use iroha_config::{ client::{BasicAuth, ConfigurationProxy, WebLogin}, - torii::{uri::DEFAULT_API_ADDR, DEFAULT_TORII_TELEMETRY_ADDR}, + torii::uri::DEFAULT_API_ADDR, }; use super::*; @@ -42,7 +42,6 @@ mod client { fn run(self, writer: &mut BufWriter) -> Outcome { let config = ConfigurationProxy { torii_api_url: Some(format!("http://{DEFAULT_API_ADDR}").parse()?), - torii_telemetry_url: Some(format!("http://{DEFAULT_TORII_TELEMETRY_ADDR}").parse()?), account_id: Some("alice@wonderland".parse()?), basic_auth: Some(Some(BasicAuth { web_login: WebLogin::new("mad_hatter")?, @@ -53,7 +52,7 @@ mod client { )?), private_key: Some(PrivateKey::from_hex( Algorithm::Ed25519, - "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0".as_ref() + "9AC47ABF59B356E0BD7DCBBBB4DEC080E302156A48CA907E47CB6AEA1D32719E7233BFC89DCBD68C19FDE6CE6158225298EC1131B6A130D1AEB454C1AB5183C0" )?), ..ConfigurationProxy::default() } diff --git a/tools/kagami/src/docs.rs b/tools/kagami/src/docs.rs index cf548b5debb..737959c5aef 100644 --- a/tools/kagami/src/docs.rs +++ b/tools/kagami/src/docs.rs @@ -1,9 +1,3 @@ -#![allow(clippy::panic_in_result_fn, clippy::expect_used)] -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::{fmt::Debug, io::Write}; use color_eyre::eyre::WrapErr as _; @@ -47,7 +41,7 @@ where and that it only serves as a reference. 
If a default for such a type has a `null` value, it means that there is no meaningful fallback \ available for this particular value.\n\nAll the default values can be freely obtained from a provided [sample configuration file](../../../configs/peer/config.json), \ but it should only serve as a starting point. If left unchanged, the sample configuration file would still fail to build due to it having `null` in place of \ - [public](#public_key) and [private](#private_key) keys as well as [endpoint](#torii.api_url) [URLs](#torii.telemetry_url). \ + [public](#public_key) and [private](#private_key) keys as well as [API endpoint URL](#torii.api_url). \ These should be provided either by modifying the sample config file or as environment variables. \ No other overloading of configuration values happens besides reading them from a file and capturing the environment variables.\n\n\ For both types of configuration options wrapped in a single `Option<..>` (i.e. both those that have meaningful defaults and those that have `null`), \ diff --git a/tools/kagami/src/genesis.rs b/tools/kagami/src/genesis.rs index 22c64d14053..41ff4c71237 100644 --- a/tools/kagami/src/genesis.rs +++ b/tools/kagami/src/genesis.rs @@ -4,37 +4,37 @@ use clap::{ArgGroup, Parser, Subcommand}; use iroha_config::{sumeragi::default::*, wasm::default::*, wsv::default::*}; use iroha_data_model::{ asset::AssetValueType, - isi::{MintBox, RegisterBox}, + isi::{MintExpr, RegisterExpr}, metadata::Limits, parameter::{default::*, ParametersBuilder}, prelude::AssetId, IdBox, }; -use iroha_genesis::{RawGenesisBlock, RawGenesisBlockBuilder, ValidatorMode, ValidatorPath}; +use iroha_genesis::{ExecutorMode, ExecutorPath, RawGenesisBlock, RawGenesisBlockBuilder}; use serde_json::json; use super::*; -const INLINED_VALIDATOR_WARNING: &str = r#"WARN: You're using genesis with inlined validator. -Consider specifying a separate validator file using `--validator-path-in-genesis` instead. +const INLINED_EXECUTOR_WARNING: &str = r#"WARN: You're using genesis with inlined executor. +Consider specifying a separate executor file using `--executor-path-in-genesis` instead. Use `--help` for more information."#; #[derive(Parser, Debug, Clone)] -#[clap(group = ArgGroup::new("validator").required(true))] +#[clap(group = ArgGroup::new("executor").required(true))] pub struct Args { - /// Reads the validator from the file at <PATH> (relative to CWD) + /// Reads the executor from the file at <PATH> (relative to CWD) /// and includes the content into the genesis. /// /// WARN: This approach can lead to reproducibility issues, as WASM builds are currently not - /// guaranteed to be reproducible. Additionally, inlining the validator bloats the genesis JSON - /// and makes it less readable. Consider specifying a separate validator file - /// using `--validator-path-in-genesis` instead. For more details, refer to + /// guaranteed to be reproducible. Additionally, inlining the executor bloats the genesis JSON + /// and makes it less readable. Consider specifying a separate executor file + /// using `--executor-path-in-genesis` instead. For more details, refer to /// the related PR: https://github.com/hyperledger/iroha/pull/3434 - #[clap(long, group = "validator", value_name = "PATH")] - inline_validator_from_file: Option<PathBuf>, + #[clap(long, group = "executor", value_name = "PATH")] + inline_executor_from_file: Option<PathBuf>, /// Specifies the <PATH> that will be directly inserted into the genesis JSON as-is.
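A note on the `executor` arg group above: `ArgGroup::new("executor").required(true)` makes the two path flags mutually exclusive while requiring that one of them is passed, which is why the wildcard arm of the `match` further down can be `unreachable!("clap invariant")`. A minimal standalone sketch of the same pattern (hypothetical `Demo` struct, not part of this patch):

    use std::path::PathBuf;

    use clap::{ArgGroup, Parser};

    #[derive(Parser)]
    #[clap(group = ArgGroup::new("executor").required(true))]
    struct Demo {
        #[clap(long, group = "executor", value_name = "PATH")]
        inline_executor_from_file: Option<PathBuf>,
        #[clap(long, group = "executor", value_name = "PATH")]
        executor_path_in_genesis: Option<PathBuf>,
    }

    fn main() {
        // Exactly one flag of the group: accepted.
        assert!(Demo::try_parse_from(["demo", "--executor-path-in-genesis", "e.wasm"]).is_ok());
        // Both flags: rejected, because the group allows only one member.
        assert!(Demo::try_parse_from([
            "demo",
            "--inline-executor-from-file", "e.wasm",
            "--executor-path-in-genesis", "e.wasm",
        ])
        .is_err());
        // No flag at all: rejected, because the group is required.
        assert!(Demo::try_parse_from(["demo"]).is_err());
    }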
- #[clap(long, group = "validator", value_name = "PATH")] - validator_path_in_genesis: Option, + #[clap(long, group = "executor", value_name = "PATH")] + executor_path_in_genesis: Option, #[clap(subcommand)] mode: Option, } @@ -67,53 +67,52 @@ pub enum Mode { impl RunArgs for Args { fn run(self, writer: &mut BufWriter) -> Outcome { let Self { - inline_validator_from_file, - validator_path_in_genesis, + inline_executor_from_file, + executor_path_in_genesis, mode, } = self; - let validator: ValidatorMode = - match (inline_validator_from_file, validator_path_in_genesis) { - (Some(path), None) => { - eprintln!("{INLINED_VALIDATOR_WARNING}"); - ParsedValidatorArgs::Inline(path) - } - (None, Some(path)) => ParsedValidatorArgs::Path(path), - _ => unreachable!("clap invariant"), + let executor: ExecutorMode = match (inline_executor_from_file, executor_path_in_genesis) { + (Some(path), None) => { + eprintln!("{INLINED_EXECUTOR_WARNING}"); + ParsedExecutorArgs::Inline(path) } - .try_into()?; + (None, Some(path)) => ParsedExecutorArgs::Path(path), + _ => unreachable!("clap invariant"), + } + .try_into()?; let genesis = match mode.unwrap_or_default() { - Mode::Default => generate_default(validator), + Mode::Default => generate_default(executor), Mode::Synthetic { domains, accounts_per_domain, assets_per_domain, - } => generate_synthetic(validator, domains, accounts_per_domain, assets_per_domain), + } => generate_synthetic(executor, domains, accounts_per_domain, assets_per_domain), }?; writeln!(writer, "{}", serde_json::to_string_pretty(&genesis)?) .wrap_err("Failed to write serialized genesis to the buffer.") } } -enum ParsedValidatorArgs { +enum ParsedExecutorArgs { Inline(PathBuf), Path(PathBuf), } -impl TryFrom for ValidatorMode { +impl TryFrom for ExecutorMode { type Error = color_eyre::Report; - fn try_from(value: ParsedValidatorArgs) -> Result { + fn try_from(value: ParsedExecutorArgs) -> Result { let mode = match value { - ParsedValidatorArgs::Path(path) => ValidatorMode::Path(ValidatorPath(path)), - ParsedValidatorArgs::Inline(path) => { - let validator = ValidatorMode::Path(ValidatorPath(path.clone())) + ParsedExecutorArgs::Path(path) => ExecutorMode::Path(ExecutorPath(path)), + ParsedExecutorArgs::Inline(path) => { + let executor = ExecutorMode::Path(ExecutorPath(path.clone())) .try_into() .wrap_err_with(|| { - format!("Failed to read the validator located at {}", path.display()) + format!("Failed to read the executor located at {}", path.display()) })?; - ValidatorMode::Inline(validator) + ExecutorMode::Inline(executor) } }; Ok(mode) @@ -121,7 +120,7 @@ impl TryFrom for ValidatorMode { } #[allow(clippy::too_many_lines)] -pub fn generate_default(validator: ValidatorMode) -> color_eyre::Result { +pub fn generate_default(executor: ExecutorMode) -> color_eyre::Result { let mut meta = Metadata::new(); meta.insert_with_limits( "key".parse()?, @@ -129,7 +128,7 @@ pub fn generate_default(validator: ValidatorMode) -> color_eyre::Result color_eyre::Result::Id::from_str("alice@wonderland")?; - let mint = MintBox::new( + let alice_id = AccountId::from_str("alice@wonderland")?; + let mint = MintExpr::new( 13_u32.to_value(), IdBox::AssetId(AssetId::new("rose#wonderland".parse()?, alice_id.clone())), ); - let mint_cabbage = MintBox::new( + let mint_cabbage = MintExpr::new( 44_u32.to_value(), IdBox::AssetId(AssetId::new( "cabbage#garden_of_live_flowers".parse()?, alice_id.clone(), )), ); - let grant_permission_to_set_parameters = GrantBox::new( + let grant_permission_to_set_parameters = GrantExpr::new( 
PermissionToken::new("CanSetParameters".parse()?, &json!(null)), alice_id.clone(), ); - let register_user_metadata_access = RegisterBox::new( + let register_user_metadata_access = RegisterExpr::new( Role::new("ALICE_METADATA_ACCESS".parse()?) .add_permission(PermissionToken::new( "CanSetKeyValueInUserAccount".parse()?, @@ -209,13 +208,13 @@ pub fn generate_default(validator: ValidatorMode) -> color_eyre::Result color_eyre::Result { // Add default `Domain` and `Account` to still be able to query - let mut builder = RawGenesisBlockBuilder::new() + let mut builder = RawGenesisBlockBuilder::default() .domain("wonderland".parse()?) .account("alice".parse()?, crate::DEFAULT_PUBLIC_KEY.parse()?) .finish_domain(); @@ -236,7 +235,7 @@ fn generate_synthetic( builder = domain_builder.finish_domain(); } - let mut genesis = builder.validator(validator).build(); + let mut genesis = builder.executor(executor).build(); let first_transaction = genesis .first_transaction_mut() @@ -246,7 +245,7 @@ fn generate_synthetic( // FIXME: it actually generates (assets_per_domain * accounts_per_domain) assets per domain // https://github.com/hyperledger/iroha/issues/3508 for asset in 0..assets_per_domain { - let mint = MintBox::new( + let mint = MintExpr::new( 13_u32.to_value(), IdBox::AssetId(AssetId::new( format!("asset_{asset}#domain_{domain}").parse()?, diff --git a/tools/kagami/src/main.rs b/tools/kagami/src/main.rs index b869fbd8c20..f4ed9d40733 100644 --- a/tools/kagami/src/main.rs +++ b/tools/kagami/src/main.rs @@ -1,11 +1,6 @@ //! CLI for generating iroha sample configuration, genesis and //! cryptographic key pairs. To be used with all compliant Iroha //! installations. -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::{ io::{stdout, BufWriter, Write}, str::FromStr as _, @@ -27,7 +22,7 @@ pub(crate) type Outcome = color_eyre::Result<()>; // The reason for hard-coding this default is to ensure that the // algorithm is matched to the public key in Ed25519 format. If // you need to change either, you should definitely change both. -pub const DEFAULT_PUBLIC_KEY: &str = +const DEFAULT_PUBLIC_KEY: &str = "ed01207233bfc89dcbd68c19fde6ce6158225298ec1131b6a130d1aeb454c1ab5183c0"; fn main() -> Outcome { @@ -38,7 +33,7 @@ fn main() -> Outcome { } /// Trait to encapsulate common attributes of the commands and sub-commands. -pub trait RunArgs { +trait RunArgs { /// Run the given command. /// /// # Errors @@ -50,7 +45,7 @@ pub trait RunArgs { /// shipped with Iroha. #[derive(Parser, Debug)] #[command(name = "kagami", version, author)] -pub enum Args { +enum Args { /// Generate cryptographic key pairs using the given algorithm and either private key or seed Crypto(Box), /// Generate the schema used for code generation in Iroha SDKs diff --git a/tools/kura_inspector/Cargo.toml b/tools/kura_inspector/Cargo.toml index 8833be01cc0..31854303380 100644 --- a/tools/kura_inspector/Cargo.toml +++ b/tools/kura_inspector/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [dependencies] iroha_core = { workspace = true } iroha_version = { workspace = true } diff --git a/tools/kura_inspector/src/main.rs b/tools/kura_inspector/src/main.rs index 1840558e05c..c88e2ac7849 100644 --- a/tools/kura_inspector/src/main.rs +++ b/tools/kura_inspector/src/main.rs @@ -1,14 +1,9 @@ //! Kura inspector binary. For usage run with `--help`. 
-#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] use std::path::{Path, PathBuf}; use clap::{Parser, Subcommand}; use iroha_core::kura::{BlockIndex, BlockStore, LockStatus}; -use iroha_data_model::block::VersionedCommittedBlock; +use iroha_data_model::block::SignedBlock; use iroha_version::scale::DecodeVersioned; /// Kura inspector @@ -36,7 +31,6 @@ enum Command { }, } -#[allow(clippy::use_debug, clippy::print_stderr, clippy::panic)] fn main() { let args = Args::parse(); @@ -55,12 +49,6 @@ fn main() { } } -#[allow( - clippy::print_stdout, - clippy::use_debug, - clippy::expect_used, - clippy::expect_fun_call -)] fn print_blockchain(block_store_path: &Path, from_height: u64, block_count: u64) { let mut block_store_path: std::borrow::Cow<'_, Path> = block_store_path.into(); @@ -135,9 +123,9 @@ fn print_blockchain(block_store_path: &Path, from_height: u64, block_count: u64) vec![0_u8; usize::try_from(idx.length).expect("index_len didn't fit in 32-bits")]; block_store .read_block_data(idx.start, &mut block_buf) - .expect(&format!("Failed to read block № {} data.", meta_index + 1)); - let block = VersionedCommittedBlock::decode_all_versioned(&block_buf) - .expect(&format!("Failed to decode block № {}", meta_index + 1)); + .unwrap_or_else(|_| panic!("Failed to read block № {} data.", meta_index + 1)); + let block = SignedBlock::decode_all_versioned(&block_buf) + .unwrap_or_else(|_| panic!("Failed to decode block № {}", meta_index + 1)); println!("Block#{} :", meta_index + 1); println!("{block:#?}"); } diff --git a/tools/parity_scale_decoder/Cargo.toml b/tools/parity_scale_decoder/Cargo.toml index 42dfb4747d3..e2cb948ff7a 100644 --- a/tools/parity_scale_decoder/Cargo.toml +++ b/tools/parity_scale_decoder/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [features] # Disable colour for all program output. # Useful for Docker-based deployment and terminals without colour support. @@ -24,7 +27,7 @@ iroha_genesis = { workspace = true } clap = { workspace = true, features = ["derive", "cargo"] } eyre = { workspace = true } parity-scale-codec = { workspace = true } -colored = "2.0.0" +colored = "2.0.4" [build-dependencies] iroha_data_model = { workspace = true } diff --git a/tools/parity_scale_decoder/build.rs b/tools/parity_scale_decoder/build.rs index 5d63316f7a7..58e3db5dbd8 100644 --- a/tools/parity_scale_decoder/build.rs +++ b/tools/parity_scale_decoder/build.rs @@ -1,7 +1,5 @@ //! Build script that auto-updates sample binaries from sources.
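A note on the `print_blockchain` hunk above: `expect(&format!(..))` builds the panic message `String` even on the success path, which is what the now-removed `clippy::expect_fun_call` allow was silencing, while `unwrap_or_else(|_| panic!(..))` formats the message only when the `Result` is actually an `Err`. A minimal sketch of the difference (hypothetical values, not part of this patch):

    fn read_block(data: Result<u64, ()>, meta_index: usize) -> u64 {
        // Before: data.expect(&format!("Failed to read block № {} data.", meta_index + 1))
        // allocated the message on every call, even for Ok.
        // After: the message is formatted only in the error branch.
        data.unwrap_or_else(|_| panic!("Failed to read block № {} data.", meta_index + 1))
    }

    fn main() {
        assert_eq!(read_block(Ok(7), 0), 7);
    }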
-#![allow(clippy::restriction)] - use std::{fs, io::Result, path::PathBuf}; use iroha_data_model::{account::NewAccount, domain::NewDomain, prelude::*}; @@ -13,7 +11,7 @@ fn main() { sample_into_binary_file::("domain").expect("Failed to encode into domain.bin."); - sample_into_binary_file::>("trigger") + sample_into_binary_file::>("trigger") .expect("Failed to encode into trigger.bin."); } diff --git a/tools/parity_scale_decoder/samples/account.bin b/tools/parity_scale_decoder/samples/account.bin index 5a2dd9b72f1..04aea960c0e 100644 Binary files a/tools/parity_scale_decoder/samples/account.bin and b/tools/parity_scale_decoder/samples/account.bin differ diff --git a/tools/parity_scale_decoder/samples/trigger.bin b/tools/parity_scale_decoder/samples/trigger.bin index 99b44b46404..aa17a69bcd0 100644 Binary files a/tools/parity_scale_decoder/samples/trigger.bin and b/tools/parity_scale_decoder/samples/trigger.bin differ diff --git a/tools/parity_scale_decoder/src/main.rs b/tools/parity_scale_decoder/src/main.rs index fb5df8a7956..dcbb4069df3 100644 --- a/tools/parity_scale_decoder/src/main.rs +++ b/tools/parity_scale_decoder/src/main.rs @@ -1,11 +1,4 @@ //! Parity Scale decoder tool for Iroha data types. For usage run with `--help` -#![allow( - clippy::arithmetic_side_effects, - clippy::std_instead_of_core, - clippy::std_instead_of_alloc -)] -#![allow(clippy::print_stdout, clippy::use_debug, clippy::unnecessary_wraps)] - use core::num::NonZeroU64; use std::{ collections::{BTreeMap, BTreeSet}, @@ -24,14 +17,11 @@ use iroha_data_model::{ asset::NewAssetDefinition, block::{ error::BlockRejectionReason, - stream::{ - BlockMessage, BlockSubscriptionRequest, VersionedBlockMessage, - VersionedBlockSubscriptionRequest, - }, - BlockHeader, CommittedBlock, VersionedCommittedBlock, + stream::{BlockMessage, BlockSubscriptionRequest}, + BlockHeader, SignedBlock, SignedBlockV1, }, domain::NewDomain, - http::{BatchedResponse, VersionedBatchedResponse}, + executor::Executor, ipfs::IpfsPath, predicate::{ ip_addr::{Ipv4Predicate, Ipv6Predicate}, @@ -45,9 +35,8 @@ use iroha_data_model::{ error::{FindError, QueryExecutionFail}, ForwardCursor, }, - transaction::{error::TransactionLimitError, SignedTransaction, TransactionLimits}, - validator::Validator, - VersionedCommittedBlockWrapper, + transaction::{error::TransactionLimitError, SignedTransactionV1, TransactionLimits}, + BatchedResponse, BatchedResponseV1, SignedBlockWrapper, }; use iroha_primitives::{ addr::{Ipv4Addr, Ipv6Addr}, @@ -118,7 +107,6 @@ pub trait DumpDecoded: Debug + DecodeAll { /// - If writing into `w` fails fn dump_decoded(mut input: &[u8], w: &mut dyn io::Write) -> Result<()> { let obj = ::decode_all(&mut input)?; - #[allow(clippy::use_debug)] writeln!(w, "{obj:#?}")?; Ok(()) } @@ -265,16 +253,15 @@ mod tests { #[test] fn decode_trigger_sample() { - let account_id = - ::Id::from_str("alice@wonderland").expect("Valid"); - let rose_definition_id = ::Id::new( + let account_id = AccountId::from_str("alice@wonderland").expect("Valid"); + let rose_definition_id = AssetDefinitionId::new( "rose".parse().expect("Valid"), "wonderland".parse().expect("Valid"), ); - let rose_id = ::Id::new(rose_definition_id, account_id.clone()); + let rose_id = AssetId::new(rose_definition_id, account_id.clone()); let trigger_id = "mint_rose".parse().expect("Valid"); - let action = Action::::new( - vec![MintBox::new(1_u32, rose_id)], + let action = Action::::new( + vec![MintExpr::new(1_u32, rose_id)], Repeats::Indefinitely, account_id, 
FilterBox::Data(DataEventFilter::BySome(DataEntityFilter::ByAccount( @@ -285,7 +272,7 @@ mod tests { decode_sample( "trigger.bin", - String::from("Trigger"), + String::from("Trigger"), &trigger, ); } diff --git a/tools/swarm/Cargo.toml b/tools/swarm/Cargo.toml index cbd4508bf93..bdc30322ae1 100644 --- a/tools/swarm/Cargo.toml +++ b/tools/swarm/Cargo.toml @@ -6,13 +6,14 @@ version.workspace = true authors.workspace = true license.workspace = true +[lints] +workspace = true + [dependencies] iroha_crypto.workspace = true iroha_data_model.workspace = true iroha_primitives.workspace = true -iroha_config.workspace = true color-eyre.workspace = true -expect-test.workspace = true path-absolutize.workspace = true pathdiff.workspace = true owo-colors = { workspace = true, features = ["supports-colors"] } @@ -23,3 +24,7 @@ serde_json.workspace = true derive_more.workspace = true inquire.workspace = true +[dev-dependencies] +iroha_config.workspace = true + +expect-test.workspace = true diff --git a/tools/swarm/src/cli.rs b/tools/swarm/src/cli.rs index ffe08c3fe79..eb7f45e9371 100644 --- a/tools/swarm/src/cli.rs +++ b/tools/swarm/src/cli.rs @@ -20,6 +20,11 @@ pub struct Cli { /// overwrite the file anyway, pass `--force` flag. #[arg(long, short)] pub outfile: PathBuf, + /// Disable banner in the file saying that the file is generated. + /// + /// It includes all passed arguments in order to help with reproducibility. + #[arg(long)] + pub no_banner: bool, /// Path to a directory with Iroha configuration. It will be mapped as volume for containers. /// /// The directory should contain `config.json` and `genesis.json`. diff --git a/tools/swarm/src/compose.rs b/tools/swarm/src/compose.rs index 264e393d21e..e08ab109e91 100644 --- a/tools/swarm/src/compose.rs +++ b/tools/swarm/src/compose.rs @@ -39,13 +39,26 @@ impl DockerCompose { } } - pub fn write_file(&self, path: &PathBuf) -> Result<(), color_eyre::Report> { + pub fn write_file( + &self, + path: &PathBuf, + banner_enabled: bool, + ) -> Result<(), color_eyre::Report> { + let mut file = File::create(path) + .wrap_err_with(|| eyre!("Failed to create file {}", path.display()))?; + + if banner_enabled { + file.write_all( + b"# This file is generated by iroha_swarm.\n\ + # Do not edit it manually.\n\n", + ) + .wrap_err_with(|| eyre!("Failed to write banner into {}", path.display()))?; + } + let yaml = serde_yaml::to_string(self).wrap_err("Failed to serialise YAML")?; - File::create(path) - .wrap_err_with(|| eyre!("Failed to create file {}", path.display()))? 
- .write_all(yaml.as_bytes()) - .wrap_err_with(|| eyre!("Failed to write YAML content into {}", path.display()))?; - Ok(()) + file.write_all(yaml.as_bytes()) + .wrap_err_with(|| eyre!("Failed to write YAML content into {}", path.display())) + .map_err(Into::into) } } @@ -98,7 +111,6 @@ impl DockerComposeService { let ports = vec![ PairColon(peer.port_p2p, peer.port_p2p), PairColon(peer.port_api, peer.port_api), - PairColon(peer.port_telemetry, peer.port_telemetry), ]; let command = if genesis_private_key.is_some() { @@ -114,7 +126,6 @@ impl DockerComposeService { key_pair: peer.key_pair.clone(), p2p_addr: peer.addr(peer.port_p2p), api_addr: peer.addr(peer.port_api), - telemetry_addr: peer.addr(peer.port_telemetry), }; Self { @@ -200,7 +211,6 @@ struct FullPeerEnv { iroha_private_key: SerializeAsJsonStr, torii_p2p_addr: SocketAddr, torii_api_url: SocketAddr, - torii_telemetry_url: SocketAddr, #[serde(skip_serializing_if = "Option::is_none")] iroha_genesis_account_public_key: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -215,7 +225,6 @@ struct CompactPeerEnv { genesis_private_key: Option, p2p_addr: SocketAddr, api_addr: SocketAddr, - telemetry_addr: SocketAddr, trusted_peers: BTreeSet, } @@ -228,7 +237,6 @@ impl From for FullPeerEnv { iroha_genesis_account_private_key: value.genesis_private_key.map(SerializeAsJsonStr), torii_p2p_addr: value.p2p_addr, torii_api_url: value.api_addr, - torii_telemetry_url: value.telemetry_addr, sumeragi_trusted_peers: SerializeAsJsonStr(value.trusted_peers), } } @@ -331,12 +339,12 @@ impl DockerComposeBuilder<'_> { Ok(compose) } - pub(crate) fn build_and_write(&self) -> color_eyre::Result<()> { + pub(crate) fn build_and_write(&self, banner_enabled: bool) -> color_eyre::Result<()> { let target_file = self.target_file; let compose = self .build() .wrap_err("Failed to build a docker compose file")?; - compose.write_file(&target_file.path) + compose.write_file(&target_file.path, banner_enabled) } } @@ -364,14 +372,12 @@ mod peer_generator { const BASE_PORT_P2P: u16 = 1337; const BASE_PORT_API: u16 = 8080; - const BASE_PORT_TELEMETRY: u16 = 8180; const BASE_SERVICE_NAME: &'_ str = "iroha"; pub struct Peer { pub name: String, pub port_p2p: u16, pub port_api: u16, - pub port_telemetry: u16, pub key_pair: KeyPair, } @@ -403,7 +409,6 @@ mod peer_generator { name: service_name.clone(), port_p2p: BASE_PORT_P2P + i, port_api: BASE_PORT_API + i, - port_telemetry: BASE_PORT_TELEMETRY + i, key_pair, }; @@ -530,7 +535,6 @@ mod tests { genesis_private_key: Some(keypair.private_key().clone()), p2p_addr: SocketAddr::from_str("127.0.0.1:1337").unwrap(), api_addr: SocketAddr::from_str("127.0.0.1:1338").unwrap(), - telemetry_addr: SocketAddr::from_str("127.0.0.1:1339").unwrap(), trusted_peers: BTreeSet::new(), } .into(); @@ -576,7 +580,6 @@ mod tests { genesis_private_key: Some(key_pair.private_key().clone()), p2p_addr: SocketAddr::from_str("iroha1:1339").unwrap(), api_addr: SocketAddr::from_str("iroha1:1338").unwrap(), - telemetry_addr: SocketAddr::from_str("iroha1:1337").unwrap(), trusted_peers: BTreeSet::new(), } .into(), @@ -600,29 +603,28 @@ mod tests { let actual = serde_yaml::to_string(&compose).expect("Should be serialisable"); let expected = expect_test::expect![[r#" - version: '3.8' - services: - iroha0: - build: . 
- platform: linux/amd64 - environment: - IROHA_PUBLIC_KEY: ed012039E5BF092186FACC358770792A493CA98A83740643A3D41389483CF334F748C8 - IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"db9d90d20f969177bd5882f9fe211d14d1399d5440d04e3468783d169bbc4a8e39e5bf092186facc358770792a493ca98a83740643a3d41389483cf334f748c8"}' - TORII_P2P_ADDR: iroha1:1339 - TORII_API_URL: iroha1:1338 - TORII_TELEMETRY_URL: iroha1:1337 - IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed012039E5BF092186FACC358770792A493CA98A83740643A3D41389483CF334F748C8 - IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"db9d90d20f969177bd5882f9fe211d14d1399d5440d04e3468783d169bbc4a8e39e5bf092186facc358770792a493ca98a83740643a3d41389483cf334f748c8"}' - SUMERAGI_TRUSTED_PEERS: '[]' - ports: - - 1337:1337 - - 8080:8080 - - 8081:8081 - volumes: - - ./configs/peer/legacy_stable:/config - init: true - command: iroha --submit-genesis - "#]]; + version: '3.8' + services: + iroha0: + build: . + platform: linux/amd64 + environment: + IROHA_PUBLIC_KEY: ed012039E5BF092186FACC358770792A493CA98A83740643A3D41389483CF334F748C8 + IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"db9d90d20f969177bd5882f9fe211d14d1399d5440d04e3468783d169bbc4a8e39e5bf092186facc358770792a493ca98a83740643a3d41389483cf334f748c8"}' + TORII_P2P_ADDR: iroha1:1339 + TORII_API_URL: iroha1:1338 + IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed012039E5BF092186FACC358770792A493CA98A83740643A3D41389483CF334F748C8 + IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"db9d90d20f969177bd5882f9fe211d14d1399d5440d04e3468783d169bbc4a8e39e5bf092186facc358770792a493ca98a83740643a3d41389483cf334f748c8"}' + SUMERAGI_TRUSTED_PEERS: '[]' + ports: + - 1337:1337 + - 8080:8080 + - 8081:8081 + volumes: + - ./configs/peer/legacy_stable:/config + init: true + command: iroha --submit-genesis + "#]]; expected.assert_eq(&actual); } @@ -638,21 +640,19 @@ mod tests { genesis_private_key: None, p2p_addr: SocketAddr::from_str("iroha0:1337").unwrap(), api_addr: SocketAddr::from_str("iroha0:1337").unwrap(), - telemetry_addr: SocketAddr::from_str("iroha0:1337").unwrap(), trusted_peers: BTreeSet::new(), } .into(); let actual = serde_yaml::to_string(&env).unwrap(); let expected = expect_test::expect![[r#" - IROHA_PUBLIC_KEY: ed0120415388A90FA238196737746A70565D041CFB32EAA0C89FF8CB244C7F832A6EBD - IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"6bf163fd75192b81a78cb20c5f8cb917f591ac6635f2577e6ca305c27a456a5d415388a90fa238196737746a70565d041cfb32eaa0c89ff8cb244c7f832a6ebd"}' - TORII_P2P_ADDR: iroha0:1337 - TORII_API_URL: iroha0:1337 - TORII_TELEMETRY_URL: iroha0:1337 - IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed0120415388A90FA238196737746A70565D041CFB32EAA0C89FF8CB244C7F832A6EBD - SUMERAGI_TRUSTED_PEERS: '[]' - "#]]; + IROHA_PUBLIC_KEY: ed0120415388A90FA238196737746A70565D041CFB32EAA0C89FF8CB244C7F832A6EBD + IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"6bf163fd75192b81a78cb20c5f8cb917f591ac6635f2577e6ca305c27a456a5d415388a90fa238196737746a70565d041cfb32eaa0c89ff8cb244c7f832a6ebd"}' + TORII_P2P_ADDR: iroha0:1337 + TORII_API_URL: iroha0:1337 + IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed0120415388A90FA238196737746A70565D041CFB32EAA0C89FF8CB244C7F832A6EBD + SUMERAGI_TRUSTED_PEERS: '[]' + "#]]; expected.assert_eq(&actual); } @@ -689,14 +689,12 @@ mod tests { IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"5f8d1291bf6b762ee748a87182345d135fd167062857aa4f20ba39f25e74c4b0f0321eb4139163c35f88bf78520ff7071499d7f4e79854550028a196c7b49e13"}' TORII_P2P_ADDR: 
iroha0:1337 TORII_API_URL: iroha0:8080 - TORII_TELEMETRY_URL: iroha0:8180 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 IROHA_GENESIS_ACCOUNT_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"5a6d5f06a90d29ad906e2f6ea8b41b4ef187849d0d397081a4a15ffcbe71e7c73420f48a9eeb12513b8eb7daf71979ce80a1013f5f341c10dcda4f6aa19f97a9"}' SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1337:1337 - 8080:8080 - - 8180:8180 volumes: - ./config:/config init: true @@ -709,13 +707,11 @@ mod tests { IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"8d34d2c6a699c61e7a9d5aabbbd07629029dfb4f9a0800d65aa6570113edb465a88554aa5c86d28d0eebec497235664433e807881cd31e12a1af6c4d8b0f026c"}' TORII_P2P_ADDR: iroha1:1338 TORII_API_URL: iroha1:8081 - TORII_TELEMETRY_URL: iroha1:8181 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1338:1338 - 8081:8081 - - 8181:8181 volumes: - ./config:/config init: true @@ -727,13 +723,11 @@ mod tests { IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"cf4515a82289f312868027568c0da0ee3f0fde7fef1b69deb47b19fde7cbc169312c1b7b5de23d366adcf23cd6db92ce18b2aa283c7d9f5033b969c2dc2b92f4"}' TORII_P2P_ADDR: iroha2:1339 TORII_API_URL: iroha2:8082 - TORII_TELEMETRY_URL: iroha2:8182 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 SUMERAGI_TRUSTED_PEERS: '[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1339:1339 - 8082:8082 - - 8182:8182 volumes: - ./config:/config init: true @@ -745,13 +739,11 @@ mod tests { IROHA_PRIVATE_KEY: '{"digest_function":"ed25519","payload":"ab0e99c2b845b4ac7b3e88d25a860793c7eb600a25c66c75cba0bae91e955aa6854457b2e3d6082181da73dc01c1e6f93a72d0c45268dc8845755287e98a5dee"}' TORII_P2P_ADDR: iroha3:1340 TORII_API_URL: iroha3:8083 - TORII_TELEMETRY_URL: iroha3:8183 IROHA_GENESIS_ACCOUNT_PUBLIC_KEY: ed01203420F48A9EEB12513B8EB7DAF71979CE80A1013F5F341C10DCDA4F6AA19F97A9 SUMERAGI_TRUSTED_PEERS: 
'[{"address":"iroha2:1339","public_key":"ed0120312C1B7B5DE23D366ADCF23CD6DB92CE18B2AA283C7D9F5033B969C2DC2B92F4"},{"address":"iroha3:1340","public_key":"ed0120854457B2E3D6082181DA73DC01C1E6F93A72D0C45268DC8845755287E98A5DEE"},{"address":"iroha1:1338","public_key":"ed0120A88554AA5C86D28D0EEBEC497235664433E807881CD31E12A1AF6C4D8B0F026C"},{"address":"iroha0:1337","public_key":"ed0120F0321EB4139163C35F88BF78520FF7071499D7F4E79854550028A196C7B49E13"}]' ports: - 1340:1340 - 8083:8083 - - 8183:8183 volumes: - ./config:/config init: true diff --git a/tools/swarm/src/main.rs b/tools/swarm/src/main.rs index 2f3d832e7c3..18e83f64be9 100644 --- a/tools/swarm/src/main.rs +++ b/tools/swarm/src/main.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + mod cli; mod compose; mod ui; @@ -17,6 +19,7 @@ fn main() -> Result<()> { peers, seed, force, + no_banner, source: image_source, outfile: target_file_raw, config_dir: config_dir_raw, @@ -41,6 +44,8 @@ fn main() -> Result<()> { } } + let banner_enabled = !no_banner; + compose::DockerComposeBuilder { target_file: &target_file, config_dir: &config_dir, @@ -48,7 +53,7 @@ fn main() -> Result<()> { peers, seed, } - .build_and_write()?; + .build_and_write(banner_enabled)?; ui::log_file_mode_complete(&target_file, &target_file_raw); diff --git a/tools/wasm_builder_cli/Cargo.toml b/tools/wasm_builder_cli/Cargo.toml index 51f4d0e0e56..b884ac5c196 100644 --- a/tools/wasm_builder_cli/Cargo.toml +++ b/tools/wasm_builder_cli/Cargo.toml @@ -6,11 +6,13 @@ version.workspace = true authors.workspace = true license.workspace = true +[lints] +workspace = true [dependencies] iroha_wasm_builder.workspace = true clap = { workspace = true, features = ["derive"] } color-eyre.workspace = true -spinoff = { workspace = true, features = ["binary", "dots12"] } +spinoff = { workspace = true, features = ["binary"] } owo-colors = { workspace = true, features = ["supports-colors"] } diff --git a/tools/wasm_builder_cli/src/main.rs b/tools/wasm_builder_cli/src/main.rs index dde4bd00e49..436dc94decb 100644 --- a/tools/wasm_builder_cli/src/main.rs +++ b/tools/wasm_builder_cli/src/main.rs @@ -1,3 +1,5 @@ +#![allow(missing_docs)] + use std::path::PathBuf; use clap::{Args, Parser}; @@ -41,7 +43,7 @@ fn main() -> color_eyre::Result<()> { Cli::Check { common: CommonArgs { path }, } => { - let builder = Builder::new(&path); + let builder = Builder::new(&path).show_output(); builder.check()?; } Cli::Build { @@ -50,31 +52,20 @@ fn main() -> color_eyre::Result<()> { optimize, outfile, } => { - let builder = Builder::new(&path); + let builder = Builder::new(&path).show_output(); let builder = if format { builder.format() } else { builder }; let output = { - let sp = spinoff::Spinner::new_with_stream( - spinoff::spinners::Dots12, - "Building the smartcontract", - None, - spinoff::Streams::Stderr, - ); + // not showing the spinner here, cargo does a progress bar for us match builder.build() { - Ok(output) => { - sp.success("Smartcontract is built"); - output - } - err => { - sp.fail("Building failed"); - err? 
- } + Ok(output) => output, + err => err?, } }; let output = if optimize { - let sp = spinoff::Spinner::new_with_stream( + let mut sp = spinoff::Spinner::new_with_stream( spinoff::spinners::Binary, "Optimizing the output", None, diff --git a/tools/wasm_test_runner/Cargo.toml b/tools/wasm_test_runner/Cargo.toml new file mode 100644 index 00000000000..682ad1ccce1 --- /dev/null +++ b/tools/wasm_test_runner/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "iroha_wasm_test_runner" + +edition.workspace = true +version.workspace = true +authors.workspace = true +license.workspace = true + +[lints] +workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +wasmtime = { workspace = true } +anyhow = "1.0.75" diff --git a/tools/wasm_test_runner/src/main.rs b/tools/wasm_test_runner/src/main.rs new file mode 100644 index 00000000000..a2d3b5ca91f --- /dev/null +++ b/tools/wasm_test_runner/src/main.rs @@ -0,0 +1,82 @@ +//! A tool to run `WebAssembly` tests +//! +//! This copies functionality of `webassembly-test-runner`, but with an ability to indicate failure with an exit code. + +use std::process::ExitCode; + +use anyhow::{bail, Result}; +use wasmtime::{Engine, Instance, Module, Store}; + +struct TestMeta<'a> { + name: &'a str, + ignore: bool, +} + +fn main() -> Result<ExitCode> { + let argv0 = std::env::args().next().unwrap(); + + let file = match std::env::args().nth(1) { + Some(it) => it, + None => { + bail!("usage: {} tests.wasm", argv0); + } + }; + // Modules can be compiled through either the text or binary format + let engine = Engine::default(); + let module = Module::from_file(&engine, &file)?; + let mut tests = Vec::new(); + for export in module.exports() { + if let Some(name) = export.name().strip_prefix("$webassembly-test$") { + let mut ignore = true; + let name = name.strip_prefix("ignore$").unwrap_or_else(|| { + ignore = false; + name + }); + tests.push((export, TestMeta { name, ignore })); + } + } + let total = tests.len(); + + eprintln!("\nrunning {total} tests"); + let mut store = Store::new(&engine, ()); + let mut instance = Instance::new(&mut store, &module, &[])?; + let mut passed = 0; + let mut failed = 0; + let mut ignored = 0; + for (export, meta) in tests { + eprint!("test {} ...", meta.name); + if meta.ignore { + ignored += 1; + eprintln!(" ignored"); + } else { + let f = instance.get_typed_func::<(), ()>(&mut store, export.name())?; + + let pass = f.call(&mut store, ()).is_ok(); + if pass { + passed += 1; + eprintln!(" ok"); + } else { + // Reset instance on test failure. WASM uses `panic=abort`, so + // `Drop`s are not called after test failures, and a failed test + // might leave an instance in an inconsistent state. + store = Store::new(&engine, ()); + instance = Instance::new(&mut store, &module, &[])?; + + failed += 1; + eprintln!(" FAILED"); + } + } + } + eprintln!( + "\ntest result: {}. {} passed; {} failed; {} ignored;", + if failed > 0 { "FAILED" } else { "ok" }, + passed, + failed, + ignored, + ); + Ok(if failed > 0 { + ExitCode::FAILURE + } else { + ExitCode::SUCCESS + }) +} diff --git a/version/Cargo.toml b/version/Cargo.toml index 91041519a36..89b396ff8a9 100644 --- a/version/Cargo.toml +++ b/version/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [features] default = ["std", "derive", "scale", "json"] # Enable static linkage of the rust standard library.
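The `iroha_wasm_test_runner` added above discovers tests purely by export name: any wasm export called `$webassembly-test$<name>` is treated as a test, and an extra `ignore$` segment marks it as ignored. This is the shape of export that the `webassembly-test` attribute macro, used by the tests this runner targets, generates. A guest-side sketch of the convention written with raw `export_name` attributes (hypothetical test names, not part of this patch):

    // Compile with --target wasm32-unknown-unknown and feed the resulting
    // .wasm file to the runner above.
    #[export_name = "$webassembly-test$addition_works"]
    pub extern "C" fn addition_works() {
        // A failing assertion traps; the runner records FAILED and
        // re-instantiates the module, since panic = "abort" skips Drop.
        assert_eq!(2 + 2, 4);
    }

    #[export_name = "$webassembly-test$ignore$expensive"]
    pub extern "C" fn expensive() {
        // Counted as ignored and never invoked.
    }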
diff --git a/version/derive/Cargo.toml b/version/derive/Cargo.toml index 5667539058d..be02b1e5acf 100644 --- a/version/derive/Cargo.toml +++ b/version/derive/Cargo.toml @@ -7,6 +7,9 @@ authors.workspace = true license.workspace = true +[lints] +workspace = true + [lib] proc-macro = true diff --git a/version/derive/src/lib.rs b/version/derive/src/lib.rs index f367086eef6..8d7fdc71e16 100644 --- a/version/derive/src/lib.rs +++ b/version/derive/src/lib.rs @@ -1,5 +1,4 @@ //! Crate containing schema related macro functionality -#![allow(clippy::arithmetic_side_effects)] use std::ops::Range; diff --git a/version/derive/tests/json.rs b/version/derive/tests/json.rs index 76bc312f365..4513cb72cfe 100644 --- a/version/derive/tests/json.rs +++ b/version/derive/tests/json.rs @@ -1,11 +1,5 @@ #[cfg(test)] mod tests { - #![allow( - clippy::items_after_statements, - clippy::wildcard_imports, - clippy::restriction - )] - use iroha_version::{ error::{Error, Result}, json::*, @@ -74,6 +68,8 @@ mod tests { #[test] fn unsupported_version() -> Result<(), String> { + use model_1::*; + let json = { use model_2::*; @@ -83,8 +79,6 @@ mod tests { .map_err(|e| e.to_string())? }; - use model_1::*; - let raw_string = "{\"version\":\"3\",\"content\":\"test string\"}"; let decoded_message = VersionedMessage::from_versioned_json_str(&json); match decoded_message { diff --git a/version/derive/tests/scale.rs b/version/derive/tests/scale.rs index 39c060da30b..925077b8fcf 100644 --- a/version/derive/tests/scale.rs +++ b/version/derive/tests/scale.rs @@ -1,11 +1,5 @@ #[cfg(test)] mod tests { - #![allow( - clippy::items_after_statements, - clippy::wildcard_imports, - clippy::restriction - )] - use iroha_version::{ error::{Error, Result}, scale::*, @@ -73,6 +67,8 @@ mod tests { #[test] fn unsupported_version() -> Result<(), String> { + use model_1::*; + let bytes = { use model_2::*; @@ -80,7 +76,6 @@ mod tests { versioned_message.encode_versioned() }; - use model_1::*; let raw_string = "test string".encode(); let decoded_message = VersionedMessage::decode_all_versioned(&bytes); match decoded_message { diff --git a/version/src/lib.rs b/version/src/lib.rs index 841197968f8..862d71fe8e1 100644 --- a/version/src/lib.rs +++ b/version/src/lib.rs @@ -1,10 +1,7 @@ //! Structures, traits and impls related to versioning. //! //! For usage examples see [`iroha_version_derive::declare_versioned`]. 
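One recurring change in this area, for instance in the `warp::Reply` impl later in this file's hunk, is switching `format!("… '{}'", self)` to the inline-captured `format!("… '{self}'")`. Presumably this satisfies clippy's `uninlined_format_args` under the newly enabled workspace lints (an assumption); the output is unchanged, as this sketch illustrates:

    fn main() {
        let reason = "Unsupported version";
        let old = format!("Transaction Rejected (Malformed), Reason : '{}'", reason);
        let new = format!("Transaction Rejected (Malformed), Reason : '{reason}'");
        assert_eq!(old, new); // identical output, just captured inline
    }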
- -#![allow(clippy::module_name_repetitions)] #![cfg_attr(not(feature = "std"), no_std)] -#![allow(clippy::std_instead_of_core)] #[cfg(not(feature = "std"))] extern crate alloc; @@ -120,7 +117,7 @@ pub mod error { impl warp::Reply for &Error { fn into_response(self) -> warp::reply::Response { warp::reply::with_status( - format!("Transaction Rejected (Malformed), Reason : '{}'", self), + format!("Transaction Rejected (Malformed), Reason : '{self}'"), self.status_code(), ) .into_response() @@ -262,7 +259,6 @@ pub mod prelude { #[cfg(test)] mod tests { - #![allow(clippy::restriction)] use super::*; pub struct VersionedContainer(pub u8); diff --git a/wasm/.cargo/config.toml b/wasm/.cargo/config.toml deleted file mode 100644 index 00ec8ee28dc..00000000000 --- a/wasm/.cargo/config.toml +++ /dev/null @@ -1,5 +0,0 @@ -[build] -target = "wasm32-unknown-unknown" - -[target.wasm32-unknown-unknown] -runner = "webassembly-test-runner" diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml deleted file mode 100644 index 0320c154c9a..00000000000 --- a/wasm/Cargo.toml +++ /dev/null @@ -1,55 +0,0 @@ -[workspace.package] -edition = "2021" -version = "2.0.0-pre-rc.19" -# TODO: teams are being deprecated update the authors URL -authors = ["Iroha 2 team "] - -license = "Apache-2.0" - -[profile.dev] -panic = "abort" - -[profile.release] -panic = "abort" - -[workspace] -resolver = "2" -members = [ - "derive", - "validator", - "validator/derive", - "trigger", - "trigger/derive", -] - -[workspace.dependencies] -iroha_data_model = { version = "=2.0.0-pre-rc.19", path = "../data_model", default-features = false } -syn = { version = "1", default-features = false, features = ["full", "extra-traits", "derive"] } -quote = "1.0" -proc-macro2 = "1.0.49" - -webassembly-test = "0.1.0" - -[package] -name = "iroha_wasm" - -version.workspace = true -authors.workspace = true -edition.workspace = true - -license.workspace = true - -[features] -# Enables debugging tools such as `dbg()` and `DebugUnwrapExt` -debug = [] - -[dependencies] -iroha_data_model.workspace = true -iroha_wasm_derive = { version = "=2.0.0-pre-rc.19", path = "derive" } - -parity-scale-codec = { version = "3.1.5", default-features = false } -wee_alloc = "0.4.5" - -[dev-dependencies] -webassembly-test.workspace = true - diff --git a/wasm/lints.toml b/wasm/lints.toml deleted file mode 120000 index 883d2da34b1..00000000000 --- a/wasm/lints.toml +++ /dev/null @@ -1 +0,0 @@ -../lints.toml \ No newline at end of file diff --git a/wasm/src/lib.rs b/wasm/src/lib.rs deleted file mode 100644 index 74e27f17301..00000000000 --- a/wasm/src/lib.rs +++ /dev/null @@ -1,494 +0,0 @@ -//! 
API which simplifies writing of smartcontracts -#![no_std] -// Required because of `unsafe` code and `no_mangle` use -#![allow(unsafe_code)] - -#[cfg(all(not(test), not(target_pointer_width = "32")))] -compile_error!("Target architectures other then 32-bit are not supported"); - -#[cfg(all(not(test), not(all(target_arch = "wasm32", target_os = "unknown"))))] -compile_error!("Targets other then wasm32-unknown-unknown are not supported"); - -extern crate alloc; - -use alloc::{boxed::Box, collections::BTreeMap, format, vec::Vec}; -use core::ops::RangeFrom; - -#[cfg(not(test))] -use data_model::wasm::payloads; -use data_model::{ - isi::Instruction, - prelude::*, - query::{Query, QueryBox}, -}; -use debug::DebugExpectExt as _; -pub use iroha_data_model as data_model; -pub use iroha_wasm_derive::main; -use parity_scale_codec::{DecodeAll, Encode}; - -pub mod debug; -pub mod log; - -#[global_allocator] -static ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT; - -#[no_mangle] -extern "C" fn _iroha_wasm_alloc(len: usize) -> *const u8 { - if len == 0 { - debug::dbg_panic("Cannot allocate 0 bytes"); - } - let layout = core::alloc::Layout::array::(len).dbg_expect("Cannot allocate layout"); - // Safety: safe because `layout` is guaranteed to have non-zero size - unsafe { alloc::alloc::alloc_zeroed(layout) } -} - -/// # Safety -/// - `offset` is a pointer to a `[u8; len]` which is allocated in the WASM memory. -/// - This function can't call destructor of the encoded object. -#[no_mangle] -unsafe extern "C" fn _iroha_wasm_dealloc(offset: *mut u8, len: usize) { - let _box = Box::from_raw(core::slice::from_raw_parts_mut(offset, len)); -} - -/// Implementing instructions can be executed on the host -pub trait ExecuteOnHost: Instruction { - /// Execute instruction on the host - /// - /// # Errors - /// - /// - If instruction validation failed - /// - If instruction execution failed - fn execute(&self) -> Result<(), ValidationFail>; -} - -/// Implementing queries can be executed on the host -pub trait QueryHost: Query { - /// Execute query on the host - /// - /// # Errors - /// - /// - If query validation failed - /// - If query execution failed - fn execute(&self) -> Result; -} - -// TODO: Remove the Clone bound. It can be done by custom serialization to InstructionBox -impl + Encode + Clone> ExecuteOnHost for I { - fn execute(&self) -> Result<(), ValidationFail> { - #[cfg(not(test))] - use host::execute_instruction as host_execute_instruction; - #[cfg(test)] - use tests::_iroha_wasm_execute_instruction_mock as host_execute_instruction; - - // TODO: Redundant conversion into `InstructionBox` - let isi_box: InstructionBox = self.clone().into(); - // Safety: `host_execute_instruction` doesn't take ownership of it's pointer parameter - unsafe { - decode_with_length_prefix_from_raw(encode_and_execute( - &isi_box, - host_execute_instruction, - )) - } - } -} - -// TODO: Remove the Clone bound. 
It can be done by custom serialization/deserialization to QueryBox -impl + Encode + Clone> QueryHost for Q -where - Q::Output: DecodeAll, - >::Error: core::fmt::Debug, -{ - fn execute(&self) -> Result { - #[cfg(not(test))] - use host::execute_query as host_execute_query; - #[cfg(test)] - use tests::_iroha_wasm_execute_query_mock as host_execute_query; - - // TODO: Redundant conversion into `QueryBox` - let query_box: QueryBox = self.clone().into(); - // Safety: - `host_execute_query` doesn't take ownership of it's pointer parameter - // - ownership of the returned result is transferred into `_decode_from_raw` - let res: Result = unsafe { - decode_with_length_prefix_from_raw(encode_and_execute(&query_box, host_execute_query)) - }; - - res.map(|value| value.try_into().expect("Query returned invalid type")) - } -} - -/// World state view of the host -#[derive(Debug, Clone, Copy)] -pub struct Host; - -impl iroha_data_model::evaluate::ExpressionEvaluator for Host { - fn evaluate( - &self, - expression: &E, - ) -> Result { - expression.evaluate(&Context::new()) - } -} - -/// Context of expression evaluation -#[derive(Clone, Default)] -#[repr(transparent)] -pub struct Context { - values: BTreeMap, -} - -impl Context { - /// Create new [`Self`] - pub fn new() -> Self { - Self { - values: BTreeMap::new(), - } - } -} - -impl iroha_data_model::evaluate::Context for Context { - fn query(&self, query: &QueryBox) -> Result { - query.execute() - } - - fn get(&self, name: &Name) -> Option<&Value> { - self.values.get(name) - } - - fn update(&mut self, other: impl IntoIterator) { - self.values.extend(other) - } -} - -/// Get payload for smart contract `main()` entrypoint. -#[cfg(not(test))] -pub fn get_smart_contract_payload() -> payloads::SmartContract { - // Safety: ownership of the returned result is transferred into `_decode_from_raw` - unsafe { decode_with_length_prefix_from_raw(host::get_smart_contract_payload()) } -} - -/// Get payload for trigger `main()` entrypoint. -#[cfg(not(test))] -pub fn get_trigger_payload() -> payloads::Trigger { - // Safety: ownership of the returned result is transferred into `_decode_from_raw` - unsafe { decode_with_length_prefix_from_raw(host::get_trigger_payload()) } -} - -/// Get payload for `validate_transaction()` entrypoint. -/// -/// # Traps -/// -/// Host side will generate a trap if this function was called not from a -/// validator `validate_transaction()` entrypoint. -#[cfg(not(test))] -pub fn get_validate_transaction_payload() -> payloads::ValidateTransaction { - // Safety: ownership of the returned result is transferred into `_decode_from_raw` - unsafe { decode_with_length_prefix_from_raw(host::get_validate_transaction_payload()) } -} - -/// Get payload for `validate_instruction()` entrypoint. -/// -/// # Traps -/// -/// Host side will generate a trap if this function was called not from a -/// validator `validate_instruction()` entrypoint. -#[cfg(not(test))] -pub fn get_validate_instruction_payload() -> payloads::ValidateInstruction { - // Safety: ownership of the returned result is transferred into `_decode_from_raw` - unsafe { decode_with_length_prefix_from_raw(host::get_validate_instruction_payload()) } -} - -/// Get payload for `validate_query()` entrypoint. -/// -/// # Traps -/// -/// Host side will generate a trap if this function was called not from a -/// validator `validate_query()` entrypoint. 
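The removed `encode_with_length_prefix`/`decode_with_length_prefix_from_raw` pair below implements a small framing protocol over SCALE: the first `size_of::<usize>()` bytes carry the total frame length in little-endian, followed by the encoded payload. A std-only sketch of the same framing over byte slices (the function names mirror the removed ones, but this is not the original pointer-based code):

    use parity_scale_codec::{Decode, Encode};

    fn encode_with_length_prefix<T: Encode>(val: &T) -> Vec<u8> {
        let prefix = core::mem::size_of::<usize>();
        let mut buf = vec![0u8; prefix]; // reserve room for the length
        val.encode_to(&mut buf);
        let len = buf.len(); // total length, prefix included
        buf[..prefix].copy_from_slice(&len.to_le_bytes());
        buf
    }

    fn decode_with_length_prefix<T: Decode>(bytes: &[u8]) -> T {
        let prefix = core::mem::size_of::<usize>();
        let len = usize::from_le_bytes(bytes[..prefix].try_into().expect("short frame"));
        T::decode(&mut &bytes[prefix..len]).expect("malformed frame")
    }

    fn main() {
        let frame = encode_with_length_prefix(&(42_u32, String::from("hi")));
        let decoded: (u32, String) = decode_with_length_prefix(&frame);
        assert_eq!(decoded, (42, String::from("hi")));
    }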
-#[cfg(not(test))] -pub fn get_validate_query_payload() -> payloads::ValidateQuery { - // Safety: ownership of the returned result is transferred into `_decode_from_raw` - unsafe { decode_with_length_prefix_from_raw(host::get_validate_query_payload()) } -} - -/// Get payload for `migrate()` entrypoint. -/// -/// # Traps -/// -/// Host side will generate a trap if this function was called not from a -/// validator `migrate()` entrypoint. -#[cfg(not(test))] -pub fn get_migrate_payload() -> payloads::Migrate { - // Safety: ownership of the returned result is transferred into `_decode_from_raw` - unsafe { decode_with_length_prefix_from_raw(host::get_migrate_payload()) } -} - -/// Set new [`PermissionTokenSchema`]. -/// -/// # Errors -/// -/// - If execution on Iroha side failed -/// -/// # Traps -/// -/// Host side will generate a trap if this function was not called from a -/// validator's `migrate()` entrypoint. -#[cfg(not(test))] -pub fn set_permission_token_schema(schema: &data_model::permission::PermissionTokenSchema) { - // Safety: - ownership of the returned result is transferred into `_decode_from_raw` - unsafe { encode_and_execute(&schema, host::set_permission_token_schema) } -} - -#[cfg(not(test))] -mod host { - #[link(wasm_import_module = "iroha")] - extern "C" { - /// Execute encoded query by providing offset and length - /// into WebAssembly's linear memory where query is stored - /// - /// # Warning - /// - /// This function doesn't take ownership of the provided allocation - /// but it does transfer ownership of the result to the caller - pub(super) fn execute_query(ptr: *const u8, len: usize) -> *const u8; - - /// Execute encoded instruction by providing offset and length - /// into WebAssembly's linear memory where instruction is stored - /// - /// # Warning - /// - /// This function doesn't take ownership of the provided allocation - /// but it does transfer ownership of the result to the caller - pub(super) fn execute_instruction(ptr: *const u8, len: usize) -> *const u8; - - /// Get payload for smart contract `main()` entrypoint. - /// - /// # Warning - /// - /// This function does transfer ownership of the result to the caller - pub(super) fn get_smart_contract_payload() -> *const u8; - - /// Get payload for trigger `main()` entrypoint. - /// - /// # Warning - /// - /// This function does transfer ownership of the result to the caller - pub(super) fn get_trigger_payload() -> *const u8; - - /// Get payload for `validate_transaction()` entrypoint. - /// - /// # Warning - /// - /// This function does transfer ownership of the result to the caller - pub(super) fn get_validate_transaction_payload() -> *const u8; - - /// Get payload for `validate_instruction()` entrypoint. - /// - /// # Warning - /// - /// This function does transfer ownership of the result to the caller - pub(super) fn get_validate_instruction_payload() -> *const u8; - - /// Get payload for `validate_query()` entrypoint. - /// - /// # Warning - /// - /// This function does transfer ownership of the result to the caller - pub(super) fn get_validate_query_payload() -> *const u8; - - /// Get payload for `migrate()` entrypoint. - /// - /// # Warning - /// - /// This function does transfer ownership of the result to the caller - pub(super) fn get_migrate_payload() -> *const u8; - - /// Set new [`PermissionTokenSchema`]. - pub(super) fn set_permission_token_schema(ptr: *const u8, len: usize); - } -} - -/// Decode the object from given pointer where first element is the size of the object -/// following it. 
-/// This can be considered a custom encoding format.
-///
-/// # Warning
-///
-/// This method takes ownership of the given pointer
-///
-/// # Safety
-///
-/// It's safe to call this function as long as it's safe to construct, from the given
-/// pointer, a byte array of prefix length and a `Box<[u8]>` containing the encoded object
-unsafe fn decode_with_length_prefix_from_raw<T: DecodeAll>(ptr: *const u8) -> T {
-    let len_size_bytes = core::mem::size_of::<usize>();
-
-    let len = usize::from_le_bytes(
-        core::slice::from_raw_parts(ptr, len_size_bytes)
-            .try_into()
-            .expect("Prefix length size (bytes) incorrect. This is a bug."),
-    );
-
-    _decode_from_raw_in_range(ptr, len, len_size_bytes..)
-}
-
-/// Decode the object from given pointer and length
-///
-/// # Warning
-///
-/// This method takes ownership of the given pointer
-///
-/// # Safety
-///
-/// It's safe to call this function as long as it's safe to construct, from the given
-/// pointer, a `Box<[u8]>` containing the encoded object
-unsafe fn _decode_from_raw<T: DecodeAll>(ptr: *const u8, len: usize) -> T {
-    _decode_from_raw_in_range(ptr, len, 0..)
-}
-
-/// Decode the object from given pointer and length in the given range
-///
-/// # Warning
-///
-/// This method takes ownership of the given pointer
-///
-/// # Safety
-///
-/// It's safe to call this function as long as it's safe to construct, from the given
-/// pointer, a `Box<[u8]>` containing the encoded object
-unsafe fn _decode_from_raw_in_range<T: DecodeAll>(
-    ptr: *const u8,
-    len: usize,
-    range: RangeFrom<usize>,
-) -> T {
-    let bytes = Box::from_raw(core::slice::from_raw_parts_mut(ptr.cast_mut(), len));
-
-    #[allow(clippy::expect_fun_call)]
-    T::decode_all(&mut &bytes[range]).expect(
-        format!(
-            "Decoding of {} failed. This is a bug",
-            core::any::type_name::<T>()
-        )
-        .as_str(),
-    )
-}
-
-/// Encode the given object and call the given function with the pointer and length of the allocation
-///
-/// # Warning
-///
-/// Ownership of the returned allocation is transferred to the caller
-///
-/// # Safety
-///
-/// The given function must not take ownership of the pointer argument
-unsafe fn encode_and_execute<T: Encode, O>(
-    obj: &T,
-    fun: unsafe extern "C" fn(*const u8, usize) -> O,
-) -> O {
-    // NOTE: It's imperative that encoded object is stored on the heap
-    // because heap corresponds to linear memory when compiled to wasm
-    let bytes = obj.encode();
-
-    fun(bytes.as_ptr(), bytes.len())
-}
-
-/// Encode the given `val` as a vector of bytes with the size of the object at the beginning
-//
-// TODO: Write a separate crate for codec/protocol between Iroha and smartcontract
-pub fn encode_with_length_prefix<T: Encode>(val: &T) -> Box<[u8]> {
-    let len_size_bytes = core::mem::size_of::<usize>();
-
-    let mut r = Vec::with_capacity(
-        len_size_bytes
-            .checked_add(val.size_hint())
-            .dbg_expect("Overflow during length computation"),
-    );
-
-    // Reserve space for length
-    r.resize(len_size_bytes, 0);
-    val.encode_to(&mut r);
-
-    // Store length of the whole vector as byte array at the beginning of the vec
-    let len = r.len();
-    r[..len_size_bytes].copy_from_slice(&len.to_le_bytes());
-
-    r.into_boxed_slice()
-}
-
-/// Most used items
-pub mod prelude {
-    pub use crate::{debug::*, ExecuteOnHost, QueryHost};
-}
-
-#[cfg(test)]
-mod tests {
-    #![allow(clippy::restriction)]
-    #![allow(clippy::pedantic)]
-
-    use core::{mem::ManuallyDrop, slice};
-
-    use webassembly_test::webassembly_test;
-
-    use super::*;
-
-    const QUERY_RESULT: Result<Value, ValidationFail> =
-        Ok(Value::Numeric(NumericValue::U32(1234_u32)));
-    const ISI_RESULT: Result<(), ValidationFail> = Ok(());
-    const EXPRESSION_RESULT: NumericValue = NumericValue::U32(5_u32);
-
-    fn get_test_instruction() -> InstructionBox {
-        let new_account_id = "mad_hatter@wonderland".parse().expect("Valid");
-        let register_isi = RegisterBox::new(Account::new(new_account_id, []));
-
-        register_isi.into()
-    }
-
-    fn get_test_query() -> QueryBox {
-        let account_id: AccountId = "alice@wonderland".parse().expect("Valid");
-        FindAccountById::new(account_id).into()
-    }
-
-    fn get_test_expression() -> EvaluatesTo<NumericValue> {
-        Add::new(1_u32, 2_u32).into()
-    }
-
-    #[no_mangle]
-    pub unsafe extern "C" fn _iroha_wasm_execute_instruction_mock(
-        ptr: *const u8,
-        len: usize,
-    ) -> *const u8 {
-        let bytes = slice::from_raw_parts(ptr, len);
-        let instruction = InstructionBox::decode_all(&mut &*bytes);
-        assert_eq!(get_test_instruction(), instruction.unwrap());
-
-        ManuallyDrop::new(encode_with_length_prefix(&ISI_RESULT)).as_ptr()
-    }
-
-    #[no_mangle]
-    pub unsafe extern "C" fn _iroha_wasm_execute_query_mock(
-        ptr: *const u8,
-        len: usize,
-    ) -> *const u8 {
-        let bytes = slice::from_raw_parts(ptr, len);
-        let query = QueryBox::decode_all(&mut &*bytes).unwrap();
-        assert_eq!(query, get_test_query());
-
-        ManuallyDrop::new(encode_with_length_prefix(&QUERY_RESULT)).as_ptr()
-    }
-
-    #[webassembly_test]
-    fn execute_instruction() {
-        get_test_instruction().execute().unwrap();
-    }
-
-    #[webassembly_test]
-    fn execute_query() {
-        assert_eq!(get_test_query().execute(), QUERY_RESULT);
-    }
-
-    #[webassembly_test]
-    fn evaluate_expression() {
-        assert_eq!(
-            get_test_expression().evaluate(&Context::new()),
-            Ok(EXPRESSION_RESULT)
-        );
-    }
-}
diff --git a/wasm/trigger/src/lib.rs b/wasm/trigger/src/lib.rs
deleted file mode 100644
index 5915f89034f..00000000000
--- a/wasm/trigger/src/lib.rs
+++ /dev/null
@@ -1,13 +0,0 @@
-//! Iroha Trigger Rust SDK
-
-#![no_std]
-
-pub use iroha_trigger_derive::main;
-pub use iroha_wasm::{self, data_model, *};
-
-pub mod prelude {
-    //! Common imports used by triggers
-
-    pub use iroha_trigger_derive::main;
-    pub use iroha_wasm::{data_model::prelude::*, prelude::*};
-}
diff --git a/wasm/validator/Cargo.toml b/wasm/validator/Cargo.toml
deleted file mode 100644
index 607f70bc836..00000000000
--- a/wasm/validator/Cargo.toml
+++ /dev/null
@@ -1,22 +0,0 @@
-[package]
-name = "iroha_validator"
-
-version.workspace = true
-authors.workspace = true
-edition.workspace = true
-license.workspace = true
-
-[features]
-# Enables debugging tools such as `dbg()` and `DebugUnwrapExt`
-debug = ["iroha_wasm/debug"]
-
-[dependencies]
-iroha_wasm = { version = "2.0.0-pre-rc.19", path = ".." }
-iroha_schema = { path = "../../schema", version = "=2.0.0-pre-rc.19" }
-iroha_validator_derive = { version = "2.0.0-pre-rc.19", path = "derive" }
-
-serde = { version = "1.0.151", default-features = false }
-serde_json = { version = "1.0.91", default-features = false }
-
-[dev-dependencies]
-webassembly-test.workspace = true
diff --git a/wasm/validator/derive/src/lib.rs b/wasm/validator/derive/src/lib.rs
deleted file mode 100644
index 1d8c98f81df..00000000000
--- a/wasm/validator/derive/src/lib.rs
+++ /dev/null
@@ -1,187 +0,0 @@
-//! Crate with validator-related derive macros.
-
-#![allow(clippy::panic)]
-
-use proc_macro::TokenStream;
-use quote::quote;
-use syn::{parse_macro_input, parse_quote, DeriveInput};
-
-mod conversion;
-mod entrypoint;
-mod token;
-mod validate;
-
-/// Annotate the user-defined function that starts the execution of a validator.
-///
-/// There are 4 acceptable forms of this macro usage. See examples.
-///
-/// # Examples
-///
-/// ```ignore
-/// use iroha_validator::prelude::*;
-///
-/// #[entrypoint]
-/// pub fn migrate(block_height: u64) -> MigrationResult {
-///     todo!()
-/// }
-///
-/// #[entrypoint]
-/// pub fn validate_transaction(
-///     authority: AccountId,
-///     transaction: VersionedSignedTransaction,
-///     block_height: u64,
-/// ) -> Result {
-///     todo!()
-/// }
-///
-/// #[entrypoint]
-/// pub fn validate_instruction(authority: AccountId, instruction: InstructionBox, block_height: u64) -> Result {
-///     todo!()
-/// }
-///
-/// #[entrypoint]
-/// pub fn validate_query(authority: AccountId, query: QueryBox, block_height: u64) -> Result {
-///     todo!()
-/// }
-/// ```
-#[proc_macro_attribute]
-pub fn entrypoint(attr: TokenStream, item: TokenStream) -> TokenStream {
-    entrypoint::impl_entrypoint(attr, item)
-}
-
-/// Derive macro for the `Token` trait.
-///
-/// # Example
-///
-/// ```ignore
-/// use iroha_validator::{permission, prelude::*};
-///
-/// #[derive(Token, ValidateGrantRevoke, permission::derive_conversions::asset::Owner)]
-/// #[validate(permission::asset::Owner)]
-/// struct CanDoSomethingWithAsset {
-///     some_data: String,
-///     asset_id: AssetId,
-/// }
-///
-/// #[entrypoint(params = "[authority, operation]")]
-/// fn validate(authority: AccountId, operation: NeedsValidationBox) -> Result {
-///     let NeedsValidationBox::Instruction(instruction) = operation else {
-///         pass!();
-///     };
-///
-///     validate_grant_revoke!(<CanDoSomethingWithAsset>, (authority, instruction));
-///
-///     CanDoSomethingWithAsset {
-///         some_data: "some data".to_owned(),
-///         asset_id: parse!("rose#wonderland" as AssetId),
-///     }
-///     .is_owned_by(&authority)
-/// }
-/// ```
-#[proc_macro_derive(Token)]
-pub fn derive_token(input: TokenStream) -> TokenStream {
-    token::impl_derive_token(input)
-}
-
-/// Derive macro for the `ValidateGrantRevoke` trait.
-///
-/// # Attributes
-///
-/// This macro requires a `validate` attribute, or a group of `validate_grant` and `validate_revoke` attributes.
-///
-/// ## `validate` attribute
-///
-/// Use `validate` to specify a [*Pass Condition*](#pass-conditions) for both `Grant` and `Revoke`
-/// instruction validation.
-///
-/// ## `validate_grant` and `validate_revoke` attributes
-///
-/// Use `validate_grant` together with `validate_revoke` to specify *pass conditions* for
-/// `Grant` and `Revoke` instruction validation separately.
-///
-/// # Pass conditions
-///
-/// You can pass any type implementing the `iroha_validator::permission::PassCondition`
-/// and `From<&YourToken>` traits.
-///
-/// ## Builtin
-///
-/// There are some builtin pass conditions:
-///
-/// - `asset_definition::Owner` - checks if the authority is the asset definition owner;
-/// - `asset::Owner` - checks if the authority is the asset owner;
-/// - `account::Owner` - checks if the authority is the account owner;
-/// - `AlwaysPass` - checks nothing and always passes;
-/// - `OnlyGenesis` - checks that the block height is 0.
-///
-/// Also check out the `iroha_validator::permission::derive_conversions` module
-/// for conversion derive macros from your token to these *Pass Conditions*.
-///
-/// ## Why *Pass Conditions*?
-///
-/// With them you can easily derive one of the most popular implementations to remove boilerplate code.
-///
-/// ## Manual `ValidateGrantRevoke` implementation vs custom *Pass Condition*
-///
-/// The general advice is to use a custom *Pass Condition* if you need the same custom validation
-/// in multiple tokens. Otherwise, you can implement the `ValidateGrantRevoke` trait manually.
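The pass-condition contract itself is only named, not shown, in this diff. A hedged sketch of a hand-rolled condition, assuming a `validate(&self, &AccountId, u64) -> Result` shape for `PassCondition` (mirroring the `ValidateGrantRevoke` methods seen later in this file); `is_admin` is a hypothetical helper, not an Iroha API:

```rust
// Sketch only: `PassCondition`'s real signature is not visible in this diff.
pub struct OnlyAdmins;

impl permission::PassCondition for OnlyAdmins {
    fn validate(&self, authority: &AccountId, _block_height: u64) -> Result {
        // `is_admin` is a hypothetical, application-defined check.
        if is_admin(authority) {
            return Ok(());
        }
        Err(ValidationFail::NotPermitted(
            "Only admins can grant or revoke this token".to_owned(),
        ))
    }
}

// The derive-based flow requires `From<&YourToken>` for the pass condition.
impl From<&CanDoSomethingWithAsset> for OnlyAdmins {
    fn from(_token: &CanDoSomethingWithAsset) -> Self {
        Self
    }
}
```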
-///
-/// In the future there will be combinators like `&&` and `||` to combine multiple *Pass Conditions*.
-///
-/// # Example
-///
-/// See the [`Token`] derive macro example.
-//
-// TODO: Add combinators (#3255).
-// Example:
-//
-// ```
-// #[derive(Token, ValidateGrantRevoke)]
-// #[validate(Creator || Admin)]
-// pub struct CanDoSomethingWithAsset {
-//     ...
-// }
-// ```
-#[proc_macro_derive(
-    ValidateGrantRevoke,
-    attributes(validate, validate_grant, validate_revoke)
-)]
-pub fn derive_validate(input: TokenStream) -> TokenStream {
-    validate::impl_derive_validate(input)
-}
-
-/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion
-/// from your token to the `permission::asset_definition::Owner` type.
-///
-/// Requires an `asset_definition_id` field in the token.
-///
-/// Implements [`From`] for `permission::asset_definition::Owner`
-/// and not [`Into`] for your type. [`Into`] will be implemented automatically.
-#[proc_macro_derive(RefIntoAssetDefinitionOwner)]
-pub fn derive_ref_into_asset_definition_owner(input: TokenStream) -> TokenStream {
-    conversion::impl_derive_ref_into_asset_definition_owner(input)
-}
-
-/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion
-/// from your token to the `permission::asset::Owner` type.
-///
-/// Requires an `asset_id` field in the token.
-///
-/// Implements [`From`] for `permission::asset::Owner`
-/// and not [`Into`] for your type. [`Into`] will be implemented automatically.
-#[proc_macro_derive(RefIntoAssetOwner)]
-pub fn derive_ref_into_asset_owner(input: TokenStream) -> TokenStream {
-    conversion::impl_derive_ref_into_asset_owner(input)
-}
-
-/// Should be used together with the [`ValidateGrantRevoke`] derive macro to derive a conversion
-/// from your token to the `permission::account::Owner` type.
-///
-/// Requires an `account_id` field in the token.
-///
-/// Implements [`From`] for `permission::account::Owner`
-/// and not [`Into`] for your type. [`Into`] will be implemented automatically.
#[proc_macro_derive(RefIntoAccountOwner)]
-pub fn derive_ref_into_account_owner(input: TokenStream) -> TokenStream {
-    conversion::impl_derive_ref_into_account_owner(input)
-}
diff --git a/wasm/validator/src/default.rs b/wasm/validator/src/default.rs
deleted file mode 100644
index 364fc1b72c9..00000000000
--- a/wasm/validator/src/default.rs
+++ /dev/null
@@ -1,1606 +0,0 @@
-//!
Definition of Iroha default validator and accompanying validation functions
-#![allow(missing_docs, clippy::missing_errors_doc)]
-
-use alloc::{borrow::ToOwned as _, format, string::String};
-
-pub use account::{
-    visit_burn_account_public_key, visit_mint_account_public_key,
-    visit_mint_account_signature_check_condition, visit_remove_account_key_value,
-    visit_set_account_key_value, visit_unregister_account,
-};
-pub use asset::{
-    visit_burn_asset, visit_mint_asset, visit_register_asset, visit_remove_asset_key_value,
-    visit_set_asset_key_value, visit_transfer_asset, visit_unregister_asset,
-};
-pub use asset_definition::{
-    visit_remove_asset_definition_key_value, visit_set_asset_definition_key_value,
-    visit_transfer_asset_definition, visit_unregister_asset_definition,
-};
-pub use domain::{
-    visit_remove_domain_key_value, visit_set_domain_key_value, visit_unregister_domain,
-};
-pub use parameter::{visit_new_parameter, visit_set_parameter};
-pub use peer::visit_unregister_peer;
-pub use permission_token::{visit_grant_account_permission, visit_revoke_account_permission};
-pub use role::{
-    visit_grant_account_role, visit_register_role, visit_revoke_account_role,
-    visit_unregister_role,
-};
-pub use trigger::{
-    visit_execute_trigger, visit_mint_trigger_repetitions, visit_unregister_trigger,
-};
-pub use validator::visit_upgrade_validator;
-
-use super::*;
-use crate::{permission, permission::Token as _, prelude::*};
-
-macro_rules! evaluate_expr {
-    ($visitor:ident, $authority:ident, <$isi:ident as $isi_type:ty>::$field:ident()) => {{
-        $visitor.visit_expression($authority, $isi.$field());
-
-        $visitor.evaluate($isi.$field()).expect(&alloc::format!(
-            "Failed to evaluate field '{}::{}'",
-            stringify!($isi_type),
-            stringify!($field),
-        ))
-    }};
-}
-
-/// Apply the `callback` macro to all token types from this crate.
-///
-/// The callback technique is used because of macro expansion order. With this technique we can
-/// apply a callback to token types declared in other modules.
-///
-/// # WARNING !!!
-///
-/// If you add a new module with tokens, don't forget to add it here!
-macro_rules! map_all_crate_tokens {
-    ($callback:ident) => {
-        $crate::default::account::map_tokens!($callback);
-        $crate::default::asset::map_tokens!($callback);
-        $crate::default::asset_definition::map_tokens!($callback);
-        $crate::default::domain::map_tokens!($callback);
-        $crate::default::parameter::map_tokens!($callback);
-        $crate::default::peer::map_tokens!($callback);
-        $crate::default::role::map_tokens!($callback);
-        $crate::default::trigger::map_tokens!($callback);
-        $crate::default::validator::map_tokens!($callback);
-    };
-}
-
-macro_rules! token {
-    ($($meta:meta)* $item:item) => {
-        #[derive(PartialEq, Eq, serde::Serialize, serde::Deserialize)]
-        #[derive(iroha_schema::IntoSchema)]
-        #[derive(Clone, Token)]
-        $($meta)*
-        $item
-    };
-}
-
-pub(crate) use map_all_crate_tokens;
-
-pub fn default_permission_token_schema() -> PermissionTokenSchema {
-    let mut schema = iroha_validator::PermissionTokenSchema::default();
-
-    macro_rules! add_to_schema {
-        ($token_ty:ty) => {
-            schema.insert::<$token_ty>();
-        };
-    }
-
-    iroha_validator::default::map_all_crate_tokens!(add_to_schema);
-
-    schema
-}
-
-/// Default validation for [`VersionedSignedTransaction`].
-///
-/// # Warning
-///
-/// Each instruction is executed in sequence following successful validation.
-/// [`Executable::Wasm`] is not executed because it is validated on the host side.
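The `map_all_crate_tokens!`/`map_tokens!` pair above is an instance of the macro callback pattern. A self-contained miniature of the pattern, not Iroha-specific, to make the expansion-order trick clearer:

```rust
// The callback pattern in miniature: a "registry" macro applies whatever
// callback macro it is handed to each of its items, so downstream code can
// act on items it cannot enumerate at its own expansion site.
macro_rules! map_tokens {
    ($callback:ident) => {
        $callback!(TokenA);
        $callback!(TokenB);
    };
}

struct TokenA;
struct TokenB;

trait Marker {}

// The callback must be defined before the registry macro is invoked with it.
macro_rules! impl_marker {
    ($ty:ty) => {
        impl Marker for $ty {}
    };
}

map_tokens!(impl_marker); // expands to `impl Marker for TokenA {}`, etc.

fn main() {
    fn assert_marker<T: Marker>() {}
    assert_marker::<TokenA>();
    assert_marker::<TokenB>();
}
```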
-pub fn visit_transaction( - validator: &mut V, - authority: &AccountId, - transaction: &VersionedSignedTransaction, -) { - match transaction.payload().instructions() { - Executable::Wasm(wasm) => validator.visit_wasm(authority, wasm), - Executable::Instructions(instructions) => { - for isi in instructions { - if validator.verdict().is_ok() { - validator.visit_instruction(authority, isi); - } - } - } - } -} - -/// Default validation for [`InstructionBox`]. -/// -/// # Warning -/// -/// Instruction is executed following successful validation -pub fn visit_instruction( - validator: &mut V, - authority: &AccountId, - isi: &InstructionBox, -) { - macro_rules! isi_validators { - ( - single {$( - $validator:ident($isi:ident) - ),+ $(,)?} - composite {$( - $composite_validator:ident($composite_isi:ident) - ),+ $(,)?} - ) => { - match isi { - InstructionBox::NewParameter(isi) => { - let parameter = evaluate_expr!(validator, authority, ::parameter()); - validator.visit_new_parameter(authority, NewParameter{parameter}); - - if validator.verdict().is_ok() { - isi_validators!(@execute isi); - } - } - InstructionBox::SetParameter(isi) => { - let parameter = evaluate_expr!(validator, authority, ::parameter()); - validator.visit_set_parameter(authority, SetParameter{parameter}); - - if validator.verdict().is_ok() { - isi_validators!(@execute isi); - } - } - InstructionBox::ExecuteTrigger(isi) => { - let trigger_id = evaluate_expr!(validator, authority, ::trigger_id()); - validator.visit_execute_trigger(authority, ExecuteTrigger{trigger_id}); - - if validator.verdict().is_ok() { - isi_validators!(@execute isi); - } - } - InstructionBox::Log(isi) => { - let msg = evaluate_expr!(validator, authority, ::msg()); - let level = evaluate_expr!(validator, authority, ::level()); - validator.visit_log(authority, Log{level, msg}); - - if validator.verdict().is_ok() { - isi_validators!(@execute isi); - } - } $( - InstructionBox::$isi(isi) => { - validator.$validator(authority, isi); - - if validator.verdict().is_ok() { - isi_validators!(@execute isi); - } - } )+ $( - // NOTE: `visit_and_execute_instructions` is reentrant, so don't execute composite instructions - InstructionBox::$composite_isi(isi) => validator.$composite_validator(authority, isi), )+ - } - }; - (@execute $isi:ident) => { - // TODO: Execution should be infallible after successful validation - if let Err(err) = isi.execute() { - validator.deny(err); - } - } - } - - isi_validators! { - single { - visit_burn(Burn), - visit_fail(Fail), - visit_grant(Grant), - visit_mint(Mint), - visit_register(Register), - visit_remove_key_value(RemoveKeyValue), - visit_revoke(Revoke), - visit_set_key_value(SetKeyValue), - visit_transfer(Transfer), - visit_unregister(Unregister), - visit_upgrade(Upgrade), - } - - composite { - visit_sequence(Sequence), - visit_pair(Pair), - visit_if(If), - } - } -} - -pub fn visit_unsupported( - validator: &mut V, - _authority: &AccountId, - isi: T, -) { - deny!(validator, "{isi:?}: Unsupported operation"); -} - -pub fn visit_expression( - validator: &mut V, - authority: &AccountId, - expression: &EvaluatesTo, -) { - macro_rules! 
visit_binary_expression { - ($e:ident) => {{ - validator.visit_expression(authority, $e.left()); - - if validator.verdict().is_ok() { - validator.visit_expression(authority, $e.right()); - } - }}; - } - - match expression.expression() { - Expression::Add(expr) => visit_binary_expression!(expr), - Expression::Subtract(expr) => visit_binary_expression!(expr), - Expression::Multiply(expr) => visit_binary_expression!(expr), - Expression::Divide(expr) => visit_binary_expression!(expr), - Expression::Mod(expr) => visit_binary_expression!(expr), - Expression::RaiseTo(expr) => visit_binary_expression!(expr), - Expression::Greater(expr) => visit_binary_expression!(expr), - Expression::Less(expr) => visit_binary_expression!(expr), - Expression::Equal(expr) => visit_binary_expression!(expr), - Expression::Not(expr) => validator.visit_expression(authority, expr.expression()), - Expression::And(expr) => visit_binary_expression!(expr), - Expression::Or(expr) => visit_binary_expression!(expr), - Expression::If(expr) => { - validator.visit_expression(authority, expr.condition()); - - if validator.verdict().is_ok() { - validator.visit_expression(authority, expr.then()); - } - - if validator.verdict().is_ok() { - validator.visit_expression(authority, expr.otherwise()); - } - } - Expression::Contains(expr) => { - validator.visit_expression(authority, expr.collection()); - - if validator.verdict().is_ok() { - validator.visit_expression(authority, expr.element()); - } - } - Expression::ContainsAll(expr) => { - validator.visit_expression(authority, expr.collection()); - - if validator.verdict().is_ok() { - validator.visit_expression(authority, expr.elements()); - } - } - Expression::ContainsAny(expr) => { - validator.visit_expression(authority, expr.collection()); - - if validator.verdict().is_ok() { - validator.visit_expression(authority, expr.elements()); - } - } - Expression::Where(expr) => validator.visit_expression(authority, expr.expression()), - Expression::Query(query) => validator.visit_query(authority, query), - Expression::ContextValue(_) | Expression::Raw(_) => (), - } -} - -pub fn visit_if(validator: &mut V, authority: &AccountId, isi: &Conditional) { - let condition = evaluate_expr!(validator, authority, ::condition()); - - // TODO: Do we have to make sure both branches are syntactically valid? - if condition { - validator.visit_instruction(authority, isi.then()); - } else if let Some(otherwise) = isi.otherwise() { - validator.visit_instruction(authority, otherwise); - } -} - -pub fn visit_pair(validator: &mut V, authority: &AccountId, isi: &Pair) { - validator.visit_instruction(authority, isi.left_instruction()); - - if validator.verdict().is_ok() { - validator.visit_instruction(authority, isi.right_instruction()) - } -} - -pub fn visit_sequence( - validator: &mut V, - authority: &AccountId, - sequence: &SequenceBox, -) { - for isi in sequence.instructions() { - if validator.verdict().is_ok() { - validator.visit_instruction(authority, isi); - } - } -} - -pub mod peer { - use super::*; - - declare_tokens! { - crate::default::peer::tokens::CanUnregisterAnyPeer, - } - - pub mod tokens { - use super::*; - - token! 
{ - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanUnregisterAnyPeer; - } - } - - #[allow(clippy::needless_pass_by_value)] - pub fn visit_unregister_peer( - validator: &mut V, - authority: &AccountId, - _isi: Unregister, - ) { - if is_genesis(validator) { - pass!(validator); - } - if tokens::CanUnregisterAnyPeer.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't unregister peer"); - } -} - -pub mod domain { - use super::*; - - declare_tokens! { - crate::default::domain::tokens::CanUnregisterDomain, - crate::default::domain::tokens::CanSetKeyValueInDomain, - crate::default::domain::tokens::CanRemoveKeyValueInDomain, - } - - pub mod tokens { - // TODO: We probably need a better way to allow accounts to modify domains. - use super::*; - - token! { - #[derive(ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanUnregisterDomain { - pub domain_id: DomainId, - } - } - - token! { - #[derive(ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanSetKeyValueInDomain { - pub domain_id: DomainId, - } - } - - token! { - #[derive(ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanRemoveKeyValueInDomain { - pub domain_id: DomainId, - } - } - } - - pub fn visit_unregister_domain( - validator: &mut V, - authority: &AccountId, - isi: Unregister, - ) { - let domain_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - let can_unregister_domain_token = tokens::CanUnregisterDomain { domain_id }; - if can_unregister_domain_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't unregister domain"); - } - - pub fn visit_set_domain_key_value( - validator: &mut V, - authority: &AccountId, - isi: SetKeyValue, - ) { - let domain_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - let can_set_key_value_in_domain_token = tokens::CanSetKeyValueInDomain { domain_id }; - if can_set_key_value_in_domain_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't set key value in domain metadata"); - } - - pub fn visit_remove_domain_key_value( - validator: &mut V, - authority: &AccountId, - isi: RemoveKeyValue, - ) { - let domain_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - let can_remove_key_value_in_domain_token = tokens::CanRemoveKeyValueInDomain { domain_id }; - if can_remove_key_value_in_domain_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't remove key value in domain metadata"); - } -} - -pub mod account { - use super::*; - - declare_tokens! { - crate::default::account::tokens::CanUnregisterAccount, - crate::default::account::tokens::CanMintUserPublicKeys, - crate::default::account::tokens::CanBurnUserPublicKeys, - crate::default::account::tokens::CanMintUserSignatureCheckConditions, - crate::default::account::tokens::CanSetKeyValueInUserAccount, - crate::default::account::tokens::CanRemoveKeyValueInUserAccount, - } - - pub mod tokens { - use super::*; - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] - #[validate(permission::account::Owner)] - pub struct CanUnregisterAccount { - pub account_id: AccountId, - } - } - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] - #[validate(permission::account::Owner)] - pub struct CanMintUserPublicKeys { - pub account_id: AccountId, - } - } - token! 
{ - #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] - #[validate(permission::account::Owner)] - pub struct CanBurnUserPublicKeys { - pub account_id: AccountId, - } - } - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] - #[validate(permission::account::Owner)] - pub struct CanMintUserSignatureCheckConditions { - pub account_id: AccountId, - } - } - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] - #[validate(permission::account::Owner)] - pub struct CanSetKeyValueInUserAccount { - pub account_id: AccountId, - } - } - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::account::Owner)] - #[validate(permission::account::Owner)] - pub struct CanRemoveKeyValueInUserAccount { - pub account_id: AccountId, - } - } - } - - pub fn visit_unregister_account( - validator: &mut V, - authority: &AccountId, - isi: Unregister, - ) { - let account_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - if account_id == *authority { - pass!(validator); - } - - let can_unregister_user_account = tokens::CanUnregisterAccount { account_id }; - if can_unregister_user_account.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't unregister another account"); - } - - pub fn visit_mint_account_public_key( - validator: &mut V, - authority: &AccountId, - isi: Mint, - ) { - let account_id = isi.destination_id; - - if is_genesis(validator) { - pass!(validator); - } - if account_id == *authority { - pass!(validator); - } - let can_mint_user_public_keys = tokens::CanMintUserPublicKeys { account_id }; - if can_mint_user_public_keys.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't mint public keys of another account"); - } - - pub fn visit_burn_account_public_key( - validator: &mut V, - authority: &AccountId, - isi: Burn, - ) { - let account_id = isi.destination_id; - - if is_genesis(validator) { - pass!(validator); - } - if account_id == *authority { - pass!(validator); - } - let can_burn_user_public_keys = tokens::CanBurnUserPublicKeys { account_id }; - if can_burn_user_public_keys.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't burn public keys of another account"); - } - - pub fn visit_mint_account_signature_check_condition( - validator: &mut V, - authority: &AccountId, - isi: Mint, - ) { - let account_id = isi.destination_id; - - if is_genesis(validator) { - pass!(validator); - } - if account_id == *authority { - pass!(validator); - } - let can_mint_user_signature_check_conditions_token = - tokens::CanMintUserSignatureCheckConditions { account_id }; - if can_mint_user_signature_check_conditions_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't mint signature check conditions of another account" - ); - } - - pub fn visit_set_account_key_value( - validator: &mut V, - authority: &AccountId, - isi: SetKeyValue, - ) { - let account_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - if account_id == *authority { - pass!(validator); - } - let can_set_key_value_in_user_account_token = - tokens::CanSetKeyValueInUserAccount { account_id }; - if can_set_key_value_in_user_account_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't set value to the metadata of another account" - ); - } - - pub fn visit_remove_account_key_value( - validator: &mut V, - authority: &AccountId, - isi: RemoveKeyValue, - ) { 
- let account_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - if account_id == *authority { - pass!(validator); - } - let can_remove_key_value_in_user_account_token = - tokens::CanRemoveKeyValueInUserAccount { account_id }; - if can_remove_key_value_in_user_account_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't remove value from the metadata of another account" - ); - } -} - -pub mod asset_definition { - use super::*; - - declare_tokens! { - crate::default::asset_definition::tokens::CanUnregisterAssetDefinition, - crate::default::asset_definition::tokens::CanSetKeyValueInAssetDefinition, - crate::default::asset_definition::tokens::CanRemoveKeyValueInAssetDefinition, - } - - pub mod tokens { - use super::*; - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanUnregisterAssetDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanSetKeyValueInAssetDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanRemoveKeyValueInAssetDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - } - - pub(super) fn is_asset_definition_owner( - asset_definition_id: &AssetDefinitionId, - authority: &AccountId, - ) -> Result { - IsAssetDefinitionOwner::new(asset_definition_id.clone(), authority.clone()).execute() - } - - pub fn visit_unregister_asset_definition( - validator: &mut V, - authority: &AccountId, - isi: Unregister, - ) { - let asset_definition_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - match is_asset_definition_owner(&asset_definition_id, authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_unregister_asset_definition_token = tokens::CanUnregisterAssetDefinition { - asset_definition_id, - }; - if can_unregister_asset_definition_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't unregister assets registered by other accounts" - ); - } - - pub fn visit_transfer_asset_definition( - validator: &mut V, - authority: &AccountId, - isi: Transfer, - ) { - let source_id = isi.source_id; - let destination_id = isi.object; - - if is_genesis(validator) { - pass!(validator); - } - if &source_id == authority { - pass!(validator); - } - match is_asset_definition_owner(destination_id.id(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - - deny!( - validator, - "Can't transfer asset definition of another account" - ); - } - - pub fn visit_set_asset_definition_key_value( - validator: &mut V, - authority: &AccountId, - isi: SetKeyValue, - ) { - let asset_definition_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - match is_asset_definition_owner(&asset_definition_id, authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_set_key_value_in_asset_definition_token = tokens::CanSetKeyValueInAssetDefinition { - asset_definition_id, - }; - if 
can_set_key_value_in_asset_definition_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't set value to the asset definition metadata created by another account" - ); - } - - pub fn visit_remove_asset_definition_key_value( - validator: &mut V, - authority: &AccountId, - isi: RemoveKeyValue, - ) { - let asset_definition_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - match is_asset_definition_owner(&asset_definition_id, authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_remove_key_value_in_asset_definition_token = - tokens::CanRemoveKeyValueInAssetDefinition { - asset_definition_id, - }; - if can_remove_key_value_in_asset_definition_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't remove value from the asset definition metadata created by another account" - ); - } -} - -pub mod asset { - use super::*; - - declare_tokens! { - crate::default::asset::tokens::CanRegisterAssetsWithDefinition, - crate::default::asset::tokens::CanUnregisterAssetsWithDefinition, - crate::default::asset::tokens::CanUnregisterUserAsset, - crate::default::asset::tokens::CanBurnAssetsWithDefinition, - crate::default::asset::tokens::CanBurnUserAsset, - crate::default::asset::tokens::CanMintAssetsWithDefinition, - crate::default::asset::tokens::CanTransferAssetsWithDefinition, - crate::default::asset::tokens::CanTransferUserAsset, - crate::default::asset::tokens::CanSetKeyValueInUserAsset, - crate::default::asset::tokens::CanRemoveKeyValueInUserAsset, - } - - pub mod tokens { - use super::*; - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanRegisterAssetsWithDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanUnregisterAssetsWithDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] - #[validate(permission::asset::Owner)] - pub struct CanUnregisterUserAsset { - pub asset_id: AssetId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanBurnAssetsWithDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] - #[validate(permission::asset::Owner)] - pub struct CanBurnUserAsset { - pub asset_id: AssetId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanMintAssetsWithDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset_definition::Owner)] - #[validate(permission::asset_definition::Owner)] - pub struct CanTransferAssetsWithDefinition { - pub asset_definition_id: AssetDefinitionId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] - #[validate(permission::asset::Owner)] - pub struct CanTransferUserAsset { - pub asset_id: AssetId, - } - } - - token! 
{ - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] - #[validate(permission::asset::Owner)] - pub struct CanSetKeyValueInUserAsset { - pub asset_id: AssetId, - } - } - - token! { - #[derive(ValidateGrantRevoke, permission::derive_conversions::asset::Owner)] - #[validate(permission::asset::Owner)] - pub struct CanRemoveKeyValueInUserAsset { - pub asset_id: AssetId, - } - } - } - - fn is_asset_owner(asset_id: &AssetId, authority: &AccountId) -> bool { - asset_id.account_id() == authority - } - - pub fn visit_register_asset( - validator: &mut V, - authority: &AccountId, - isi: Register, - ) { - let asset = isi.object; - - if is_genesis(validator) { - pass!(validator); - } - match asset_definition::is_asset_definition_owner(asset.id().definition_id(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_register_assets_with_definition_token = tokens::CanRegisterAssetsWithDefinition { - asset_definition_id: asset.id().definition_id().clone(), - }; - if can_register_assets_with_definition_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't register assets with definitions registered by other accounts" - ); - } - - pub fn visit_unregister_asset( - validator: &mut V, - authority: &AccountId, - isi: Unregister, - ) { - let asset_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - if is_asset_owner(&asset_id, authority) { - pass!(validator); - } - match asset_definition::is_asset_definition_owner(asset_id.definition_id(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_unregister_assets_with_definition_token = - tokens::CanUnregisterAssetsWithDefinition { - asset_definition_id: asset_id.definition_id().clone(), - }; - if can_unregister_assets_with_definition_token.is_owned_by(authority) { - pass!(validator); - } - let can_unregister_user_asset_token = tokens::CanUnregisterUserAsset { asset_id }; - if can_unregister_user_asset_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't unregister asset from another account"); - } - - pub fn visit_mint_asset( - validator: &mut V, - authority: &AccountId, - isi: Mint, - ) { - let asset_id = isi.destination_id; - - if is_genesis(validator) { - pass!(validator); - } - match asset_definition::is_asset_definition_owner(asset_id.definition_id(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_mint_assets_with_definition_token = tokens::CanMintAssetsWithDefinition { - asset_definition_id: asset_id.definition_id().clone(), - }; - if can_mint_assets_with_definition_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't mint assets with definitions registered by other accounts" - ); - } - - pub fn visit_burn_asset( - validator: &mut V, - authority: &AccountId, - isi: Burn, - ) { - let asset_id = isi.destination_id; - - if is_genesis(validator) { - pass!(validator); - } - if is_asset_owner(&asset_id, authority) { - pass!(validator); - } - match asset_definition::is_asset_definition_owner(asset_id.definition_id(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_burn_assets_with_definition_token = tokens::CanBurnAssetsWithDefinition { - asset_definition_id: asset_id.definition_id().clone(), - }; - if can_burn_assets_with_definition_token.is_owned_by(authority) { - 
pass!(validator); - } - let can_burn_user_asset_token = tokens::CanBurnUserAsset { asset_id }; - if can_burn_user_asset_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't burn assets from another account"); - } - - pub fn visit_transfer_asset( - validator: &mut V, - authority: &AccountId, - isi: Transfer, - ) { - let asset_id = isi.source_id; - - if is_genesis(validator) { - pass!(validator); - } - if is_asset_owner(&asset_id, authority) { - pass!(validator); - } - match asset_definition::is_asset_definition_owner(asset_id.definition_id(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_transfer_assets_with_definition_token = tokens::CanTransferAssetsWithDefinition { - asset_definition_id: asset_id.definition_id().clone(), - }; - if can_transfer_assets_with_definition_token.is_owned_by(authority) { - pass!(validator); - } - let can_transfer_user_asset_token = tokens::CanTransferUserAsset { asset_id }; - if can_transfer_user_asset_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't transfer assets of another account"); - } - - pub fn visit_set_asset_key_value( - validator: &mut V, - authority: &AccountId, - isi: SetKeyValue, - ) { - let asset_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - if is_asset_owner(&asset_id, authority) { - pass!(validator); - } - - let can_set_key_value_in_user_asset_token = tokens::CanSetKeyValueInUserAsset { asset_id }; - if can_set_key_value_in_user_asset_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't set value to the asset metadata of another account" - ); - } - - pub fn visit_remove_asset_key_value( - validator: &mut V, - authority: &AccountId, - isi: RemoveKeyValue, - ) { - let asset_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - if is_asset_owner(&asset_id, authority) { - pass!(validator); - } - let can_remove_key_value_in_user_asset_token = - tokens::CanRemoveKeyValueInUserAsset { asset_id }; - if can_remove_key_value_in_user_asset_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't remove value from the asset metadata of another account" - ); - } -} - -pub mod parameter { - use permission::ValidateGrantRevoke; - - use super::*; - - declare_tokens!( - crate::default::parameter::tokens::CanGrantPermissionToCreateParameters, - crate::default::parameter::tokens::CanRevokePermissionToCreateParameters, - crate::default::parameter::tokens::CanCreateParameters, - crate::default::parameter::tokens::CanGrantPermissionToSetParameters, - crate::default::parameter::tokens::CanRevokePermissionToSetParameters, - crate::default::parameter::tokens::CanSetParameters, - ); - - pub mod tokens { - use super::*; - - token! { - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanGrantPermissionToCreateParameters; - } - - token! { - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanRevokePermissionToCreateParameters; - } - - token! 
{ - #[derive(Copy)] - pub struct CanCreateParameters; - } - - impl ValidateGrantRevoke for CanCreateParameters { - fn validate_grant(&self, authority: &AccountId, _block_height: u64) -> Result { - if CanGrantPermissionToCreateParameters.is_owned_by(authority) { - return Ok(()); - } - - Err(ValidationFail::NotPermitted( - "Can't grant permission to create new configuration parameters outside genesis without permission from genesis" - .to_owned() - )) - } - - fn validate_revoke(&self, authority: &AccountId, _block_height: u64) -> Result { - if CanGrantPermissionToCreateParameters.is_owned_by(authority) { - return Ok(()); - } - - Err(ValidationFail::NotPermitted( - "Can't revoke permission to create new configuration parameters outside genesis without permission from genesis" - .to_owned() - )) - } - } - - token! { - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanGrantPermissionToSetParameters; - } - - token! { - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanRevokePermissionToSetParameters; - } - - token! { - #[derive(Copy)] - pub struct CanSetParameters; - } - - impl ValidateGrantRevoke for CanSetParameters { - fn validate_grant(&self, authority: &AccountId, _block_height: u64) -> Result { - if CanGrantPermissionToSetParameters.is_owned_by(authority) { - return Ok(()); - } - - Err(ValidationFail::NotPermitted( - "Can't grant permission to set configuration parameters outside genesis without permission from genesis" - .to_owned() - )) - } - - fn validate_revoke(&self, authority: &AccountId, _block_height: u64) -> Result { - if CanRevokePermissionToSetParameters.is_owned_by(authority) { - return Ok(()); - } - - Err(ValidationFail::NotPermitted( - "Can't revoke permission to set configuration parameters outside genesis without permission from genesis" - .to_owned() - )) - } - } - } - - #[allow(clippy::needless_pass_by_value)] - pub fn visit_new_parameter( - validator: &mut V, - authority: &AccountId, - _isi: NewParameter, - ) { - if is_genesis(validator) { - pass!(validator); - } - if tokens::CanCreateParameters.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't create new configuration parameters outside genesis without permission" - ); - } - - #[allow(clippy::needless_pass_by_value)] - pub fn visit_set_parameter( - validator: &mut V, - authority: &AccountId, - _isi: SetParameter, - ) { - if is_genesis(validator) { - pass!(validator); - } - if tokens::CanSetParameters.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't set configuration parameters without permission" - ); - } -} - -pub mod role { - use super::*; - - declare_tokens! { - crate::default::role::tokens::CanUnregisterAnyRole, - } - - pub mod tokens { - use super::*; - - token! { - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanUnregisterAnyRole; - } - } - - macro_rules! impl_validate { - ($validator:ident, $isi:ident, $authority:ident, $method:ident) => { - let role_id = $isi.object; - - let find_role_query_res = match FindRoleByRoleId::new(role_id).execute() { - Ok(res) => res, - Err(error) => { - deny!($validator, error); - } - }; - let role = Role::try_from(find_role_query_res) - .dbg_expect("Failed to convert `FindRoleByRoleId` query result to `Role`"); - - for token in role.permissions() { - macro_rules! 
visit_internal { - ($token_ty:ty) => { - if let Ok(concrete_token) = - <$token_ty as TryFrom<_>>::try_from(token.clone()) - { - if is_genesis($validator) { - continue; - } - if let Err(error) = - <$token_ty as permission::ValidateGrantRevoke>::$method( - &concrete_token, - $authority, - $validator.block_height(), - ) - { - deny!($validator, error); - } - - // Continue because token can correspond to only one concrete token - continue; - } - }; - } - - map_all_crate_tokens!(visit_internal); - deny!( - $validator, - "Incorrect validator implementation: Role contains unknown permission tokens" - ) - } - - pass!($validator); - }; - } - - #[allow(clippy::needless_pass_by_value)] - pub fn visit_register_role( - validator: &mut V, - _authority: &AccountId, - isi: Register, - ) { - let mut unknown_tokens = Vec::new(); - - let role = isi.object.inner(); - for token in role.permissions() { - iroha_wasm::debug!(&format!("Checking `{token:?}`")); - macro_rules! try_from_token { - ($token_ty:ty) => { - iroha_wasm::debug!(concat!("Trying `", stringify!($token_ty), "`")); - if <$token_ty as TryFrom<_>>::try_from(token.clone()).is_ok() { - iroha_wasm::debug!("Success!"); - // Continue because token can correspond to only one concrete token - continue; - } - }; - } - - map_all_crate_tokens!(try_from_token); - unknown_tokens.push(token); - } - - if !unknown_tokens.is_empty() { - deny!( - validator, - ValidationFail::NotPermitted(format!( - "{unknown_tokens:?}: Unrecognised permission tokens" - )) - ); - } - - pass!(validator); - } - - #[allow(clippy::needless_pass_by_value)] - pub fn visit_unregister_role( - validator: &mut V, - authority: &AccountId, - _isi: Unregister, - ) { - if is_genesis(validator) { - pass!(validator); - } - if tokens::CanUnregisterAnyRole.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't unregister role"); - } - - pub fn visit_grant_account_role( - validator: &mut V, - authority: &AccountId, - isi: Grant, - ) { - impl_validate!(validator, isi, authority, validate_grant); - } - - pub fn visit_revoke_account_role( - validator: &mut V, - authority: &AccountId, - isi: Revoke, - ) { - impl_validate!(validator, isi, authority, validate_revoke); - } -} - -pub mod trigger { - use permission::trigger::is_trigger_owner; - - use super::*; - - macro_rules! impl_froms { - ($($name:path),+ $(,)?) => {$( - impl<'token> From<&'token $name> for permission::trigger::Owner<'token> { - fn from(value: &'token $name) -> Self { - Self { - trigger_id: &value.trigger_id, - } - } - } - )+}; - } - - declare_tokens! { - crate::default::trigger::tokens::CanExecuteUserTrigger, - crate::default::trigger::tokens::CanUnregisterUserTrigger, - crate::default::trigger::tokens::CanMintUserTrigger, - } - - pub mod tokens { - use super::*; - - token! { - #[derive(ValidateGrantRevoke)] - #[validate(permission::trigger::Owner)] - pub struct CanExecuteUserTrigger { - pub trigger_id: TriggerId, - } - } - - token! { - #[derive(ValidateGrantRevoke)] - #[validate(permission::trigger::Owner)] - pub struct CanUnregisterUserTrigger { - pub trigger_id: TriggerId, - } - } - - token! 
{ - #[derive(ValidateGrantRevoke)] - #[validate(permission::trigger::Owner)] - pub struct CanMintUserTrigger { - pub trigger_id: TriggerId, - } - } - } - - impl_froms!( - tokens::CanExecuteUserTrigger, - tokens::CanUnregisterUserTrigger, - tokens::CanMintUserTrigger, - ); - - pub fn visit_unregister_trigger( - validator: &mut V, - authority: &AccountId, - isi: Unregister>, - ) { - let trigger_id = isi.object_id; - - if is_genesis(validator) { - pass!(validator); - } - match is_trigger_owner(trigger_id.clone(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_unregister_user_trigger_token = tokens::CanUnregisterUserTrigger { trigger_id }; - if can_unregister_user_trigger_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't unregister trigger owned by another account" - ); - } - - pub fn visit_mint_trigger_repetitions( - validator: &mut V, - authority: &AccountId, - isi: Mint, u32>, - ) { - let trigger_id = isi.destination_id; - - if is_genesis(validator) { - pass!(validator); - } - match is_trigger_owner(trigger_id.clone(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_mint_user_trigger_token = tokens::CanMintUserTrigger { trigger_id }; - if can_mint_user_trigger_token.is_owned_by(authority) { - pass!(validator); - } - - deny!( - validator, - "Can't mint execution count for trigger owned by another account" - ); - } - - pub fn visit_execute_trigger( - validator: &mut V, - authority: &AccountId, - isi: ExecuteTrigger, - ) { - let trigger_id = isi.trigger_id; - - if is_genesis(validator) { - pass!(validator); - } - match is_trigger_owner(trigger_id.clone(), authority) { - Err(err) => deny!(validator, err), - Ok(true) => pass!(validator), - Ok(false) => {} - } - let can_execute_trigger_token = tokens::CanExecuteUserTrigger { trigger_id }; - if can_execute_trigger_token.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't execute trigger owned by another account"); - } -} - -pub mod permission_token { - use super::*; - - macro_rules! impl_validate { - ($validator:ident, $authority:ident, $self:ident, $method:ident) => { - let token = $self.object; - - macro_rules! visit_internal { - ($token_ty:ty) => { - if let Ok(token) = <$token_ty as TryFrom<_>>::try_from(token.clone()) { - if is_genesis($validator) { - pass!($validator); - } - if let Err(error) = <$token_ty as permission::ValidateGrantRevoke>::$method( - &token, - $authority, - $validator.block_height(), - ) { - deny!($validator, error); - } - - pass!($validator); - } - }; - } - - map_all_crate_tokens!(visit_internal); - - deny!( - $validator, - ValidationFail::NotPermitted(format!("{token:?}: Unknown permission token")) - ); - }; - } - - pub fn visit_grant_account_permission( - validator: &mut V, - authority: &AccountId, - isi: Grant, - ) { - impl_validate!(validator, authority, isi, validate_grant); - } - - pub fn visit_revoke_account_permission( - validator: &mut V, - authority: &AccountId, - isi: Revoke, - ) { - impl_validate!(validator, authority, isi, validate_revoke); - } -} - -pub mod validator { - use super::*; - - declare_tokens! { - crate::default::validator::tokens::CanUpgradeValidator, - } - - pub mod tokens { - use super::*; - - token! 
{ - #[derive(Copy, ValidateGrantRevoke)] - #[validate(permission::OnlyGenesis)] - pub struct CanUpgradeValidator; - } - } - - #[allow(clippy::needless_pass_by_value)] - pub fn visit_upgrade_validator( - validator: &mut V, - authority: &AccountId, - _isi: Upgrade, - ) { - if is_genesis(validator) { - pass!(validator); - } - if tokens::CanUpgradeValidator.is_owned_by(authority) { - pass!(validator); - } - - deny!(validator, "Can't upgrade validator"); - } -} - -fn is_genesis(validator: &V) -> bool { - validator.block_height() == 0 -} diff --git a/wasm/validator/src/lib.rs b/wasm/validator/src/lib.rs deleted file mode 100644 index 348d8173f66..00000000000 --- a/wasm/validator/src/lib.rs +++ /dev/null @@ -1,170 +0,0 @@ -//! API for *Runtime Validators*. -#![no_std] - -extern crate alloc; -extern crate self as iroha_validator; - -use alloc::vec::Vec; - -pub use iroha_schema::MetaMap; -use iroha_wasm::data_model::{ - permission::PermissionTokenId, validator::Result, visit::Visit, ValidationFail, -}; -pub use iroha_wasm::{self, data_model}; - -pub mod default; -pub mod permission; - -/// Shortcut for `return Ok(())`. -#[macro_export] -macro_rules! pass { - ($validator:ident) => {{ - #[cfg(debug_assertions)] - if let Err(_error) = $validator.verdict() { - unreachable!("Validator already denied"); - } - - return; - }}; -} - -/// Shortcut for `return Err(ValidationFail)`. -/// -/// Supports [`format!`](alloc::fmt::format) syntax as well as any expression returning [`String`](alloc::string::String). -#[macro_export] -macro_rules! deny { - ($validator:ident, $l:literal $(,)?) => {{ - #[cfg(debug_assertions)] - if let Err(_error) = $validator.verdict() { - unreachable!("Validator already denied"); - } - $validator.deny(::iroha_validator::data_model::ValidationFail::NotPermitted( - ::alloc::fmt::format(::core::format_args!($l)), - )); - return; - }}; - ($validator:ident, $e:expr $(,)?) => {{ - #[cfg(debug_assertions)] - if let Err(_error) = $validator.verdict() { - unreachable!("Validator already denied"); - } - $validator.deny($e); - return; - }}; -} - -/// Macro to parse literal as a type. Panics if failed. -/// -/// # Example -/// -/// ```no_run -/// use iroha_wasm::parse; -/// use data_model::prelude::*; -/// -/// let account_id = parse!("alice@wonderland" as AccountId); -/// ``` -#[macro_export] -macro_rules! parse { - ($l:literal as _) => { - compile_error!( - "Don't use `_` as a type in this macro, \ - otherwise panic message would be less informative" - ) - }; - ($l:literal as $t:ty) => { - $crate::iroha_wasm::debug::DebugExpectExt::dbg_expect( - $l.parse::<$t>(), - concat!("Failed to parse `", $l, "` as `", stringify!($t), "`"), - ) - }; -} - -/// Declare token types of current module. Use it with a full path to the token. -/// -/// Used to iterate over token types to validate `Grant` and `Revoke` instructions. -/// -/// -/// TODO: Replace with procedural macro. Example: -/// ``` -/// #[tokens(path = "crate::current_module")] -/// mod tokens { -/// #[derive(Token, ...)] -/// pub struct MyToken; -/// } -/// ``` -#[macro_export] -macro_rules! declare_tokens { - ($($token_ty:ty),+ $(,)?) => { - macro_rules! 
map_tokens { - ($callback:ident) => {$( - $callback!($token_ty) - );+} - } - - pub(crate) use map_tokens; - } -} - -/// Collection of all permission tokens defined by the validator -#[derive(Debug, Clone, Default)] -pub struct PermissionTokenSchema(Vec, MetaMap); - -impl PermissionTokenSchema { - /// Remove permission token from this collection - pub fn remove(&mut self) { - let to_remove = ::name(); - - if let Some(pos) = self.0.iter().position(|token_id| *token_id == to_remove) { - self.0.remove(pos); - ::remove_from_schema(&mut self.1); - } - } - - /// Insert new permission token into this collection - pub fn insert(&mut self) { - ::update_schema_map(&mut self.1); - self.0.push(::name()); - } - - /// Serializes schema into a JSON string representation - pub fn serialize(mut self) -> (Vec, alloc::string::String) { - self.0.sort(); - - ( - self.0, - serde_json::to_string(&self.1).expect("schema serialization must not fail"), - ) - } -} - -/// Validator of Iroha operations -pub trait Validate: Visit { - /// Validator verdict. - fn verdict(&self) -> &Result; - - /// Current block height. - fn block_height(&self) -> u64; - - /// Set validator verdict to deny - fn deny(&mut self, reason: ValidationFail); -} - -pub mod prelude { - //! Contains useful re-exports - - pub use alloc::vec::Vec; - - pub use iroha_validator_derive::{entrypoint, Token, ValidateGrantRevoke}; - pub use iroha_wasm::{ - data_model::{ - prelude::*, - validator::{MigrationError, MigrationResult, Result}, - visit::Visit, - ValidationFail, - }, - prelude::*, - Context, - }; - - pub use super::{declare_tokens, deny, pass, PermissionTokenSchema, Validate}; -} diff --git a/wasm_builder/Cargo.toml b/wasm_builder/Cargo.toml index 64954992991..00434bd7c2c 100644 --- a/wasm_builder/Cargo.toml +++ b/wasm_builder/Cargo.toml @@ -8,9 +8,12 @@ authors.workspace = true license.workspace = true categories = ["development-tools::build-utils"] +[lints] +workspace = true + [dependencies] eyre = { workspace = true } serde_json = { workspace = true, features = ["std"] } -sha256 = "1.2.2" +sha256 = "1.4.0" path-absolutize = { workspace = true } -wasm-opt = "0.113.0" +wasm-opt = "0.116.0" diff --git a/wasm_builder/src/lib.rs b/wasm_builder/src/lib.rs index 1ddbe8a19dc..1cc36788b22 100644 --- a/wasm_builder/src/lib.rs +++ b/wasm_builder/src/lib.rs @@ -1,4 +1,4 @@ -//! Crate with helper tool to build smartcontracts (e.g. triggers and validators) for Iroha 2. +//! Crate with helper tool to build smartcontracts (e.g. triggers and executors) for Iroha 2. //! //! See [`Builder`] for more details. @@ -16,7 +16,7 @@ use path_absolutize::Absolutize; /// Current toolchain used to build smartcontracts const TOOLCHAIN: &str = "+nightly-2023-06-25"; -/// WASM Builder for smartcontracts (e.g. triggers and validators). +/// WASM Builder for smartcontracts (e.g. triggers and executors). /// /// # Example /// @@ -46,6 +46,8 @@ pub struct Builder<'path, 'out_dir> { out_dir: Option<&'out_dir Path>, /// Flag to enable smartcontract formatting format: bool, + /// Flag controlling whether to show output of the build process + show_output: bool, } impl<'path, 'out_dir> Builder<'path, 'out_dir> { @@ -60,6 +62,7 @@ impl<'path, 'out_dir> Builder<'path, 'out_dir> { path: relative_path.as_ref(), out_dir: None, format: false, + show_output: false, } } @@ -84,6 +87,14 @@ impl<'path, 'out_dir> Builder<'path, 'out_dir> { self } + /// Enable showing output of the build process. + /// + /// Disabled by default. 
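A hedged usage sketch for the new flag documented here: `Builder::new`, `show_output`, and `Output::wasm_file_path` appear in this diff, while the `build()` method and the crate name `iroha_wasm_builder` are assumed from context not shown:

```rust
use iroha_wasm_builder::Builder; // crate name assumed from `wasm_builder/`

fn main() -> eyre::Result<()> {
    // Path and `build()` are illustrative; `show_output()` streams the
    // `cargo build` output to the terminal instead of capturing it.
    let output = Builder::new("relative/path/to/smartcontract")
        .show_output()
        .build()?;
    println!("WASM written to {}", output.wasm_file_path().display());
    Ok(())
}
```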
diff --git a/wasm_builder/Cargo.toml b/wasm_builder/Cargo.toml
index 64954992991..00434bd7c2c 100644
--- a/wasm_builder/Cargo.toml
+++ b/wasm_builder/Cargo.toml
@@ -8,9 +8,12 @@ authors.workspace = true
 license.workspace = true
 categories = ["development-tools::build-utils"]
 
+[lints]
+workspace = true
+
 [dependencies]
 eyre = { workspace = true }
 serde_json = { workspace = true, features = ["std"] }
-sha256 = "1.2.2"
+sha256 = "1.4.0"
 path-absolutize = { workspace = true }
-wasm-opt = "0.113.0"
+wasm-opt = "0.116.0"
diff --git a/wasm_builder/src/lib.rs b/wasm_builder/src/lib.rs
index 1ddbe8a19dc..1cc36788b22 100644
--- a/wasm_builder/src/lib.rs
+++ b/wasm_builder/src/lib.rs
@@ -1,4 +1,4 @@
-//! Crate with helper tool to build smartcontracts (e.g. triggers and validators) for Iroha 2.
+//! Crate with helper tool to build smartcontracts (e.g. triggers and executors) for Iroha 2.
 //!
 //! See [`Builder`] for more details.
 
@@ -16,7 +16,7 @@ use path_absolutize::Absolutize;
 /// Current toolchain used to build smartcontracts
 const TOOLCHAIN: &str = "+nightly-2023-06-25";
 
-/// WASM Builder for smartcontracts (e.g. triggers and validators).
+/// WASM Builder for smartcontracts (e.g. triggers and executors).
 ///
 /// # Example
 ///
@@ -46,6 +46,8 @@ pub struct Builder<'path, 'out_dir> {
     out_dir: Option<&'out_dir Path>,
     /// Flag to enable smartcontract formatting
     format: bool,
+    /// Flag controlling whether to show output of the build process
+    show_output: bool,
 }
 
 impl<'path, 'out_dir> Builder<'path, 'out_dir> {
@@ -60,6 +62,7 @@ impl<'path, 'out_dir> Builder<'path, 'out_dir> {
             path: relative_path.as_ref(),
             out_dir: None,
             format: false,
+            show_output: false,
         }
     }
 
@@ -84,6 +87,14 @@ impl<'path, 'out_dir> Builder<'path, 'out_dir> {
         self
     }
 
+    /// Enable showing output of the build process.
+    ///
+    /// Disabled by default.
+    pub fn show_output(mut self) -> Self {
+        self.show_output = true;
+        self
+    }
+
     /// Apply `cargo check` to the smartcontract.
     ///
     /// # Errors
@@ -116,6 +127,7 @@ impl<'path, 'out_dir> Builder<'path, 'out_dir> {
                 |out_dir| Ok(Cow::Borrowed(out_dir)),
             )?,
             format: self.format,
+            show_output: self.show_output,
         })
     }
 
@@ -169,6 +181,7 @@ mod internal {
         pub absolute_path: PathBuf,
         pub out_dir: Cow<'out_dir, Path>,
         pub format: bool,
+        pub show_output: bool,
     }
 
     impl Builder<'_> {
@@ -223,13 +236,11 @@ mod internal {
         }
 
         fn format_smartcontract(&self) -> Result<()> {
-            let command_output = cargo_command()
-                .current_dir(&self.absolute_path)
-                .arg("fmt")
-                .output()
-                .wrap_err("Failed to run `cargo fmt`")?;
-
-            check_command_output(&command_output, "cargo fmt")
+            check_command(
+                self.show_output,
+                cargo_command().current_dir(&self.absolute_path).arg("fmt"),
+                "cargo fmt",
+            )
         }
 
         fn get_base_command(&self, cmd: &'static str) -> std::process::Command {
@@ -243,12 +254,9 @@ mod internal {
         }
 
         fn check_smartcontract(&self) -> Result<()> {
-            let command_output = self
-                .get_base_command("check")
-                .output()
-                .wrap_err("Failed to run `cargo check`")?;
+            let command = &mut self.get_base_command("check");
 
-            check_command_output(&command_output, "cargo check")
+            check_command(self.show_output, command, "cargo check")
         }
 
         fn build_smartcontract(self) -> Result<Output> {
@@ -271,13 +279,12 @@ mod internal {
                 None
             };
 
-            let command_output = self
-                .get_base_command("build")
-                .env("CARGO_TARGET_DIR", self.out_dir.as_ref())
-                .output()
-                .wrap_err("Failed to run `cargo build`")?;
-
-            check_command_output(&command_output, "cargo build")?;
+            check_command(
+                self.show_output,
+                self.get_base_command("build")
+                    .env("CARGO_TARGET_DIR", self.out_dir.as_ref()),
+                "cargo build",
+            )?;
 
             Ok(Output {
                 wasm_file,
@@ -386,6 +393,7 @@ impl Output {
     }
 
     /// Get the file path of the underlying WASM
+    #[must_use]
     pub fn wasm_file_path(&self) -> &PathBuf {
         &self.wasm_file
     }
@@ -409,15 +417,35 @@ fn cargo_command() -> Command {
     cargo
 }
 
-fn check_command_output(command_output: &std::process::Output, command_name: &str) -> Result<()> {
-    if !command_output.status.success() {
+fn check_command_output(output: &std::process::Output, command_name: &str) -> Result<()> {
+    if output.status.success() {
+        Ok(())
+    } else {
         bail!(
             "`{}` returned non zero exit code ({}). Stderr:\n{}",
             command_name,
-            command_output.status,
-            String::from_utf8_lossy(&command_output.stderr)
+            output.status,
+            String::from_utf8_lossy(&output.stderr)
         );
     }
+}
-
-    Ok(())
+
+fn check_command(show_output: bool, command: &mut Command, command_name: &str) -> Result<()> {
+    if show_output {
+        let status = command
+            .status()
+            .wrap_err(format!("Failed to run `{command_name}`"))?;
+        if status.success() {
+            Ok(())
+        } else {
+            bail!(
+                "`{command_name}` returned non zero exit code ({status}). See messages above for the probable error",
+            );
+        }
+    } else {
+        let output = command
+            .output()
+            .wrap_err(format!("Failed to run `{command_name}`"))?;
+        check_command_output(&output, command_name)
+    }
 }
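With the flag wired through every call site, callers can now stream cargo's own output as the build runs instead of having it captured and replayed from stderr only on failure. A usage sketch; the crate import name and smartcontract path are assumptions, not taken from this diff:

```rust
use eyre::Result;
use iroha_wasm_builder::Builder; // assumed import name for the `wasm_builder` crate

fn main() -> Result<()> {
    let output = Builder::new("relative/path/to/smartcontract") // illustrative path
        .show_output() // new: stream `cargo build` output instead of capturing it
        .build()?;
    println!("WASM written to {}", output.wasm_file_path().display());
    Ok(())
}
```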
diff --git a/wasm_codec/Cargo.toml b/wasm_codec/Cargo.toml
index 302bcad24ea..ae18d83325f 100644
--- a/wasm_codec/Cargo.toml
+++ b/wasm_codec/Cargo.toml
@@ -6,9 +6,11 @@ authors.workspace = true
 license.workspace = true
 
+[lints]
+workspace = true
 
 [dependencies]
-iroha_core_wasm_codec_derive = { version = "=2.0.0-pre-rc.19", path = "derive" }
+iroha_core_wasm_codec_derive = { version = "=2.0.0-pre-rc.20", path = "derive" }
 thiserror = { workspace = true }
 wasmtime = { workspace = true }
diff --git a/wasm_codec/derive/Cargo.toml b/wasm_codec/derive/Cargo.toml
index 5cbc4f6dbc4..27119faf744 100644
--- a/wasm_codec/derive/Cargo.toml
+++ b/wasm_codec/derive/Cargo.toml
@@ -6,6 +6,9 @@ authors.workspace = true
 license.workspace = true
 
+[lints]
+workspace = true
+
 [lib]
 proc-macro = true
diff --git a/wasm_codec/derive/src/lib.rs b/wasm_codec/derive/src/lib.rs
index 8560067d80d..2053fd0d8e3 100644
--- a/wasm_codec/derive/src/lib.rs
+++ b/wasm_codec/derive/src/lib.rs
@@ -68,7 +68,7 @@ pub fn wrap(attr: TokenStream, item: TokenStream) -> TokenStream {
     let ident = &fn_item.sig.ident;
 
     let mut inner_fn_item = fn_item.clone();
-    let inner_fn_ident = syn::Ident::new(&format!("__{}_inner", ident), ident.span());
+    let inner_fn_ident = syn::Ident::new(&format!("__{ident}_inner"), ident.span());
     inner_fn_item.sig.ident = inner_fn_ident.clone();
 
     let fn_class = classify_fn(&fn_item.sig);
@@ -113,7 +113,7 @@ pub fn wrap_trait_fn(attr: TokenStream, item: TokenStream) -> TokenStream {
     let ident = &fn_item.sig.ident;
 
     let mut inner_fn_item = fn_item.clone();
-    let inner_fn_ident = syn::Ident::new(&format!("__{}_inner", ident), ident.span());
+    let inner_fn_ident = syn::Ident::new(&format!("__{ident}_inner"), ident.span());
     inner_fn_item.sig.ident = inner_fn_ident;
 
     let fn_class = classify_fn(&fn_item.sig);
@@ -203,7 +203,7 @@ impl<T: FnOnce() -> TokenStream2> LazyTokenStream<T> {
 impl<T: FnOnce() -> TokenStream2> quote::ToTokens for LazyTokenStream<T> {
     fn to_tokens(&self, tokens: &mut TokenStream2) {
         let inner = &*self.0;
-        inner.to_tokens(tokens)
+        inner.to_tokens(tokens);
     }
 }
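The derive-crate edits are mechanical cleanups for the newly enabled workspace lints: format arguments move to inline captures (the style clippy's `uninlined_format_args` asks for, presumably among the workspace lints) and `to_tokens` gains an explicit trailing semicolon to make the unit return visible. Both `format!` spellings produce identical output:

```rust
fn main() {
    let ident = "foo";
    // Positional argument vs. inline identifier capture: same result.
    assert_eq!(format!("__{}_inner", ident), format!("__{ident}_inner"));
}
```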
diff --git a/wasm_codec/src/lib.rs b/wasm_codec/src/lib.rs
index 9c77ab201c0..ecca6e08d47 100644
--- a/wasm_codec/src/lib.rs
+++ b/wasm_codec/src/lib.rs
@@ -1,7 +1,5 @@
 //! This crate provides utils for encoding/decoding objects between Iroha host and Wasm smart contracts.
 
-#![allow(clippy::arithmetic_side_effects)]
-
 pub use iroha_core_wasm_codec_derive::{wrap, wrap_trait_fn};
 use parity_scale_codec::{DecodeAll, Encode, Error as ParityError};
 use wasmtime::Result;
@@ -29,7 +27,6 @@ pub enum Error {
 /// # Errors
 ///
 /// Fails with [`Error`] which will be converted into [`wasmtime::Error`] if decoding fails.
-#[allow(clippy::arithmetic_side_effects)]
 pub fn decode_from_memory<C: wasmtime::AsContext, T: DecodeAll>(
     memory: &wasmtime::Memory,
     context: &C,
@@ -53,7 +50,8 @@ pub fn decode_from_memory<C: wasmtime::AsContext, T: DecodeAll>(
 ///
 /// - Failed to decode object
 /// - Failed to call `dealloc_fn`
-#[allow(clippy::expect_used, clippy::unwrap_in_result)]
+// NOTE: Panic is predicated by implementation not user input
+#[allow(clippy::missing_panics_doc)]
 pub fn decode_with_length_prefix_from_memory<
     C: wasmtime::AsContextMut,
     T: DecodeAll + std::fmt::Debug,
@@ -92,6 +90,8 @@ pub fn decode_with_length_prefix_from_memory<
 ///
 /// - If failed to call `alloc_fn`
 /// - If failed to write into the `memory`
+// NOTE: Panic is predicated by implementation not user input
+#[allow(clippy::missing_panics_doc)]
 pub fn encode_into_memory<C: wasmtime::AsContextMut, T: Encode>(
     obj: &T,
     memory: &wasmtime::Memory,
@@ -123,10 +123,12 @@ pub fn encode_into_memory<C: wasmtime::AsContextMut, T: Encode>(
 /// `WebAssembly` it's not possible to return two values from a wasm function without some
 /// shenanignas. In those cases, only one value is sent which is pointer to the allocation
 /// with the first element being the length of the encoded object following it.
+// NOTE: Panic is predicated by implementation not user input
+#[allow(clippy::missing_panics_doc)]
 pub fn encode_with_length_prefix<T: Encode>(obj: &T) -> Vec<u8> {
     // Compile-time size check
     #[allow(clippy::let_unit_value)]
-    let _ = SizeChecker::<T>::RESULT;
+    let () = SizeChecker::<T>::RESULT;
 
     let len_size_bytes = core::mem::size_of::<u32>();