From a4548708d17c520bc844e763fd68c18c2ab8920c Mon Sep 17 00:00:00 2001
From: Conor Schaefer
Date: Fri, 17 Jan 2025 18:00:36 +0000
Subject: [PATCH] Revert "penumbra: update ecosystem tendermint/ibc crates (#4980)" (#4993)

## Describe your changes

This reverts commit 64c32ef4bf33c929560ae2fc46ac127030c01f68, which constitutes the squash-merge of PR #4980. We're backing this change out strictly to simplify release engineering: we want the `main` branch to remain fully compatible with the `0.81.x` series, and we'll continue QA of significant version changes in a parallel release branch, `release/v0.82.x`. I'll handle preparing the latter shortly.

## Issue ticket number and link

See related discussion in #4988 & #4991.

## Testing and review

This is a programmatic change, in that I simply ran `git revert 64c32ef4b`, wrote some notes into the commit message, and pushed it up. I also made sure to rerun `just proto` to regenerate the protos, and confirmed there are no changes, which is precisely what we wanted to see. Preferably this change would land before #4992, since #4992 changes protos. I'll regenerate the protos in #4992 on top of this once it lands on `main`.

## Checklist before requesting a review

- [x] I have added guiding text to explain how a reviewer should test these changes.

- [x] If this code contains consensus-breaking changes, I have added the "consensus-breaking" label. Otherwise, I declare my belief that there are not consensus-breaking changes, for the following reason:

  > This commit is expressly intended to preserve protocol compatibility with 0.81.x. Future work on QA to ensure compat with 0.82 will happen in a separate branch.
---
 .github/workflows/buf-pull-request.yml | 1 +
 .github/workflows/rust.yml | 2 +-
 Cargo.lock | 1945 ++++++++---
 Cargo.toml | 69 +-
 crates/bench/Cargo.toml | 2 +-
 crates/bin/pcli/Cargo.toml | 2 -
 crates/bin/pcli/src/command/query.rs | 14 +-
 crates/bin/pcli/src/command/tx.rs | 2 +-
 crates/bin/pcli/src/main.rs | 7 -
 crates/bin/pcli/src/network.rs | 21 +-
 crates/bin/pclientd/Cargo.toml | 1 -
 crates/bin/pclientd/src/lib.rs | 8 +-
 crates/bin/pclientd/src/main.rs | 7 -
 crates/bin/pclientd/src/proxy.rs | 66 +-
 crates/bin/pd/Cargo.toml | 13 +-
 crates/bin/pd/src/main.rs | 34 +-
 crates/bin/pmonitor/Cargo.toml | 1 -
 crates/bin/pmonitor/src/main.rs | 26 +-
 crates/cnidarium/Cargo.toml | 46 +
 crates/cnidarium/src/cache.rs | 125 ++
 crates/cnidarium/src/delta.rs | 513 +++++
 crates/cnidarium/src/escaped_byte_slice.rs | 34 +
 crates/cnidarium/src/future.rs | 763 +++++++
 .../src/gen/penumbra.cnidarium.v1.rs | 815 +++++++
 .../src/gen/penumbra.cnidarium.v1.serde.rs | 1433 ++++++++++++
 .../src/gen/penumbra.cnidarium.v1alpha1.rs | 671 ++++++
 .../gen/penumbra.cnidarium.v1alpha1.serde.rs | 1047 +++++++++
 .../src/gen/proto_descriptor.bin.no_lfs | Bin 0 -> 102050 bytes
 crates/cnidarium/src/lib.rs | 91 +
 crates/cnidarium/src/metrics.rs | 33 +
 crates/cnidarium/src/read.rs | 247 +++
 crates/cnidarium/src/rpc.rs | 256 +++
 crates/cnidarium/src/snapshot.rs | 558 +++++
 .../cnidarium/src/snapshot/rocks_wrapper.rs | 83 +
 crates/cnidarium/src/snapshot_cache.rs | 228 ++
 crates/cnidarium/src/storage.rs | 611 ++++++
 crates/cnidarium/src/storage/temp.rs | 43 +
 crates/cnidarium/src/store.rs | 2 +
 crates/cnidarium/src/store/multistore.rs | 233 ++
 crates/cnidarium/src/store/substore.rs | 546 +++++
 crates/cnidarium/src/tests.rs | 1468 +++++++++++++
 crates/cnidarium/src/tests/delta.rs | 721 ++++++
 crates/cnidarium/src/utils.rs | 34 +
 crates/cnidarium/src/write.rs | 74 +
crates/cnidarium/src/write_batch.rs | 59 + crates/cnidarium/tests/migration.rs | 1190 ++++++++++ crates/cnidarium/tests/substore_tests.rs | 751 +++++++ crates/cnidarium/tests/write_batch.rs | 302 +++ crates/core/app/Cargo.toml | 4 +- crates/core/app/src/rpc.rs | 48 +- crates/core/app/src/server/consensus.rs | 5 +- crates/core/app/src/server/events.rs | 27 +- crates/core/app/src/server/info.rs | 2 - ...p_can_sweep_a_collection_of_small_notes.rs | 4 +- .../core/app/tests/common/ibc_tests/node.rs | 5 +- .../app/tests/common/ibc_tests/relayer.rs | 42 +- .../app/tests/mock_consensus_block_proving.rs | 11 +- .../view_server_can_be_served_on_localhost.rs | 4 +- crates/core/component/auction/Cargo.toml | 2 +- .../msg_handler/channel_close_confirm.rs | 1 - .../component/msg_handler/channel_open_ack.rs | 1 - .../msg_handler/channel_open_confirm.rs | 1 - .../msg_handler/channel_open_init.rs | 1 - .../component/msg_handler/channel_open_try.rs | 2 - .../core/component/ibc/src/component/rpc.rs | 2 +- .../ibc/src/component/rpc/consensus_query.rs | 30 +- .../core/component/shielded-pool/Cargo.toml | 1 - .../src/component/rpc/bank_query.rs | 22 +- crates/core/keys/src/symmetric.rs | 1 + crates/crypto/proof-params/Cargo.toml | 2 +- crates/misc/measure/Cargo.toml | 1 - crates/misc/measure/src/main.rs | 26 +- crates/misc/tct-visualize/Cargo.toml | 4 +- .../tct-visualize/src/bin/tct-live-edit.rs | 7 +- crates/misc/tct-visualize/src/live/view.rs | 19 +- crates/proto/Cargo.toml | 3 +- crates/proto/src/box_grpc_svc.rs | 11 +- crates/proto/src/event.rs | 25 +- crates/proto/src/gen/cosmos.app.v1alpha1.rs | 19 +- crates/proto/src/gen/cosmos.auth.v1beta1.rs | 27 +- crates/proto/src/gen/cosmos.bank.v1beta1.rs | 303 ++- .../proto/src/gen/cosmos.base.abci.v1beta1.rs | 69 +- .../src/gen/cosmos.base.query.v1beta1.rs | 13 +- crates/proto/src/gen/cosmos.base.v1beta1.rs | 13 +- .../src/gen/cosmos.crypto.multisig.v1beta1.rs | 13 +- crates/proto/src/gen/cosmos.tx.config.v1.rs | 9 +- .../src/gen/cosmos.tx.signing.v1beta1.rs | 50 +- crates/proto/src/gen/cosmos.tx.v1beta1.rs | 316 ++- crates/proto/src/gen/cosmos_proto.rs | 19 +- crates/proto/src/gen/google.api.rs | 20 +- .../src/gen/ibc.applications.transfer.v1.rs | 101 +- crates/proto/src/gen/ibc.core.channel.v1.rs | 133 +- crates/proto/src/gen/ibc.core.client.v1.rs | 113 +- .../proto/src/gen/ibc.core.connection.v1.rs | 101 +- crates/proto/src/gen/noble.forwarding.v1.rs | 281 ++- crates/proto/src/gen/penumbra.cnidarium.v1.rs | 815 +++++++ .../src/gen/penumbra.cnidarium.v1.serde.rs | 1433 ++++++++++++ crates/proto/src/gen/penumbra.core.app.v1.rs | 132 +- .../src/gen/penumbra.core.app.v1.serde.rs | 3 - .../proto/src/gen/penumbra.core.asset.v1.rs | 92 +- .../src/gen/penumbra.core.asset.v1.serde.rs | 5 - .../gen/penumbra.core.component.auction.v1.rs | 237 +- ...enumbra.core.component.auction.v1.serde.rs | 10 - ...numbra.core.component.community_pool.v1.rs | 121 +- ...enumbra.core.component.compact_block.v1.rs | 164 +- ...a.core.component.compact_block.v1.serde.rs | 5 - .../src/gen/penumbra.core.component.dex.v1.rs | 747 +++---- .../penumbra.core.component.dex.v1.serde.rs | 36 - ...enumbra.core.component.distributions.v1.rs | 21 +- ...a.core.component.distributions.v1.serde.rs | 1 - .../src/gen/penumbra.core.component.fee.v1.rs | 155 +- .../penumbra.core.component.fee.v1.serde.rs | 8 - .../gen/penumbra.core.component.funding.v1.rs | 23 +- ...enumbra.core.component.funding.v1.serde.rs | 1 - .../penumbra.core.component.governance.v1.rs | 614 +++--- 
...mbra.core.component.governance.v1.serde.rs | 39 - .../src/gen/penumbra.core.component.ibc.v1.rs | 75 +- .../penumbra.core.component.ibc.v1.serde.rs | 4 - .../src/gen/penumbra.core.component.sct.v1.rs | 248 +-- .../penumbra.core.component.sct.v1.serde.rs | 16 - ...enumbra.core.component.shielded_pool.v1.rs | 445 ++-- ...a.core.component.shielded_pool.v1.serde.rs | 22 - .../gen/penumbra.core.component.stake.v1.rs | 418 ++-- .../penumbra.core.component.stake.v1.serde.rs | 35 - crates/proto/src/gen/penumbra.core.keys.v1.rs | 80 +- .../src/gen/penumbra.core.keys.v1.serde.rs | 10 - crates/proto/src/gen/penumbra.core.num.v1.rs | 9 +- .../src/gen/penumbra.core.num.v1.serde.rs | 2 - .../src/gen/penumbra.core.transaction.v1.rs | 197 +- .../gen/penumbra.core.transaction.v1.serde.rs | 5 - .../proto/src/gen/penumbra.core.txhash.v1.rs | 13 +- .../src/gen/penumbra.core.txhash.v1.serde.rs | 2 - .../gen/penumbra.crypto.decaf377_fmd.v1.rs | 7 +- .../penumbra.crypto.decaf377_fmd.v1.serde.rs | 1 - .../gen/penumbra.crypto.decaf377_frost.v1.rs | 43 +- ...penumbra.crypto.decaf377_frost.v1.serde.rs | 4 - .../gen/penumbra.crypto.decaf377_rdsa.v1.rs | 19 +- .../penumbra.crypto.decaf377_rdsa.v1.serde.rs | 3 - .../proto/src/gen/penumbra.crypto.tct.v1.rs | 25 +- .../src/gen/penumbra.crypto.tct.v1.serde.rs | 6 - .../src/gen/penumbra.custody.threshold.v1.rs | 102 +- .../penumbra.custody.threshold.v1.serde.rs | 11 - crates/proto/src/gen/penumbra.custody.v1.rs | 174 +- .../src/gen/penumbra.custody.v1.serde.rs | 2 - .../src/gen/penumbra.tools.summoning.v1.rs | 160 +- .../gen/penumbra.tools.summoning.v1.serde.rs | 22 - .../gen/penumbra.util.tendermint_proxy.v1.rs | 196 +- ...penumbra.util.tendermint_proxy.v1.serde.rs | 25 - crates/proto/src/gen/penumbra.view.v1.rs | 813 ++++--- .../proto/src/gen/penumbra.view.v1.serde.rs | 24 - .../proto/src/gen/proto_descriptor.bin.no_lfs | Bin 643923 -> 649637 bytes crates/proto/src/gen/tendermint.abci.rs | 475 ++-- crates/proto/src/gen/tendermint.crypto.rs | 38 +- crates/proto/src/gen/tendermint.p2p.rs | 27 +- crates/proto/src/gen/tendermint.types.rs | 188 +- crates/proto/src/gen/tendermint.version.rs | 15 +- crates/proto/src/lib.rs | 7 + .../proto/src/protobuf/tendermint_compat.rs | 33 +- crates/test/mock-consensus/src/block.rs | 2 +- crates/util/auto-https/Cargo.toml | 4 +- crates/util/auto-https/src/lib.rs | 1 + crates/util/tower-trace/src/lib.rs | 2 +- crates/view/Cargo.toml | 3 +- crates/view/src/service.rs | 41 +- crates/view/src/storage/sct.rs | 7 +- crates/view/src/worker.rs | 5 +- flake.lock | 29 +- rust-toolchain.toml | 2 +- tools/proto-compiler/Cargo.lock | 426 ++-- tools/proto-compiler/Cargo.toml | 18 +- tools/proto-compiler/src/main.rs | 48 +- tools/summonerd/src/main.rs | 17 +- tools/summonerd/src/web.rs | 6 +- 173 files changed, 19947 insertions(+), 6439 deletions(-) create mode 100644 crates/cnidarium/Cargo.toml create mode 100644 crates/cnidarium/src/cache.rs create mode 100644 crates/cnidarium/src/delta.rs create mode 100644 crates/cnidarium/src/escaped_byte_slice.rs create mode 100644 crates/cnidarium/src/future.rs create mode 100644 crates/cnidarium/src/gen/penumbra.cnidarium.v1.rs create mode 100644 crates/cnidarium/src/gen/penumbra.cnidarium.v1.serde.rs create mode 100644 crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.rs create mode 100644 crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.serde.rs create mode 100644 crates/cnidarium/src/gen/proto_descriptor.bin.no_lfs create mode 100644 crates/cnidarium/src/lib.rs create mode 100644 
crates/cnidarium/src/metrics.rs create mode 100644 crates/cnidarium/src/read.rs create mode 100644 crates/cnidarium/src/rpc.rs create mode 100644 crates/cnidarium/src/snapshot.rs create mode 100644 crates/cnidarium/src/snapshot/rocks_wrapper.rs create mode 100644 crates/cnidarium/src/snapshot_cache.rs create mode 100644 crates/cnidarium/src/storage.rs create mode 100644 crates/cnidarium/src/storage/temp.rs create mode 100644 crates/cnidarium/src/store.rs create mode 100644 crates/cnidarium/src/store/multistore.rs create mode 100644 crates/cnidarium/src/store/substore.rs create mode 100644 crates/cnidarium/src/tests.rs create mode 100644 crates/cnidarium/src/tests/delta.rs create mode 100644 crates/cnidarium/src/utils.rs create mode 100644 crates/cnidarium/src/write.rs create mode 100644 crates/cnidarium/src/write_batch.rs create mode 100644 crates/cnidarium/tests/migration.rs create mode 100644 crates/cnidarium/tests/substore_tests.rs create mode 100644 crates/cnidarium/tests/write_batch.rs create mode 100644 crates/proto/src/gen/penumbra.cnidarium.v1.rs create mode 100644 crates/proto/src/gen/penumbra.cnidarium.v1.serde.rs diff --git a/.github/workflows/buf-pull-request.yml b/.github/workflows/buf-pull-request.yml index ead2ad48f3..6772e52e04 100644 --- a/.github/workflows/buf-pull-request.yml +++ b/.github/workflows/buf-pull-request.yml @@ -78,6 +78,7 @@ jobs: ./deployments/scripts/protobuf-codegen # https://github.com/penumbra-zone/penumbra/issues/3038#issuecomment-1722534133 git checkout crates/proto/src/gen/proto_descriptor.bin.no_lfs + git checkout crates/cnidarium/src/gen/proto_descriptor.bin.no_lfs s="$(git status --porcelain)" if [[ -n "$s" ]]; then echo "ERROR: protobuf files must be regenerated and committed." diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index c173160e9d..10ac52dd7b 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -48,7 +48,7 @@ jobs: fi - name: Run tests with nextest - run: cargo nextest run --release + run: cargo nextest run --release --features migration env: CARGO_TERM_COLOR: always diff --git a/Cargo.lock b/Cargo.lock index 8db15a1b72..324b6855da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -208,7 +208,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ed4aa4fe255d0bc6d79373f7e31d2ea147bcf486cba1be5ba7ea85abdb92348" dependencies = [ - "quote", + "quote 1.0.36", "syn 1.0.109", ] @@ -220,8 +220,8 @@ checksum = "7abe79b0e4288889c4574159ab790824d0033b9fdcb2a112a3182fac2e514565" dependencies = [ "num-bigint", "num-traits", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -302,8 +302,8 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae3281bc6d0fd7e549af32b52511e1302185bd688fd3359fa36423346ff682ea" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -366,7 +366,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87bf87e6e8b47264efa9bde63d6225c6276a52e05e91bf37eaa8afd0032d6b71" dependencies = [ "askama_shared", - "proc-macro2 1.0.92", + "proc-macro2 1.0.86", "syn 1.0.109", ] @@ -389,8 +389,8 @@ dependencies = [ "nom", "num-traits", "percent-encoding", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "serde", "syn 1.0.109", "toml 0.5.11", @@ -398,9 +398,9 @@ dependencies = [ [[package]] name = "asn1-rs" -version = "0.6.2" +version = "0.3.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5493c3bedbacf7fd7382c6346bbd66687d12bbaad3a89a2d2c303ee6cf20b048" +checksum = "30ff05a702273012438132f449575dbc804e27b2f3cbe3069aa237d26c98fa33" dependencies = [ "asn1-rs-derive", "asn1-rs-impl", @@ -408,31 +408,31 @@ dependencies = [ "nom", "num-traits", "rusticata-macros", - "thiserror 1.0.61", + "thiserror", "time", ] [[package]] name = "asn1-rs-derive" -version = "0.5.1" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" +checksum = "db8b7511298d5b7784b40b092d9e9dcd3a627a5707e4b5e507931ab0d44eeebf" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", "synstructure", ] [[package]] name = "asn1-rs-impl" -version = "0.2.0" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" +checksum = "2777730b2039ac0f95f093556e61b6d26cebed5393ca6f152717777cec3a42ed" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", ] [[package]] @@ -464,56 +464,55 @@ dependencies = [ [[package]] name = "async-http-codec" -version = "0.8.0" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "096146020b08dbc4587685b0730a7ba905625af13c65f8028035cdfd69573c91" +checksum = "afc4f0600c43df768851edad95ad43119ebde70e2feec8e39b91f97c9b62029e" dependencies = [ "anyhow", "futures", - "http 1.2.0", + "http", "httparse", "log", ] [[package]] name = "async-io" -version = "2.4.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" +checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ "async-lock", + "autocfg", "cfg-if", "concurrent-queue", - "futures-io", - "futures-lite", + "futures-lite 1.13.0", + "log", "parking", "polling", - "rustix", + "rustix 0.37.27", "slab", - "tracing", - "windows-sys 0.59.0", + "socket2 0.4.10", + "waker-fn", ] [[package]] name = "async-lock" -version = "3.4.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" dependencies = [ - "event-listener", - "event-listener-strategy", - "pin-project-lite", + "event-listener 2.5.3", ] [[package]] name = "async-net" -version = "2.0.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b948000fad4873c1c9339d60f2623323a0cfd3816e5181033c6a5cb68b2accf7" +checksum = "0434b1ed18ce1cf5769b8ac540e33f01fa9471058b5e89da9e06f3c882a8c12f" dependencies = [ "async-io", "blocking", - "futures-lite", + "futures-lite 1.13.0", ] [[package]] @@ -533,9 +532,9 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -550,27 +549,53 @@ version = "0.1.80" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c6fa2087f2753a7da8cc1c0dbfcf89579dd57458e36769de5ac750b4671737ca" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] name = "async-web-client" -version = "0.6.2" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37381fb4fad3cd9b579628c21a58f528ef029d1f072d10f16cb9431aa2236d29" +checksum = "3838368c36426d00ad882467ea2308b17eceee33a10d840c2c90fd0923ee3b7e" dependencies = [ "async-http-codec", "async-net", + "async-ws", "futures", "futures-rustls", - "http 1.2.0", + "gloo-net", + "http", + "js-sys", "lazy_static", "log", - "rustls-pki-types", - "thiserror 1.0.61", - "webpki-roots", + "rustls 0.21.12", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots 0.25.4", +] + +[[package]] +name = "async-ws" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b9363f44ce91e6425b719432e3ff359a803434af3e577754a07e5d9ddbeda4" +dependencies = [ + "async-io", + "base64 0.13.1", + "futures", + "futures-lite 1.13.0", + "generic_static", + "http", + "log", + "rand", + "ring 0.16.20", + "strum 0.24.1", + "thiserror", + "utf-8", ] [[package]] @@ -614,47 +639,21 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" -[[package]] -name = "aws-lc-rs" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f409eb70b561706bf8abba8ca9c112729c481595893fd06a2dd9af8ed8441148" -dependencies = [ - "aws-lc-sys", - "paste", - "untrusted 0.7.1", - "zeroize", -] - -[[package]] -name = "aws-lc-sys" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "923ded50f602b3007e5e63e3f094c479d9c8a9b42d7f4034e4afe456aa48bfd2" -dependencies = [ - "bindgen 0.69.5", - "cc", - "cmake", - "dunce", - "fs_extra", - "paste", -] - [[package]] name = "axum" -version = "0.7.9" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", "axum-core", + "bitflags 1.3.2", "bytes", "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.5.1", - "hyper-util", + "headers", + "http", + "http-body", + "hyper", "itoa", "matchit", "memchr", @@ -666,80 +665,47 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_urlencoded", - "sync_wrapper 1.0.2", + "sync_wrapper", "tokio", - "tower 0.5.2", + "tower", "tower-layer", "tower-service", - "tracing", ] [[package]] name = "axum-core" -version = "0.4.5" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", "bytes", "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", + "http", + "http-body", "mime", - "pin-project-lite", "rustversion", - "sync_wrapper 1.0.2", - "tower-layer", - "tower-service", - "tracing", -] - -[[package]] -name = "axum-extra" -version = "0.9.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c794b30c904f0a1c2fb7740f7df7f7972dfaa14ef6f57cb6178dc63e5dca2f04" -dependencies = [ 
- "axum", - "axum-core", - "bytes", - "fastrand", - "futures-util", - "headers", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "mime", - "multer", - "pin-project-lite", - "serde", - "tower 0.5.2", "tower-layer", "tower-service", ] [[package]] name = "axum-server" -version = "0.7.1" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56bac90848f6a9393ac03c63c640925c4b7c8ca21654de40d53f55964667c7d8" +checksum = "447f28c85900215cc1bea282f32d4a2f22d55c5a300afdfbc661c8d6a632e063" dependencies = [ "arc-swap", "bytes", "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.5.1", - "hyper-util", + "http", + "http-body", + "hyper", "pin-project-lite", - "rustls 0.23.21", - "rustls-pemfile 2.2.0", - "rustls-pki-types", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "tokio", - "tokio-rustls 0.26.1", - "tower 0.4.13", + "tokio-rustls", "tower-service", ] @@ -764,6 +730,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.7" @@ -823,19 +795,19 @@ dependencies = [ "lazycell", "peeking_take_while", "prettyplease", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "regex", "rustc-hash", "shlex", - "syn 2.0.90", + "syn 2.0.68", ] [[package]] name = "bindgen" -version = "0.69.5" +version = "0.69.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" dependencies = [ "bitflags 2.6.0", "cexpr", @@ -843,15 +815,12 @@ dependencies = [ "itertools 0.12.1", "lazy_static", "lazycell", - "log", - "prettyplease", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "regex", "rustc-hash", "shlex", - "syn 2.0.90", - "which", + "syn 2.0.68", ] [[package]] @@ -874,18 +843,18 @@ dependencies = [ [[package]] name = "bit-set" -version = "0.8.0" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" dependencies = [ "bit-vec", ] [[package]] name = "bit-vec" -version = "0.8.0" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" [[package]] name = "bitflags" @@ -994,7 +963,7 @@ dependencies = [ "async-channel", "async-task", "futures-io", - "futures-lite", + "futures-lite 2.3.0", "piper", ] @@ -1016,9 +985,9 @@ checksum = "c3ef8005764f53cd4dca619f5bf64cafd4664dada50ece25e4d81de54c80cc0b" dependencies = [ "once_cell", "proc-macro-crate", - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", "syn_derive", ] @@ -1062,9 +1031,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" dependencies = [ "serde", ] @@ -1175,7 +1144,7 @@ dependencies = [ "iana-time-zone", "num-traits", "serde", - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -1260,8 +1229,8 @@ checksum = "ae6371b8bdc8b7d3959e9cf7b22d4435ef3e79e138688421ec654acf8c81b008" dependencies = [ "heck 0.4.1", "proc-macro-error 1.0.4", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -1274,20 +1243,9 @@ dependencies = [ "os_str_bytes", ] -[[package]] -name = "cmake" -version = "0.1.52" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" -dependencies = [ - "cc", -] - [[package]] name = "cnidarium" -version = "0.82.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f0060de634f1f7411ab7464f1021f48ffdeb72fd63cf19553035fa766563828" +version = "0.81.0" dependencies = [ "anyhow", "async-trait", @@ -1304,6 +1262,7 @@ dependencies = [ "parking_lot", "pbjson", "pin-project", + "proptest", "prost", "regex", "rocksdb", @@ -1312,10 +1271,12 @@ dependencies = [ "smallvec", "tempfile", "tendermint", + "test-strategy", "tokio", "tokio-stream", "tonic", "tracing", + "tracing-subscriber 0.3.18", ] [[package]] @@ -1381,8 +1342,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b103d85ca6e209388771bfb7aa6b68a7aeec4afbf6f0a0264bfbf50360e5212e" dependencies = [ "crossterm", - "strum", - "strum_macros", + "strum 0.23.0", + "strum_macros 0.23.1", "unicode-width", ] @@ -1448,19 +1409,6 @@ version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" -[[package]] -name = "cosmos-sdk-proto" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "462e1f6a8e005acc8835d32d60cbd7973ed65ea2a8d8473830e675f050956427" -dependencies = [ - "informalsystems-pbjson", - "prost", - "serde", - "tendermint-proto", - "tonic", -] - [[package]] name = "cpufeatures" version = "0.2.12" @@ -1579,7 +1527,7 @@ dependencies = [ "bitflags 1.3.2", "crossterm_winapi", "libc", - "mio 0.8.11", + "mio", "parking_lot", "signal-hook", "signal-hook-mio", @@ -1675,10 +1623,10 @@ checksum = "622687fe0bac72a04e5599029151f5796111b90f1baaa9b544d807a5e31cd120" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "strsim 0.11.1", - "syn 2.0.90", + "syn 2.0.68", ] [[package]] @@ -1688,8 +1636,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178" dependencies = [ "darling_core", - "quote", - "syn 2.0.90", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -1742,7 +1690,7 @@ dependencies = [ "decaf377", "proptest", "rand_core", - "thiserror 1.0.61", + "thiserror", ] [[package]] @@ -1769,7 +1717,7 @@ dependencies = [ "hex", "proptest", "rand_core", - "thiserror 1.0.61", + "thiserror", "zeroize", "zeroize_derive", ] @@ -1788,7 +1736,7 @@ dependencies = [ "hex", "rand_core", "serde", - "thiserror 1.0.61", + "thiserror", "zeroize", ] @@ -1805,9 +1753,9 @@ dependencies = [ [[package]] name = "der-parser" -version = "9.0.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5cd0a5c643689626bec213c4d8bd4d96acc8ffdb4ad4bb6bc16abf27d5f4b553" +checksum = "fe398ac75057914d7d07307bf67dc7f3f574a26783b4fc7805a20ffa9f506e82" dependencies = [ "asn1-rs", "displaydoc", @@ -1833,8 +1781,8 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -1844,8 +1792,8 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2c35ab6e03642397cdda1dd58abbc05d418aef8e36297f336d5aba060fe8df" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -1855,9 +1803,9 @@ version = "0.99.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -1925,9 +1873,9 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -1951,12 +1899,6 @@ version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" -[[package]] -name = "dunce" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" - [[package]] name = "dyn-clone" version = "1.0.17" @@ -1997,7 +1939,7 @@ dependencies = [ "rand_core", "serde", "sha2 0.9.9", - "thiserror 1.0.61", + "thiserror", "zeroize", ] @@ -2097,6 +2039,12 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b90ca2580b73ab6a1f724b76ca11ab632df820fd6040c336200d2c1df7b3c82c" +[[package]] +name = "event-listener" +version = "2.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" + [[package]] name = "event-listener" version = "5.3.1" @@ -2114,7 +2062,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" dependencies = [ - "event-listener", + "event-listener 5.3.1", "pin-project-lite", ] @@ -2149,6 +2097,15 @@ version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" +[[package]] +name = "fastrand" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51093e27b0797c359783294ca4f0a911c270184cb10f85783b118614a1501be" +dependencies = [ + "instant", +] + [[package]] name = "fastrand" version = "2.1.0" @@ -2232,7 +2189,7 @@ checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" dependencies = [ "futures-core", "futures-sink", - "spin", + "spin 0.9.8", ] [[package]] @@ -2241,12 +2198,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = 
"foldhash" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2" - [[package]] name = "foreign-types" version = "0.3.2" @@ -2288,7 +2239,7 @@ dependencies = [ "rand_core", "serde", "serdect", - "thiserror 1.0.61", + "thiserror", "visibility", "zeroize", ] @@ -2378,15 +2329,27 @@ checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-lite" -version = "2.3.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +checksum = "49a9d51ce47660b1e808d3c990b4709f2f415d928835a17dfd16991515c46bce" dependencies = [ - "fastrand", + "fastrand 1.9.0", "futures-core", "futures-io", + "memchr", "parking", "pin-project-lite", + "waker-fn", +] + +[[package]] +name = "futures-lite" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +dependencies = [ + "futures-core", + "pin-project-lite", ] [[package]] @@ -2395,20 +2358,19 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] name = "futures-rustls" -version = "0.26.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" +checksum = "35bd3cf68c183738046838e300353e4716c674dc5e56890de4826801a6622a28" dependencies = [ "futures-io", - "rustls 0.23.21", - "rustls-pki-types", + "rustls 0.21.12", ] [[package]] @@ -2466,8 +2428,8 @@ checksum = "784f84eebc366e15251c4a8c3acee82a6a6f427949776ecb88377362a9621738" dependencies = [ "proc-macro-error 0.4.12", "proc-macro-hack", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -2482,6 +2444,15 @@ dependencies = [ "zeroize", ] +[[package]] +name = "generic_static" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28ccff179d8070317671db09aee6d20affc26e88c5394714553b04f509b43a60" +dependencies = [ + "once_cell", +] + [[package]] name = "getrandom" version = "0.2.15" @@ -2507,6 +2478,31 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +[[package]] +name = "gloo-net" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9902a044653b26b99f7e3693a42f171312d9be8b26b5697bd1e43ad1f8a35e10" +dependencies = [ + "gloo-utils", + "js-sys", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-utils" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "037fcb07216cb3a30f7292bd0176b050b7b9a052ba830ef7d5d65f6dc64ba58e" +dependencies = [ + "js-sys", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "group" version = "0.13.0" @@ -2529,27 +2525,8 @@ dependencies = [ "futures-core", "futures-sink", "futures-util", - "http 0.2.12", - "indexmap 2.7.0", - "slab", - "tokio", - "tokio-util 0.7.11", - "tracing", -] - -[[package]] -name = "h2" -version = "0.4.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" -dependencies = [ - "atomic-waker", - "bytes", - "fnv", - "futures-core", - "futures-sink", - "http 1.2.0", - "indexmap 2.7.0", + "http", + "indexmap 2.2.6", "slab", "tokio", "tokio-util 0.7.11", @@ -2606,15 +2583,6 @@ dependencies = [ "allocator-api2", ] -[[package]] -name = "hashbrown" -version = "0.15.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" -dependencies = [ - "foldhash", -] - [[package]] name = "hashlink" version = "0.9.1" @@ -2636,14 +2604,14 @@ dependencies = [ [[package]] name = "headers" -version = "0.4.0" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9" +checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ "base64 0.21.7", "bytes", "headers-core", - "http 1.2.0", + "http", "httpdate", "mime", "sha1", @@ -2651,11 +2619,11 @@ dependencies = [ [[package]] name = "headers-core" -version = "0.3.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" +checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" dependencies = [ - "http 1.2.0", + "http", ] [[package]] @@ -2668,7 +2636,7 @@ dependencies = [ "hash32", "rustc_version", "serde", - "spin", + "spin 0.9.8", "stable_deref_trait", ] @@ -2704,9 +2672,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.4.0" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -2752,17 +2720,6 @@ dependencies = [ "itoa", ] -[[package]] -name = "http" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - [[package]] name = "http-body" version = "0.4.6" @@ -2770,32 +2727,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.12", + "http", "pin-project-lite", ] [[package]] -name = "http-body" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" -dependencies = [ - "bytes", - "http 1.2.0", -] - -[[package]] -name = "http-body-util" -version = "0.1.2" +name = "http-range-header" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" -dependencies = [ - "bytes", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "pin-project-lite", -] +checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" [[package]] name = "httparse" @@ -2823,49 +2763,28 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.31" +version = "0.14.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" +checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", + "h2", + "http", + "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.7", "tokio", "tower-service", "tracing", "want", ] -[[package]] -name = "hyper" -version = "1.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "h2 0.4.7", - "http 1.2.0", - "http-body 1.0.1", - "httparse", - "httpdate", - "itoa", - "pin-project-lite", - "smallvec", - "tokio", - "want", -] - [[package]] name = "hyper-rustls" version = "0.24.2" @@ -2873,77 +2792,36 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", - "http 0.2.12", - "hyper 0.14.31", + "http", + "hyper", "rustls 0.21.12", "tokio", - "tokio-rustls 0.24.1", + "tokio-rustls", ] [[package]] -name = "hyper-rustls" -version = "0.27.3" +name = "hyper-timeout" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "futures-util", - "http 1.2.0", - "hyper 1.5.1", - "hyper-util", - "rustls 0.23.21", - "rustls-native-certs 0.8.0", - "rustls-pki-types", + "hyper", + "pin-project-lite", "tokio", - "tokio-rustls 0.26.1", - "tower-service", -] - -[[package]] -name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper 1.5.1", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", + "tokio-io-timeout", ] [[package]] name = "hyper-tls" -version = "0.6.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes", - "http-body-util", - "hyper 1.5.1", - "hyper-util", + "hyper", "native-tls", "tokio", "tokio-native-tls", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "hyper 1.5.1", - "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", ] [[package]] @@ -2971,13 +2849,12 @@ dependencies = [ [[package]] name = "ibc-proto" -version = "0.51.1" +version = "0.41.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b70f517162e74e2d35875b8b94bf4d1e45f2c69ef3de452dc855944455d33ca" +checksum = "dd4ee32b22d3b06f31529b956f4928e5c9a068d71e46cf6abfa19c31ca550553" dependencies = [ - "base64 0.22.1", + "base64 0.21.7", "bytes", - "cosmos-sdk-proto", "flex-error", "ics23", "informalsystems-pbjson", @@ -2990,9 +2867,9 @@ dependencies = [ [[package]] name = "ibc-types" -version = "0.15.0" 
+version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd68e32f5bd94849131670d34e21b0fb66057e91bee8e451e7f4e216e71616d2" +checksum = "ba606d86e2015991f86a129935dbaeacd94beab72fb90a733c1b1ea76be708a2" dependencies = [ "ibc-types-core-channel", "ibc-types-core-client", @@ -3008,9 +2885,9 @@ dependencies = [ [[package]] name = "ibc-types-core-channel" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c57da64f7e945e7443035275cc279e0176c988ea625968c2526706aafcff1766" +checksum = "86fb64ef52086b727e5ae01da0e773f8ca9172ec1fd9d0aa1a79c0c2c610b17a" dependencies = [ "anyhow", "bytes", @@ -3041,9 +2918,9 @@ dependencies = [ [[package]] name = "ibc-types-core-client" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "108a89f64ffa39e04a7e29de9d9824523c3164f8518927716be870ad8a7aedf9" +checksum = "4db9d4b136b9e84ccf581fec02bb9ebc4478ac0f145c526760ed4310b98741e7" dependencies = [ "anyhow", "bytes", @@ -3068,9 +2945,9 @@ dependencies = [ [[package]] name = "ibc-types-core-commitment" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8949e33fbb9c3f1ae49588f4829977e80439f3dfc1491b5a69492f5e4a036949" +checksum = "7e2c527e14707dd0b2c7e6e2f6f62b0655c83154ae3eb1504e441d9d8f454ac6" dependencies = [ "anyhow", "bytes", @@ -3103,9 +2980,9 @@ dependencies = [ [[package]] name = "ibc-types-core-connection" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63035288d11e5830daf20fd99507a671ab1cc03fa04752d75ef1c75e2786543" +checksum = "5a8a326c00e9ba48059407478c826237fe39cc90dd2b47182484192926904fe7" dependencies = [ "anyhow", "bytes", @@ -3133,9 +3010,9 @@ dependencies = [ [[package]] name = "ibc-types-domain-type" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2c543a23a77a5d814e0aeffd5a918964468a8b213f664897bc3f8c50cc7d5c1" +checksum = "3abc9619b9dd7201804f45fc7f335dda72d2e4d6f82d96e8fe3abf4585e6101b" dependencies = [ "anyhow", "bytes", @@ -3144,9 +3021,9 @@ dependencies = [ [[package]] name = "ibc-types-identifier" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ed2d7f06055bb2548564bf02c8c4b47561134f282adae61dc5c6ed722e1cde" +checksum = "405880cf06fef65f51c5c91b7efbdcbc8d7eba0ac16b43538b36ebd17f21edea" dependencies = [ "displaydoc", "serde", @@ -3155,9 +3032,9 @@ dependencies = [ [[package]] name = "ibc-types-lightclients-tendermint" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00fd846b7eedca1dfbb7babeb55e0d74fb0d09a7db63da93737d54a581a96371" +checksum = "2ab22446058bd5afa50d64f8519a9107bbc5101ee65373df896314f52afa0fc6" dependencies = [ "anyhow", "bytes", @@ -3192,9 +3069,9 @@ dependencies = [ [[package]] name = "ibc-types-path" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4da80e010427aff50f227920cdc8f7879469ce2db181cbcf92dc228172a334" +checksum = "a29e6fd8871fdced76402a3008219abf8773e527a46f120e0d76d6a3bb9706c1" dependencies = [ "bytes", "derive_more", @@ -3215,9 +3092,9 @@ dependencies = [ [[package]] name = "ibc-types-timestamp" -version = "0.15.0" +version = "0.12.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "748b2fbebe30ca799d31f6fc220d705b5180a608db842e29a3722671411d81a6" +checksum = "93d2e763838dbef62ca8a1344b4dd5b3919d685b4c61874183724644c912237a" dependencies = [ "bytes", "displaydoc", @@ -3234,9 +3111,9 @@ dependencies = [ [[package]] name = "ibc-types-transfer" -version = "0.15.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcb46fe34a87db49bc9bb20918d7f882871a20d741d866d217c52a5dbe16a72" +checksum = "ad973ca1fbad8d0d1632ec0a329aecff8731bbb96395b7553d6b9fd749356d34" dependencies = [ "displaydoc", "serde", @@ -3257,9 +3134,9 @@ dependencies = [ [[package]] name = "ics23" -version = "0.12.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b17f1a5bd7d12ad30a21445cfa5f52fd7651cb3243ba866f9916b1ec112f12" +checksum = "18798160736c1e368938ba6967dbcb3c7afb3256b442a5506ba5222eebb68a5a" dependencies = [ "anyhow", "blake2", @@ -3329,8 +3206,8 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "11d7a9f6330b71fea57921c9b61c47ee6e84f72d394754eff6163ae67e7395eb" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -3353,8 +3230,8 @@ checksum = "3a7d6e1419fa3129eb0802b4c99603c0d425c79fb5d76191d5a20d0ab0d664e8" dependencies = [ "libflate", "proc-macro-hack", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -3387,12 +3264,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.7.0" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.14.5", "serde", ] @@ -3427,6 +3304,26 @@ dependencies = [ "generic-array", ] +[[package]] +name = "instant" +version = "0.1.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi 0.3.9", + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "ipnet" version = "2.9.0" @@ -3460,15 +3357,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.11" @@ -3477,9 +3365,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jmt" -version = "0.11.0" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf2a10370b45cd850e64993ccd81d25ea2d4b5b0d0312546e7489fed82064f2e" +checksum = "a9a3bf1a303934c6f75533bd3a563730a0730f9361023c49ed6aee9fcb5b98f8" dependencies = [ "anyhow", "borsh", @@ -3493,7 +3381,7 @@ dependencies = [ "num-traits", "serde", "sha2 0.10.8", - "thiserror 1.0.61", + "thiserror", "tracing", ] @@ -3542,7 +3430,7 @@ version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin", + "spin 0.9.8", ] [[package]] @@ -3553,9 +3441,9 @@ checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" -version = "0.2.168" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" [[package]] name = "libflate" @@ -3584,7 +3472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e310b3a6b5907f99202fcdb4960ff45b93735d7c7d96b760fcff8db2dc0e103d" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -3599,7 +3487,7 @@ version = "0.14.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae9ea4b75e1a81675429dafe43441df1caea70081e82246a8cccf514884a88bb" dependencies = [ - "bindgen 0.69.5", + "bindgen 0.69.4", "errno", "libc", ] @@ -3663,6 +3551,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linux-raw-sys" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3789,33 +3683,21 @@ dependencies = [ "portable-atomic", ] -[[package]] -name = "metrics" -version = "0.24.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a7deb012b3b2767169ff203fadb4c6b0b82b947512e5eb9e0b78c2e186ad9e3" -dependencies = [ - "ahash", - "portable-atomic", -] - [[package]] name = "metrics-exporter-prometheus" -version = "0.16.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b6f8152da6d7892ff1b7a1c0fa3f435e92b5918ad67035c3bb432111d9a29b" +checksum = "9bf4e7146e30ad172c42c39b3246864bd2d3c6396780711a1baf749cfe423e21" dependencies = [ - "base64 0.22.1", - "http-body-util", - "hyper 1.5.1", - "hyper-rustls 0.27.3", - "hyper-util", - "indexmap 2.7.0", + "base64 0.21.7", + "hyper", + "hyper-tls", + "indexmap 2.2.6", "ipnet", - "metrics 0.24.1", + "metrics 0.22.3", "metrics-util", "quanta", - "thiserror 1.0.61", + "thiserror", "tokio", "tracing", ] @@ -3837,14 +3719,14 @@ dependencies = [ [[package]] name = "metrics-tracing-context" -version = "0.17.0" +version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1ada651cd6bdffe01e5f35067df53491f1fe853d2b154008ca2bd30b3d3fcf6" +checksum = "fb791d015f8947acf5a7f62bd28d00f289bb7ea98cfbe3ffec1d061eee12df12" dependencies = [ - "indexmap 2.7.0", + "indexmap 2.2.6", "itoa", "lockfree-object-pool", - "metrics 0.24.1", + "metrics 0.22.3", "metrics-util", "once_cell", "tracing", @@ -3854,16 +3736,17 @@ dependencies = [ [[package]] name = "metrics-util" -version = "0.18.0" +version = "0.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15b482df36c13dd1869d73d14d28cd4855fbd6cfc32294bee109908a9f4a4ed7" +checksum = "8b07a5eb561b8cbc16be2d216faf7757f9baf3bfb94dbb0fae3df8387a5bb47f" dependencies = [ "aho-corasick", "crossbeam-epoch", "crossbeam-utils", - "hashbrown 0.15.2", - "indexmap 2.7.0", - "metrics 0.24.1", + "hashbrown 0.14.5", + "indexmap 2.2.6", + "metrics 0.22.3", + "num_cpus", "ordered-float", "quanta", "radix_trie", @@ -3913,40 +3796,12 @@ dependencies = [ 
"windows-sys 0.48.0", ] -[[package]] -name = "mio" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" -dependencies = [ - "libc", - "wasi", - "windows-sys 0.52.0", -] - [[package]] name = "mirai-annotations" version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" -[[package]] -name = "multer" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" -dependencies = [ - "bytes", - "encoding_rs", - "futures-util", - "http 1.2.0", - "httparse", - "memchr", - "mime", - "spin", - "version_check", -] - [[package]] name = "multimap" version = "0.10.0" @@ -4066,8 +3921,8 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -4101,6 +3956,16 @@ dependencies = [ "libm", ] +[[package]] +name = "num_cpus" +version = "1.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +dependencies = [ + "hermit-abi 0.3.9", + "libc", +] + [[package]] name = "number_prefix" version = "0.4.0" @@ -4124,9 +3989,9 @@ dependencies = [ [[package]] name = "oid-registry" -version = "0.7.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8d8034d9489cdaf79228eb9f6a3b8d7bb32ba00d6645ebd48eef4077ceb5bd9" +checksum = "38e20717fa0541f39bd146692035c37bedfa532b3e5071b35761082407546b2a" dependencies = [ "asn1-rs", ] @@ -4170,9 +4035,9 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -4235,8 +4100,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d830939c76d294956402033aee57a6da7b438f2294eb94864c37b0569053a42c" dependencies = [ "proc-macro-crate", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -4266,7 +4131,7 @@ dependencies = [ "libc", "redox_syscall 0.5.2", "smallvec", - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -4299,9 +4164,9 @@ checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "pbjson" -version = "0.7.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7e6349fa080353f4a597daffd05cb81572a9c031a6d4fff7e504947496fcc68" +checksum = "1030c719b0ec2a2d25a5df729d6cff1acf3cc230bf766f4f97833591f7577b90" dependencies = [ "base64 0.21.7", "serde", @@ -4309,21 +4174,21 @@ dependencies = [ [[package]] name = "pbjson-build" -version = "0.7.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eea3058763d6e656105d1403cb04e0a41b7bbac6362d413e7c33be0c32279c9" +checksum = "2580e33f2292d34be285c5bc3dba5259542b083cfad6037b6d70345f24dcb735" dependencies = [ - "heck 0.5.0", - "itertools 0.13.0", + "heck 0.4.1", + "itertools 0.11.0", "prost", "prost-types", ] 
[[package]] name = "pbjson-types" -version = "0.7.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54e5e7bfb1652f95bc361d76f3c780d8e526b134b85417e774166ee941f0887" +checksum = "18f596653ba4ac51bdecbb4ef6773bc7f56042dc13927910de1684ad3d32aa12" dependencies = [ "bytes", "chrono", @@ -4370,7 +4235,6 @@ dependencies = [ "bytes", "camino", "clap", - "cnidarium", "colored", "colored_json", "comfy-table", @@ -4381,7 +4245,7 @@ dependencies = [ "ed25519-consensus", "futures", "hex", - "http-body 1.0.1", + "http-body", "humantime", "ibc-proto", "ibc-types", @@ -4418,7 +4282,6 @@ dependencies = [ "rand_core", "regex", "rpassword", - "rustls 0.23.21", "serde", "serde_json", "serde_with", @@ -4433,7 +4296,7 @@ dependencies = [ "tokio-util 0.7.11", "toml 0.7.8", "tonic", - "tower 0.4.13", + "tower", "tracing", "tracing-subscriber 0.3.18", "url", @@ -4455,11 +4318,11 @@ dependencies = [ "ed25519-consensus", "futures", "hex", - "http 1.2.0", - "http-body 1.0.1", + "http", + "http-body", "ibc-proto", "ibc-types", - "metrics 0.24.1", + "metrics 0.22.3", "parking_lot", "penumbra-app", "penumbra-asset", @@ -4473,7 +4336,6 @@ dependencies = [ "prost", "rand", "rand_core", - "rustls 0.23.21", "serde", "serde_json", "serde_with", @@ -4486,7 +4348,7 @@ dependencies = [ "tonic", "tonic-reflection", "tonic-web", - "tower 0.4.13", + "tower", "tracing", "tracing-subscriber 0.3.18", "url", @@ -4519,12 +4381,12 @@ dependencies = [ "fs_extra", "futures", "hex", - "http 1.2.0", + "http", "ibc-proto", "ibc-types", "ics23", "jmt", - "metrics 0.24.1", + "metrics 0.22.3", "metrics-exporter-prometheus", "metrics-process", "metrics-tracing-context", @@ -4563,9 +4425,8 @@ dependencies = [ "rand_chacha", "rand_core", "regex", - "reqwest 0.12.9", + "reqwest", "rocksdb", - "rustls 0.23.21", "serde", "serde_json", "serde_with", @@ -4584,10 +4445,10 @@ dependencies = [ "tonic", "tonic-reflection", "tonic-web", - "tower 0.4.13", + "tower", "tower-abci", "tower-actor", - "tower-http 0.6.2", + "tower-http", "tower-service", "tracing", "tracing-subscriber 0.3.18", @@ -4618,8 +4479,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f241d42067ed3ab6a4fece1db720838e1418f36d868585a27931f95d6bc03582" dependencies = [ "peg-runtime", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", ] [[package]] @@ -4630,12 +4491,11 @@ checksum = "e3aeb8f54c078314c2065ee649a7241f46b9d8e418e1a9581ba0546657d7aa3a" [[package]] name = "pem" -version = "3.0.4" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e459365e590736a54c3fa561947c84837534b8e9af6fc5bf781307e82658fae" +checksum = "a8835c273a76a90455d7344889b0964598e3316e2a79ede8e36f16bdcf2228b8" dependencies = [ - "base64 0.22.1", - "serde", + "base64 0.13.1", ] [[package]] @@ -4675,7 +4535,7 @@ dependencies = [ "ics23", "im", "jmt", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "parking_lot", "penumbra-asset", @@ -4726,10 +4586,10 @@ dependencies = [ "tonic", "tonic-reflection", "tonic-web", - "tower 0.4.13", + "tower", "tower-abci", "tower-actor", - "tower-http 0.6.2", + "tower-http", "tower-service", "tracing", "tracing-subscriber 0.3.18", @@ -4772,7 +4632,7 @@ dependencies = [ "serde_json", "serde_with", "sha2 0.10.8", - "thiserror 1.0.61", + "thiserror", "tracing", ] @@ -4801,7 +4661,7 @@ dependencies = [ "futures", "hex", "im", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "pbjson-types", "penumbra-asset", @@ -4838,7 +4698,7 @@ 
dependencies = [ "anyhow", "axum-server", "futures", - "rustls 0.23.21", + "rustls 0.21.12", "rustls-acme", "tracing", ] @@ -4880,7 +4740,7 @@ dependencies = [ "rand", "rand_core", "regex", - "reqwest 0.12.9", + "reqwest", "serde", "serde_json", "sha2 0.10.8", @@ -4900,7 +4760,7 @@ dependencies = [ "cnidarium-component", "futures", "hex", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "pbjson-types", "penumbra-asset", @@ -4933,7 +4793,7 @@ dependencies = [ "decaf377-rdsa", "futures", "im", - "metrics 0.24.1", + "metrics 0.22.3", "penumbra-dex", "penumbra-fee", "penumbra-governance", @@ -5016,7 +4876,7 @@ dependencies = [ "hex", "im", "itertools 0.11.0", - "metrics 0.24.1", + "metrics 0.22.3", "metrics-exporter-prometheus", "once_cell", "parking_lot", @@ -5044,7 +4904,7 @@ dependencies = [ "tap", "tendermint", "tendermint-light-client-verifier", - "thiserror 1.0.61", + "thiserror", "tokio", "tokio-stream", "tonic", @@ -5084,7 +4944,7 @@ dependencies = [ "proptest", "rand", "rand_core", - "thiserror 1.0.61", + "thiserror", "tokio", ] @@ -5103,7 +4963,7 @@ dependencies = [ "decaf377-rdsa", "getrandom", "im", - "metrics 0.24.1", + "metrics 0.22.3", "penumbra-asset", "penumbra-num", "penumbra-proto", @@ -5124,7 +4984,7 @@ dependencies = [ "cnidarium", "cnidarium-component", "futures", - "metrics 0.24.1", + "metrics 0.22.3", "penumbra-asset", "penumbra-community-pool", "penumbra-distributions", @@ -5161,7 +5021,7 @@ dependencies = [ "futures", "ibc-types", "im", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "pbjson-types", "penumbra-asset", @@ -5186,7 +5046,7 @@ dependencies = [ "serde_json", "tap", "tendermint", - "thiserror 1.0.61", + "thiserror", "tokio", "tonic", "tracing", @@ -5207,7 +5067,7 @@ dependencies = [ "ibc-proto", "ibc-types", "ics23", - "metrics 0.24.1", + "metrics 0.22.3", "num-traits", "once_cell", "pbjson-types", @@ -5225,7 +5085,7 @@ dependencies = [ "time", "tokio", "tonic", - "tower 0.4.13", + "tower", "tracing", ] @@ -5272,7 +5132,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror 1.0.61", + "thiserror", "tracing", ] @@ -5289,7 +5149,6 @@ dependencies = [ "penumbra-proto", "penumbra-view", "predicates 2.1.5", - "rustls 0.23.21", "serde_json", "tokio", "tonic", @@ -5331,7 +5190,7 @@ dependencies = [ "tap", "tendermint", "tendermint-proto", - "tower 0.4.13", + "tower", "tracing", ] @@ -5383,7 +5242,7 @@ dependencies = [ "serde", "serde_json", "sha2 0.10.8", - "thiserror 1.0.61", + "thiserror", "tracing", ] @@ -5425,7 +5284,7 @@ dependencies = [ "rand", "rand_core", "regex", - "reqwest 0.12.9", + "reqwest", "serde", "serde_json", "sha2 0.10.8", @@ -5473,8 +5332,7 @@ dependencies = [ "decaf377-rdsa", "futures", "hex", - "http-body 1.0.1", - "http-body-util", + "http-body", "ibc-proto", "ibc-types", "ics23", @@ -5488,9 +5346,9 @@ dependencies = [ "tendermint", "tendermint-proto", "tendermint-rpc", - "thiserror 1.0.61", + "thiserror", "tonic", - "tower 0.4.13", + "tower", "tracing", ] @@ -5515,7 +5373,7 @@ dependencies = [ "getrandom", "hex", "im", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "pbjson-types", "penumbra-keys", @@ -5558,7 +5416,7 @@ dependencies = [ "ibc-proto", "ibc-types", "im", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "penumbra-asset", "penumbra-ibc", @@ -5579,7 +5437,7 @@ dependencies = [ "serde_json", "tap", "tendermint", - "thiserror 1.0.61", + "thiserror", "tonic", "tracing", ] @@ -5609,7 +5467,7 @@ dependencies = [ "getrandom", "hex", "im", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", 
"penumbra-asset", "penumbra-distributions", @@ -5665,7 +5523,7 @@ dependencies = [ "serde", "serde_json", "static_assertions", - "thiserror 1.0.61", + "thiserror", "tracing", ] @@ -5687,7 +5545,6 @@ version = "0.81.0" dependencies = [ "anyhow", "axum", - "axum-extra", "axum-server", "bytes", "clap", @@ -5708,7 +5565,7 @@ dependencies = [ "tokio-stream", "tokio-util 0.7.11", "tonic", - "tower-http 0.6.2", + "tower-http", "tracing-subscriber 0.3.18", ] @@ -5720,8 +5577,8 @@ dependencies = [ "chrono", "futures", "hex", - "http 1.2.0", - "metrics 0.24.1", + "http", + "metrics 0.22.3", "pbjson-types", "penumbra-proto", "penumbra-transaction", @@ -5738,7 +5595,7 @@ dependencies = [ "tokio-stream", "tokio-util 0.7.11", "tonic", - "tower 0.4.13", + "tower", "tower-service", "tracing", "url", @@ -5758,7 +5615,7 @@ version = "0.81.0" dependencies = [ "futures", "hex", - "http 1.2.0", + "http", "pin-project", "pin-project-lite", "sha2 0.10.8", @@ -5768,7 +5625,7 @@ dependencies = [ "tokio-stream", "tokio-util 0.7.11", "tonic", - "tower 0.4.13", + "tower", "tower-service", "tracing", ] @@ -5823,7 +5680,7 @@ dependencies = [ "serde_json", "sha2 0.10.8", "tendermint", - "thiserror 1.0.61", + "thiserror", "tokio", "tracing", ] @@ -5851,7 +5708,6 @@ dependencies = [ "async-trait", "bytes", "camino", - "cnidarium", "decaf377", "digest 0.9.0", "ed25519-consensus", @@ -5859,7 +5715,7 @@ dependencies = [ "genawaiter", "hex", "ibc-types", - "metrics 0.24.1", + "metrics 0.22.3", "once_cell", "parking_lot", "pbjson-types", @@ -5934,7 +5790,7 @@ dependencies = [ "serde_json", "tokio", "tonic", - "tower 0.4.13", + "tower", "tracing", ] @@ -5951,7 +5807,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.7.0", + "indexmap 2.2.6", ] [[package]] @@ -5969,9 +5825,9 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -6023,7 +5879,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" dependencies = [ "atomic-waker", - "fastrand", + "fastrand 2.1.0", "futures-io", ] @@ -6107,7 +5963,6 @@ dependencies = [ "penumbra-tct", "penumbra-view", "regex", - "rustls 0.23.21", "serde", "serde_json", "tempfile", @@ -6122,17 +5977,18 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.4" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" +checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ + "autocfg", + "bitflags 1.3.2", "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "libc", + "log", "pin-project-lite", - "rustix", - "tracing", - "windows-sys 0.59.0", + "windows-sys 0.48.0", ] [[package]] @@ -6265,8 +6121,8 @@ version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" dependencies = [ - "proc-macro2 1.0.92", - "syn 2.0.90", + "proc-macro2 1.0.86", + "syn 2.0.68", ] [[package]] @@ -6297,8 +6153,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"18f33027081eba0a6d8aba6d1b1c3a3be58cbb12106341c2d5759fcd9b5277e7" dependencies = [ "proc-macro-error-attr 0.4.12", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", "version_check", ] @@ -6310,8 +6166,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr 1.0.4", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", "version_check", ] @@ -6322,8 +6178,8 @@ version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a5b4b77fdb63c1eca72173d68d24501c54ab1269409f6b672c85deb18af69de" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", "syn-mid", "version_check", @@ -6335,8 +6191,8 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "version_check", ] @@ -6352,14 +6208,23 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "557facecbf90ff79faea80a08230d10c812016aa19198ed07d06de61f965b5cc" dependencies = [ - "unicode-xid", + "unicode-xid 0.1.0", +] + +[[package]] +name = "proc-macro2" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" +dependencies = [ + "unicode-xid 0.1.0", ] [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -6374,7 +6239,7 @@ dependencies = [ "hex", "lazy_static", "procfs-core", - "rustix", + "rustix 0.38.34", ] [[package]] @@ -6389,9 +6254,9 @@ dependencies = [ [[package]] name = "proptest" -version = "1.6.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14cae93065090804185d3b75f0bf93b8eeda30c7a9b4a33d3bdb3988d6229e50" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ "bit-set", "bit-vec", @@ -6409,20 +6274,20 @@ dependencies = [ [[package]] name = "proptest-derive" -version = "0.5.1" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee1c9ac207483d5e7db4940700de86a9aae46ef90c48b57f99fe7edb8345e49" +checksum = "90b46295382dc76166cb7cf2bb4a97952464e4b7ed5a43e6cd34e1fec3349ddc" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 0.4.30", + "quote 0.6.13", + "syn 0.15.44", ] [[package]] name = "prost" -version = "0.13.4" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", "prost-derive", @@ -6430,12 +6295,13 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.4" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +checksum = 
"22505a5c94da8e3b7c2996394d1c933236c4d743e81a410bcca4e6989fc066a4" dependencies = [ + "bytes", "heck 0.5.0", - "itertools 0.13.0", + "itertools 0.12.1", "log", "multimap", "once_cell", @@ -6444,28 +6310,28 @@ dependencies = [ "prost", "prost-types", "regex", - "syn 2.0.90", + "syn 2.0.68", "tempfile", ] [[package]] name = "prost-derive" -version = "0.13.4" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +checksum = "81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.13.0", - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "itertools 0.12.1", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] name = "prost-reflect" -version = "0.14.3" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20ae544fca2892fd4b7e9ff26cba1090cedf1d4d95c2aded1af15d2f93f270b8" +checksum = "6f5eec97d5d34bdd17ad2db2219aabf46b054c6c41bd5529767c9ce55be5898f" dependencies = [ "once_cell", "prost", @@ -6474,9 +6340,9 @@ dependencies = [ [[package]] name = "prost-types" -version = "0.13.4" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" dependencies = [ "prost", ] @@ -6502,13 +6368,22 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" +[[package]] +name = "quote" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" +dependencies = [ + "proc-macro2 0.4.30", +] + [[package]] name = "quote" version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ - "proc-macro2 1.0.92", + "proc-macro2 1.0.86", ] [[package]] @@ -6644,13 +6519,12 @@ dependencies = [ [[package]] name = "rcgen" -version = "0.13.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54077e1872c46788540de1ea3d7f4ccb1983d12f9aa909b234468676c1a36779" +checksum = "ffbe84efe2f38dea12e9bfc1f65377fdf03e53a18cb3b995faedf7934c7e785b" dependencies = [ - "aws-lc-rs", "pem", - "rustls-pki-types", + "ring 0.16.20", "time", "yasna", ] @@ -6687,7 +6561,7 @@ checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" dependencies = [ "getrandom", "libredox 0.1.3", - "thiserror 1.0.61", + "thiserror", ] [[package]] @@ -6745,56 +6619,12 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2 0.3.26", - "http 0.2.12", - "http-body 0.4.6", - "hyper 0.14.31", - "hyper-rustls 0.24.2", - "ipnet", - "js-sys", - "log", - "mime", - "once_cell", - "percent-encoding", - "pin-project-lite", - "rustls 0.21.12", - "rustls-native-certs 0.6.3", - "rustls-pemfile 1.0.4", - "serde", - "serde_json", - "serde_urlencoded", - "sync_wrapper 0.1.2", - "system-configuration 0.5.1", - "tokio", - "tokio-rustls 0.24.1", - "tower-service", - "url", - "wasm-bindgen", - "wasm-bindgen-futures", - "web-sys", - "winreg", -] - -[[package]] -name = "reqwest" -version = "0.12.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" -dependencies = [ - "base64 0.22.1", - "bytes", - "encoding_rs", - "futures-channel", - "futures-core", - "futures-util", - "h2 0.4.7", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.5.1", - "hyper-rustls 0.27.3", + "h2", + "http", + "http-body", + "hyper", + "hyper-rustls", "hyper-tls", - "hyper-util", "ipnet", "js-sys", "log", @@ -6803,14 +6633,17 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile 2.2.0", + "rustls 0.21.12", + "rustls-native-certs", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 1.0.2", - "system-configuration 0.6.1", + "sync_wrapper", + "system-configuration", "tokio", "tokio-native-tls", + "tokio-rustls", "tokio-util 0.7.11", "tower-service", "url", @@ -6818,7 +6651,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "windows-registry", + "winreg", ] [[package]] @@ -6831,6 +6664,21 @@ dependencies = [ "subtle", ] +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + [[package]] name = "ring" version = "0.17.8" @@ -6841,7 +6689,7 @@ dependencies = [ "cfg-if", "getrandom", "libc", - "spin", + "spin 0.9.8", "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -6971,6 +6819,20 @@ dependencies = [ "nom", ] +[[package]] +name = "rustix" +version = "0.37.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + [[package]] name = "rustix" version = "0.38.34" @@ -6980,7 +6842,7 @@ dependencies = [ "bitflags 2.6.0", "errno", "libc", - "linux-raw-sys", + "linux-raw-sys 0.4.14", "windows-sys 0.52.0", ] @@ -6991,21 +6853,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", - "ring", + "ring 0.17.8", "rustls-webpki 0.101.7", "sct", ] [[package]] name = "rustls" -version = "0.23.21" +version = "0.23.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f287924602bf649d949c63dc8ac8b235fa5387d394020705b80c4eb597ce5b8" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" dependencies = [ - "aws-lc-rs", - "log", "once_cell", - "ring", + "ring 0.17.8", "rustls-pki-types", "rustls-webpki 0.102.8", "subtle", @@ -7014,30 +6874,31 @@ dependencies = [ [[package]] name = "rustls-acme" -version = "0.12.1" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54f05935c0b1d7c5981c40b768c5d5ed96a43f5cb5166f8f5be09779c5825697" +checksum = "e0e7754a9b89270815d1b119cdd35489380dc3598e24a952bf8a167c00b68b61" dependencies = [ "async-io", "async-trait", "async-web-client", - "aws-lc-rs", "axum-server", - "base64 0.22.1", + "base64 0.13.1", "blocking", "chrono", "futures", "futures-rustls", - "http 1.2.0", + "http", "log", "pem", "rcgen", + "ring 0.16.20", + "rustls 0.21.12", "serde", "serde_json", - "thiserror 2.0.6", + "thiserror", "tokio", "tokio-util 0.7.11", - "webpki-roots", + "webpki-roots 0.25.4", "x509-parser", ] @@ -7053,19 +6914,6 
@@ dependencies = [ "security-framework", ] -[[package]] -name = "rustls-native-certs" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcaf18a4f2be7326cd874a5fa579fae794320a0f388d365dca7e480e55f83f8a" -dependencies = [ - "openssl-probe", - "rustls-pemfile 2.2.0", - "rustls-pki-types", - "schannel", - "security-framework", -] - [[package]] name = "rustls-pemfile" version = "1.0.4" @@ -7096,7 +6944,7 @@ version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", + "ring 0.17.8", "untrusted 0.9.0", ] @@ -7106,8 +6954,7 @@ version = "0.102.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" dependencies = [ - "aws-lc-rs", - "ring", + "ring 0.17.8", "rustls-pki-types", "untrusted 0.9.0", ] @@ -7222,7 +7069,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", + "ring 0.17.8", "untrusted 0.9.0", ] @@ -7292,9 +7139,9 @@ version = "1.0.203" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -7303,7 +7150,7 @@ version = "1.0.118" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d947f6b3163d8857ea16c4fa0dd4840d52f3041039a85decd46867eb1abef2e4" dependencies = [ - "indexmap 2.7.0", + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -7325,9 +7172,9 @@ version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -7354,8 +7201,8 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f1b15838534b38fb67ffe60033fe3ffad48f916c175e8baa0400e0cdb958dec" dependencies = [ - "quote", - "syn 2.0.90", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -7380,7 +7227,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.7.0", + "indexmap 2.2.6", "serde", "serde_derive", "serde_json", @@ -7395,9 +7242,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "65569b702f41443e8bc8bbb1c5779bd0450bbe723b56198980e80ec45780bce2" dependencies = [ "darling", - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -7493,7 +7340,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" dependencies = [ "libc", - "mio 0.8.11", + "mio", "signal-hook", ] @@ -7534,9 +7381,9 @@ dependencies = [ [[package]] name = "sketches-ddsketch" -version = "0.3.0" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e9a774a6c28142ac54bb25d25562e6bcf957493a184f15ad4eebccb23e410a" +checksum = "85636c14b73d81f541e525f585c0a2109e6744e1565b5c1668e31c70c10ed65c" [[package]] name = "slab" @@ -7556,6 +7403,16 @@ dependencies = [ "serde", ] +[[package]] +name = "socket2" 
+version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +dependencies = [ + "libc", + "winapi", +] + [[package]] name = "socket2" version = "0.5.7" @@ -7566,6 +7423,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + [[package]] name = "spin" version = "0.9.8" @@ -7622,7 +7485,7 @@ dependencies = [ "crc", "crossbeam-queue", "either", - "event-listener", + "event-listener 5.3.1", "futures-channel", "futures-core", "futures-intrusive", @@ -7631,25 +7494,25 @@ dependencies = [ "hashbrown 0.14.5", "hashlink", "hex", - "indexmap 2.7.0", + "indexmap 2.2.6", "log", "memchr", "once_cell", "paste", "percent-encoding", - "rustls 0.23.21", + "rustls 0.23.19", "rustls-pemfile 2.2.0", "serde", "serde_json", "sha2 0.10.8", "smallvec", "sqlformat", - "thiserror 1.0.61", + "thiserror", "tokio", "tokio-stream", "tracing", "url", - "webpki-roots", + "webpki-roots 0.26.7", ] [[package]] @@ -7658,11 +7521,11 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cac0692bcc9de3b073e8d747391827297e075c7710ff6276d9f7a1f3d58c6657" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "sqlx-core", "sqlx-macros-core", - "syn 2.0.90", + "syn 2.0.68", ] [[package]] @@ -7676,8 +7539,8 @@ dependencies = [ "heck 0.5.0", "hex", "once_cell", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "serde", "serde_json", "sha2 0.10.8", @@ -7685,7 +7548,7 @@ dependencies = [ "sqlx-mysql", "sqlx-postgres", "sqlx-sqlite", - "syn 2.0.90", + "syn 2.0.68", "tempfile", "tokio", "url", @@ -7730,7 +7593,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 1.0.61", + "thiserror", "tracing", "whoami", ] @@ -7771,7 +7634,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 1.0.61", + "thiserror", "tracing", "whoami", ] @@ -7835,12 +7698,44 @@ version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" +[[package]] +name = "structmeta" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ad9e09554f0456d67a69c1584c9798ba733a5b50349a6c0d0948710523922d" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.36", + "structmeta-derive", + "syn 2.0.68", +] + +[[package]] +name = "structmeta-derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", +] + [[package]] name = "strum" version = "0.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cae14b91c7d11c9a851d3fbc80a963198998c2a64eec840477fa92d8ce9b70bb" +[[package]] +name = "strum" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" +dependencies = [ + "strum_macros 0.24.3", +] + [[package]] name = "strum_macros" version = "0.23.1" @@ -7848,8 +7743,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"5bb0dc7ee9c15cea6199cde9a127fa16a4c5819af85395457ad72d68edc85a38" dependencies = [ "heck 0.3.3", - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", + "rustversion", + "syn 1.0.109", +] + +[[package]] +name = "strum_macros" +version = "0.24.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" +dependencies = [ + "heck 0.4.1", + "proc-macro2 1.0.86", + "quote 1.0.36", "rustversion", "syn 1.0.109", ] @@ -7892,7 +7800,7 @@ dependencies = [ "decaf377", "futures", "hex", - "http-body 1.0.1", + "http-body", "metrics-tracing-context", "penumbra-asset", "penumbra-keys", @@ -7908,31 +7816,42 @@ dependencies = [ "tokio", "tokio-stream", "tonic", - "tower 0.4.13", + "tower", "tracing", "tracing-subscriber 0.3.18", "url", ] +[[package]] +name = "syn" +version = "0.15.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5" +dependencies = [ + "proc-macro2 0.4.30", + "quote 0.6.13", + "unicode-xid 0.1.0", +] + [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "unicode-ident", ] [[package]] name = "syn" -version = "2.0.90" +version = "2.0.68" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +checksum = "901fa70d88b9d6c98022e23b4136f9f3e54e4662c3bc1bd1d84a42a9a0f0c1e9" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "unicode-ident", ] @@ -7942,8 +7861,8 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fea305d57546cc8cd04feb14b62ec84bf17f50e3f7b12560d7bfa9265f39d9ed" dependencies = [ - "proc-macro2 1.0.92", - "quote", + "proc-macro2 1.0.86", + "quote 1.0.36", "syn 1.0.109", ] @@ -7954,9 +7873,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" dependencies = [ "proc-macro-error 1.0.4", - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -7965,24 +7884,16 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "sync_wrapper" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" -dependencies = [ - "futures-core", -] - [[package]] name = "synstructure" -version = "0.13.1" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 1.0.109", + "unicode-xid 0.2.4", ] [[package]] @@ -7993,18 +7904,7 @@ checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", "core-foundation", - "system-configuration-sys 0.5.0", -] - -[[package]] -name = 
"system-configuration" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" -dependencies = [ - "bitflags 2.6.0", - "core-foundation", - "system-configuration-sys 0.6.0", + "system-configuration-sys", ] [[package]] @@ -8017,16 +7917,6 @@ dependencies = [ "libc", ] -[[package]] -name = "system-configuration-sys" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tap" version = "1.0.1" @@ -8051,16 +7941,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", - "fastrand", - "rustix", + "fastrand 2.1.0", + "rustix 0.38.34", "windows-sys 0.52.0", ] [[package]] name = "tendermint" -version = "0.40.1" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9703e34d940c2a293804752555107f8dbe2b84ec4c6dd5203831235868105d2" +checksum = "15ab8f0a25d0d2ad49ac615da054d6a76aa6603ff95f7d18bafdd34450a1a04b" dependencies = [ "bytes", "digest 0.10.7", @@ -8071,6 +7961,7 @@ dependencies = [ "num-traits", "once_cell", "prost", + "prost-types", "serde", "serde_bytes", "serde_json", @@ -8086,23 +7977,23 @@ dependencies = [ [[package]] name = "tendermint-config" -version = "0.40.1" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89cc3ea9a39b7ee34eefcff771cc067ecaa0c988c1c5ac08defd878471a06f76" +checksum = "e1a02da769166e2052cd537b1a97c78017632c2d9e19266367b27e73910434fc" dependencies = [ "flex-error", "serde", "serde_json", "tendermint", - "toml 0.8.15", + "toml 0.5.11", "url", ] [[package]] name = "tendermint-light-client-verifier" -version = "0.40.1" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0cda4a449fc70985a95f892a67286f13afa4e048d90b8d04a2bf6341e88d1c2" +checksum = "9b8090d0eef9ad57b1b913b5e358e26145c86017e87338136509b94383a4af25" dependencies = [ "derive_more", "flex-error", @@ -8113,13 +8004,16 @@ dependencies = [ [[package]] name = "tendermint-proto" -version = "0.40.1" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ae9e1705aa0fa5ecb2c6aa7fb78c2313c4a31158ea5f02048bf318f849352eb" +checksum = "b797dd3d2beaaee91d2f065e7bdf239dc8d80bba4a183a288bc1279dd5a69a1e" dependencies = [ "bytes", "flex-error", + "num-derive", + "num-traits", "prost", + "prost-types", "serde", "serde_bytes", "subtle-encoding", @@ -8128,9 +8022,9 @@ dependencies = [ [[package]] name = "tendermint-rpc" -version = "0.40.1" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835a52aa504c63ec05519e31348d3f4ba2fe79493c588e2cad5323d5e81b161a" +checksum = "71afae8bb5f6b14ed48d4e1316a643b6c2c3cbad114f510be77b4ed20b7b3e42" dependencies = [ "async-trait", "bytes", @@ -8140,7 +8034,7 @@ dependencies = [ "peg", "pin-project", "rand", - "reqwest 0.11.27", + "reqwest", "semver", "serde", "serde_bytes", @@ -8150,7 +8044,7 @@ dependencies = [ "tendermint", "tendermint-config", "tendermint-proto", - "thiserror 1.0.61", + "thiserror", "time", "tokio", "tracing", @@ -8186,6 +8080,18 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" +[[package]] +name = "test-strategy" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8361c808554228ad09bfed70f5c823caf8a3450b6881cc3a38eb57e8c08c1d9" +dependencies = [ + "proc-macro2 1.0.86", + "quote 1.0.36", + "structmeta", + "syn 2.0.68", +] + [[package]] name = "textwrap" version = "0.16.1" @@ -8198,16 +8104,7 @@ version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c546c80d6be4bc6a00c0f01730c08df82eaa7a7a61f11d656526506112cc1709" dependencies = [ - "thiserror-impl 1.0.61", -] - -[[package]] -name = "thiserror" -version = "2.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec2a1820ebd077e2b90c4df007bebf344cd394098a13c563957d0afc83ea47" -dependencies = [ - "thiserror-impl 2.0.6", + "thiserror-impl", ] [[package]] @@ -8216,20 +8113,9 @@ version = "1.0.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "thiserror-impl" -version = "2.0.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d65750cab40f4ff1929fb1ba509e9914eb756131cef4210da8d5d700d26f6312" -dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -8300,32 +8186,43 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.42.0" +version = "1.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" +checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a" dependencies = [ "backtrace", "bytes", "libc", - "mio 1.0.3", + "mio", + "num_cpus", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.5.7", "tokio-macros", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.48.0", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", ] [[package]] name = "tokio-macros" -version = "2.4.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -8348,21 +8245,11 @@ dependencies = [ "tokio", ] -[[package]] -name = "tokio-rustls" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" -dependencies = [ - "rustls 0.23.21", - "tokio", -] - [[package]] name = "tokio-stream" -version = "0.1.17" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -8416,25 +8303,13 @@ version = 
"0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ - "indexmap 2.7.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", "toml_edit 0.19.15", ] -[[package]] -name = "toml" -version = "0.8.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac2caab0bf757388c6c0ae23b3293fdb463fee59434529014f85e3263b995c28" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit 0.22.16", -] - [[package]] name = "toml_datetime" version = "0.6.6" @@ -8450,11 +8325,11 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.7.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow 0.5.40", + "winnow", ] [[package]] @@ -8463,62 +8338,45 @@ version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.7.0", - "toml_datetime", - "winnow 0.5.40", -] - -[[package]] -name = "toml_edit" -version = "0.22.16" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "278f3d518e152219c994ce877758516bca5e118eaed6996192a774fb9fbf0788" -dependencies = [ - "indexmap 2.7.0", - "serde", - "serde_spanned", + "indexmap 2.2.6", "toml_datetime", - "winnow 0.6.20", + "winnow", ] [[package]] name = "tonic" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +version = "0.10.2" +source = "git+https://github.com/penumbra-zone/tonic.git?tag=v0.10.3-penumbra#db355dd7029d1404ffa21639f596da58024364f0" dependencies = [ "async-stream", "async-trait", "axum", - "base64 0.22.1", + "base64 0.21.7", "bytes", - "h2 0.4.7", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "hyper 1.5.1", + "h2", + "http", + "http-body", + "hyper", "hyper-timeout", - "hyper-util", "percent-encoding", "pin-project", "prost", - "rustls-pemfile 2.2.0", - "socket2", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "tokio", - "tokio-rustls 0.26.1", + "tokio-rustls", "tokio-stream", - "tower 0.4.13", + "tower", "tower-layer", "tower-service", "tracing", - "webpki-roots", + "webpki-roots 0.25.4", ] [[package]] name = "tonic-reflection" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "878d81f52e7fcfd80026b7fdb6a9b578b3c3653ba987f87f0dce4b64043cba27" +version = "0.10.2" +source = "git+https://github.com/penumbra-zone/tonic.git?tag=v0.10.3-penumbra#db355dd7029d1404ffa21639f596da58024364f0" dependencies = [ "prost", "prost-types", @@ -8529,19 +8387,18 @@ dependencies = [ [[package]] name = "tonic-web" -version = "0.12.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5299dd20801ad736dccb4a5ea0da7376e59cd98f213bf1c3d478cf53f4834b58" +version = "0.10.2" +source = "git+https://github.com/penumbra-zone/tonic.git?tag=v0.10.3-penumbra#db355dd7029d1404ffa21639f596da58024364f0" dependencies = [ - "base64 0.22.1", + "base64 0.21.7", "bytes", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", + "http", + "http-body", + "hyper", "pin-project", "tokio-stream", "tonic", - "tower-http 0.5.2", + "tower-http", "tower-layer", "tower-service", "tracing", @@ -8568,27 +8425,11 @@ dependencies = [ 
"tracing", ] -[[package]] -name = "tower" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" -dependencies = [ - "futures-core", - "futures-util", - "pin-project-lite", - "sync_wrapper 1.0.2", - "tokio", - "tower-layer", - "tower-service", - "tracing", -] - [[package]] name = "tower-abci" -version = "0.18.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f3458c29aae68bfc21edc8482ee7ecce8e5dfbb0d07a8034f96fa304525391f" +checksum = "0d4826f3df3e9a37083d978cae73f020bcdf6143956b7dfc1bd6050b4e16367c" dependencies = [ "bytes", "futures", @@ -8599,7 +8440,7 @@ dependencies = [ "tokio", "tokio-stream", "tokio-util 0.6.10", - "tower 0.4.13", + "tower", "tracing", ] @@ -8611,39 +8452,26 @@ checksum = "b882e5e82ee7440a08335f4d5a2edd9f7678b2cba73eac4826b53c22fd76fdd3" dependencies = [ "futures", "pin-project", - "thiserror 1.0.61", + "thiserror", "tokio", "tokio-util 0.7.11", - "tower 0.4.13", + "tower", "tracing", ] [[package]] name = "tower-http" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9cd434a998747dd2c4276bc96ee2e0c7a2eadf3cae88e52be55a05fa9053f5" -dependencies = [ - "bitflags 2.6.0", - "bytes", - "http 1.2.0", - "http-body 1.0.1", - "http-body-util", - "pin-project-lite", - "tower-layer", - "tower-service", -] - -[[package]] -name = "tower-http" -version = "0.6.2" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" +checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ "bitflags 2.6.0", "bytes", - "http 1.2.0", - "http-body 1.0.1", + "futures-core", + "futures-util", + "http", + "http-body", + "http-range-header", "pin-project-lite", "tower-layer", "tower-service", @@ -8652,15 +8480,15 @@ dependencies = [ [[package]] name = "tower-layer" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" +checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" [[package]] name = "tower-service" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" @@ -8680,9 +8508,9 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -8817,6 +8645,12 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + [[package]] name = "unicode_categories" version = "0.1.1" @@ -8857,6 +8691,12 @@ dependencies = [ "serde", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + [[package]] name = "uuid" version = "1.9.1" @@ -8892,9 +8732,9 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3fd98999db9227cf28e59d83e1f120f42bc233d4b152e8fab9bc87d5bb1e0f8" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -8906,6 +8746,12 @@ dependencies = [ "libc", ] +[[package]] +name = "waker-fn" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "317211a0dc0ceedd78fb2ca9a44aed3d7b9b26f81870d485c07122b4350673b7" + [[package]] name = "walkdir" version = "2.5.0" @@ -8956,9 +8802,9 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", "wasm-bindgen-shared", ] @@ -8980,7 +8826,7 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ - "quote", + "quote 1.0.36", "wasm-bindgen-macro-support", ] @@ -8990,9 +8836,9 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -9028,23 +8874,17 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.7" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" -dependencies = [ - "rustls-pki-types", -] +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" [[package]] -name = "which" -version = "4.4.2" +name = "webpki-roots" +version = "0.26.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" dependencies = [ - "either", - "home", - "once_cell", - "rustix", + "rustls-pki-types", ] [[package]] @@ -9095,7 +8935,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12342cb4d8e3b046f3d80effd474a7a02447231330ef77d71daa6fbc40681143" dependencies = [ "windows-core 0.57.0", - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -9104,7 +8944,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -9115,8 +8955,8 @@ checksum = "d2ed2439a290666cd67ecce2b0ffaad89c2a56b976b736e6ece670297897832d" dependencies = [ "windows-implement", "windows-interface", - "windows-result 0.1.2", - "windows-targets 0.52.6", + "windows-result", + "windows-targets 0.52.5", ] [[package]] @@ -9125,9 +8965,9 @@ version = "0.57.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9107ddc059d5b6fbfbffdfa7a7fe3e22a226def0b2608f72e9d552763d3e1ad7" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -9136,20 +8976,9 @@ version = "0.57.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29bee4b38ea3cde66011baa44dba677c432a78593e202392d1e9070cf2a7fca7" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", -] - -[[package]] -name = "windows-registry" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" -dependencies = [ - "windows-result 0.2.0", - "windows-strings", - "windows-targets 0.52.6", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -9158,26 +8987,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8" dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-strings" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" -dependencies = [ - "windows-result 0.2.0", - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -9195,16 +9005,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-sys" -version = "0.59.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" -dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.52.5", ] [[package]] @@ -9224,18 +9025,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", + "windows_aarch64_gnullvm 0.52.5", + "windows_aarch64_msvc 0.52.5", + "windows_i686_gnu 0.52.5", "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", + "windows_i686_msvc 0.52.5", + "windows_x86_64_gnu 0.52.5", + "windows_x86_64_gnullvm 0.52.5", + "windows_x86_64_msvc 0.52.5", ] [[package]] @@ -9246,9 +9047,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263" [[package]] name = "windows_aarch64_msvc" @@ -9258,9 +9059,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +checksum = 
"9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6" [[package]] name = "windows_i686_gnu" @@ -9270,15 +9071,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670" [[package]] name = "windows_i686_gnullvm" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9" [[package]] name = "windows_i686_msvc" @@ -9288,9 +9089,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf" [[package]] name = "windows_x86_64_gnu" @@ -9300,9 +9101,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9" [[package]] name = "windows_x86_64_gnullvm" @@ -9312,9 +9113,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596" [[package]] name = "windows_x86_64_msvc" @@ -9324,9 +9125,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.6" +version = "0.52.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0" [[package]] name = "winnow" @@ -9337,15 +9138,6 @@ dependencies = [ "memchr", ] -[[package]] -name = "winnow" -version = "0.6.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36c1fec1a2bb5866f07c25f68c26e565c4c200aebb96d7e55710c19d3e8ac49b" -dependencies = [ - "memchr", -] - [[package]] name = "winreg" version = "0.50.0" @@ -9367,18 +9159,19 @@ dependencies = [ [[package]] name = "x509-parser" -version = "0.16.0" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcbc162f30700d6f3f82a24bf7cc62ffe7caea42c0b2cba8bf7f3ae50cf51f69" +checksum = "9fb9bace5b5589ffead1afb76e43e34cff39cd0f3ce7e170ae0c29e53b88eb1c" dependencies = [ "asn1-rs", + "base64 0.13.1", "data-encoding", "der-parser", "lazy_static", "nom", "oid-registry", "rusticata-macros", - "thiserror 1.0.61", + "thiserror", "time", ] @@ -9389,8 +9182,8 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", - "linux-raw-sys", - "rustix", + "linux-raw-sys 0.4.14", + "rustix 0.38.34", ] [[package]] @@ -9423,9 +9216,9 @@ version = "0.7.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] @@ -9443,9 +9236,9 @@ version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ - "proc-macro2 1.0.92", - "quote", - "syn 2.0.90", + "proc-macro2 1.0.86", + "quote 1.0.36", + "syn 2.0.68", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index e3b4e4ef93..bef547916b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,6 +12,7 @@ members = [ "crates/bin/pd", "crates/bin/pindexer", "crates/bin/pmonitor", + "crates/cnidarium", "crates/cnidarium-component", "crates/core/app", "crates/core/asset", @@ -122,9 +123,8 @@ ark-std = { default-features = false, version = "0.4" } assert_cmd = { version = "2.0" } async-stream = { version = "0.3.5" } async-trait = { version = "0.1.52" } -axum = { version = "0.7.9" } -axum-extra = { version = "0.9.6" } -axum-server = { version = "0.7.1" } +axum = { version = "0.6" } +axum-server = { version = "0.5" } base64 = { version = "0.21.2" } bech32 = { version = "0.8.1" } bincode = { version = "1.3.3" } @@ -135,7 +135,7 @@ camino = { version = "1" } chacha20poly1305 = { version = "0.9.0" } chrono = { default-features = false, version = "0.4" } clap = { version = "3.2" } -cnidarium = { version = "0.82", default-features = false} +cnidarium = { default-features = false, path = "crates/cnidarium" } cnidarium-component = { default-features = false, path = "crates/cnidarium-component" } cometindex = { path = "crates/util/cometindex" } criterion = { version = "0.4" } @@ -149,26 +149,25 @@ ed25519-consensus = { version = "2.1" } ethnum = { version = "1.3" } futures = { version = "0.3.28" } hex = { version = "0.4.3" } -http = { version = "1.2.0" } -http-body = { version = "1.0.1" } -http-body-util = { version = "0.1.2" } +http = { version = "0.2.9" } +http-body = { version = "0.4.5" } humantime = { version = "2.1" } -ibc-proto = { default-features = false, version = "0.51.1" } -ibc-types = { default-features = false, version = "0.15.0" } +ibc-proto = { default-features = false, version = "0.41.0" } +ibc-types = { default-features = false, version = "0.12.0" } ibig = { version = "0.3" } -ics23 = { version = "0.12.0" } +ics23 = { version = "0.11.3" } im = { version = "^15.1.0" } indicatif = { version = "0.16" } -jmt = { version = "0.11", features = ["migration"] } -metrics = { version = "0.24.1" } -metrics-exporter-prometheus = { version = "0.16", features = ["http-listener"] } -metrics-tracing-context = { version = "0.17.0" } +jmt = { version = "0.10", features = ["migration"] } +metrics = { version = "0.22" } +metrics-exporter-prometheus = { version = "0.13", features = ["http-listener"] } +metrics-tracing-context = { version = "0.15" } num-bigint = { version = "0.4" } num-traits = { default-features = false, version = "0.2.15" } once_cell = { version = "1.8" } parking_lot = { version = "0.12.1" } -pbjson = { version = "0.7.0" } -pbjson-types = { version = "0.7.0" } +pbjson = { version = "0.6" } 
+pbjson-types = { version = "0.6.0" } penumbra-app = { default-features = false, path = "crates/core/app" } penumbra-asset = { default-features = false, path = "crates/core/asset" } penumbra-community-pool = { default-features = false, path = "crates/core/component/community-pool" } @@ -198,13 +197,14 @@ penumbra-transaction = { default-features = false, path = "crates/co penumbra-txhash = { default-features = false, path = "crates/core/txhash" } penumbra-view = { path = "crates/view" } penumbra-wallet = { path = "crates/wallet" } +penumbra-extension = { path = "crates/penumbra-extension", default-features = false } pin-project = { version = "1.0.12" } pin-project-lite = { version = "0.2.9" } poseidon377 = { version = "1.2.0" } -proptest = { version = "1.6" } -proptest-derive = { version = "0.5.1" } -prost = { version = "0.13.4" } -prost-types = { version = "0.13.4" } +proptest = { version = "1" } +proptest-derive = { version = "0.3" } +prost = { version = "0.12.3" } +prost-types = { version = "0.12" } r2d2 = { version = "0.8" } r2d2_sqlite = { version = "0.25" } rand = { version = "0.8.5" } @@ -212,7 +212,6 @@ rand_chacha = { version = "0.3.1" } rand_core = { version = "0.6.4" } regex = { version = "1.8.1" } rocksdb = { version = "0.21.0" } -rustls = { version = "0.23.21" } serde = { version = "1.0.186" } serde_json = { version = "1.0.96" } serde_unit_struct = { version = "0.1" } @@ -221,25 +220,33 @@ sha2 = { version = "0.10" } sqlx = { version = "0.8", features = ["bigdecimal", "postgres", "runtime-tokio", "tls-rustls"] } tap = "1.0.1" tempfile = { version = "3.3.0" } -tendermint = { default-features = false, version = "0.40.1" } -tendermint-config = { version = "0.40.1" } -tendermint-light-client-verifier = { version = "0.40.1" } -tendermint-proto = { version = "0.40.1" } -tendermint-rpc = { version = "0.40.1" } +tendermint = { default-features = false, version = "0.34.0" } +tendermint-config = { version = "0.34.0" } +tendermint-light-client-verifier = { version = "0.34.0" } +tendermint-proto = { version = "0.34.0" } +tendermint-rpc = { version = "0.34.0" } termion = { version = "3" } thiserror = { version = "1.0" } time = { version = "0.3" } -tokio = { version = "1.39.0" } +tokio = { version = "1.3" } tokio-stream = { version = "0.1.8" } tokio-util = { version = "0.7" } toml = { version = "0.7" } -tonic = { version = "0.12.3" } -tonic-reflection = { version = "0.12.3" } -tonic-web = { version = "0.12.3" } +tonic = { version = "0.10" } +tonic-reflection = { version = "0.10.0" } +tonic-web = { version = "0.10.0" } tower = { version = "0.4.0" } -tower-http = { version = "0.6.2" } +tower-http = { version = "0.4" } tower-service = { version = "0.3.2" } tracing = { version = "0.1" } tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } url = { version = "2.2" } getrandom = { version = "0.2", default-features = false } + +# TODO(kate): +# temporarily point these dependencies to a tag in the penumbra-zone fork. +# see #4392, #4400, and hyperium/tonic#1701 for more information. 
+[patch.crates-io] +tonic = { git = "https://github.com/penumbra-zone/tonic.git", tag = "v0.10.3-penumbra" } +tonic-reflection = { git = "https://github.com/penumbra-zone/tonic.git", tag = "v0.10.3-penumbra" } +tonic-web = { git = "https://github.com/penumbra-zone/tonic.git", tag = "v0.10.3-penumbra" } diff --git a/crates/bench/Cargo.toml b/crates/bench/Cargo.toml index ec8da243db..dc68edfe71 100644 --- a/crates/bench/Cargo.toml +++ b/crates/bench/Cargo.toml @@ -5,7 +5,7 @@ edition = {workspace = true} [build-dependencies] regex = { version = "1", optional = true } -reqwest = { version = "0.12.9", optional = true, features = [ +reqwest = { version = "0.11.14", optional = true, features = [ "blocking", "json", ] } diff --git a/crates/bin/pcli/Cargo.toml b/crates/bin/pcli/Cargo.toml index bca319b6e3..00b319ca60 100644 --- a/crates/bin/pcli/Cargo.toml +++ b/crates/bin/pcli/Cargo.toml @@ -40,7 +40,6 @@ bincode = {workspace = true} blake2b_simd = {workspace = true} bytes = {workspace = true} camino = {workspace = true} -cnidarium = {workspace = true, features = ["rpc"] } clap = {workspace = true, features = ["derive", "env"]} colored = "2.1.0" colored_json = "4.1" @@ -88,7 +87,6 @@ rand_chacha = {workspace = true} rand_core = {workspace = true, features = ["getrandom"]} regex = {workspace = true} rpassword = "7" -rustls = {workspace = true} serde = {workspace = true, features = ["derive"]} serde_json = {workspace = true} serde_with = {workspace = true, features = ["hex"]} diff --git a/crates/bin/pcli/src/command/query.rs b/crates/bin/pcli/src/command/query.rs index 7851606792..8fe1cfe66f 100644 --- a/crates/bin/pcli/src/command/query.rs +++ b/crates/bin/pcli/src/command/query.rs @@ -13,12 +13,12 @@ mod validator; use auction::AuctionCmd; use base64::prelude::*; use chain::ChainCmd; -use cnidarium::proto::v1::non_verifiable_key_value_request::Key as NVKey; use colored_json::ToColoredJson; use community_pool::CommunityPoolCmd; use dex::DexCmd; use governance::GovernanceCmd; use ibc_query::IbcCmd; +use penumbra_proto::cnidarium::v1::non_verifiable_key_value_request::Key as NVKey; use shielded_pool::ShieldedPool; use tx::Tx; pub(super) use validator::ValidatorCmd; @@ -177,7 +177,7 @@ impl QueryCmd { } => (key.clone(), storage_backend.clone()), }; - use cnidarium::proto::v1::query_service_client::QueryServiceClient; + use penumbra_proto::cnidarium::v1::query_service_client::QueryServiceClient; let mut client = QueryServiceClient::new(app.pd_channel().await?); // Using an enum in the clap arguments was annoying; this is workable: @@ -187,7 +187,7 @@ impl QueryCmd { .decode(&key) .map_err(|e| anyhow::anyhow!(format!("invalid base64: {}", e)))?; - let req = cnidarium::proto::v1::NonVerifiableKeyValueRequest { + let req = penumbra_proto::cnidarium::v1::NonVerifiableKeyValueRequest { key: Some(NVKey { inner: key_bytes }), ..Default::default() }; @@ -205,7 +205,7 @@ impl QueryCmd { } // Default to JMT "jmt" | _ => { - let req = cnidarium::proto::v1::KeyValueRequest { + let req = penumbra_proto::cnidarium::v1::KeyValueRequest { key: key.clone(), // Command-line queries don't have a reason to include proofs as of now. proof: false, @@ -269,10 +269,12 @@ impl QueryCmd { // this code (not just this function, the whole module) is pretty shitty, // but that's maybe okay for the moment. it exists to consume the rpc. 
async fn watch(key_regex: String, nv_key_regex: String, app: &mut App) -> Result<()> { - use cnidarium::proto::v1::{query_service_client::QueryServiceClient, watch_response as wr}; + use penumbra_proto::cnidarium::v1::{ + query_service_client::QueryServiceClient, watch_response as wr, + }; let mut client = QueryServiceClient::new(app.pd_channel().await?); - let req = cnidarium::proto::v1::WatchRequest { + let req = penumbra_proto::cnidarium::v1::WatchRequest { key_regex, nv_key_regex, }; diff --git a/crates/bin/pcli/src/command/tx.rs b/crates/bin/pcli/src/command/tx.rs index 65fd9b1a70..3b5a943941 100644 --- a/crates/bin/pcli/src/command/tx.rs +++ b/crates/bin/pcli/src/command/tx.rs @@ -1409,7 +1409,7 @@ impl TxCmd { let mut noble_client = CosmosServiceClient::new( Channel::from_shared(noble_node.to_string())? - .tls_config(ClientTlsConfig::new().with_webpki_roots())? + .tls_config(ClientTlsConfig::new())? .connect() .await?, ); diff --git a/crates/bin/pcli/src/main.rs b/crates/bin/pcli/src/main.rs index b6990d0db0..43be79b1bf 100644 --- a/crates/bin/pcli/src/main.rs +++ b/crates/bin/pcli/src/main.rs @@ -5,7 +5,6 @@ use std::fs; use anyhow::{Context, Result}; use clap::Parser; -use rustls::crypto::aws_lc_rs; use pcli::{command::*, opt::Opt}; @@ -22,12 +21,6 @@ async fn main() -> Result<()> { // that tracing is set up even for wallet commands that don't build the `App`. opt.init_tracing(); - // Initialize HTTPS support - // rustls::crypto::aws_lc_rs::default_provider().install_default(); - aws_lc_rs::default_provider() - .install_default() - .expect("failed to initialize rustls support, via aws-lc-rs"); - //Ensure that the data_path exists, in case this is a cold start fs::create_dir_all(&opt.home) .with_context(|| format!("Failed to create home directory {}", opt.home))?; diff --git a/crates/bin/pcli/src/network.rs b/crates/bin/pcli/src/network.rs index f809f7543f..56c7331536 100644 --- a/crates/bin/pcli/src/network.rs +++ b/crates/bin/pcli/src/network.rs @@ -10,9 +10,9 @@ use penumbra_proto::{ }; use penumbra_stake::validator::Validator; use penumbra_transaction::{txhash::TransactionId, Transaction, TransactionPlan}; -use penumbra_view::{ViewClient, ViewServer}; +use penumbra_view::ViewClient; use std::{fs, future::Future}; -use tonic::transport::Channel; +use tonic::transport::{Channel, ClientTlsConfig}; use tracing::instrument; use crate::App; @@ -177,12 +177,19 @@ impl App { Ok(()) } - /// Convenience method for obtaining a `tonic::Channel` for the remote - /// `pd` endpoint, as configured for `pcli`. + // TODO: why do we need this here but not in the view crate? pub async fn pd_channel(&self) -> anyhow::Result { - ViewServer::get_pd_channel(self.config.grpc_url.clone()) - .await - .context(format!("could not connect to {}", self.config.grpc_url)) + match self.config.grpc_url.scheme() { + "http" => Ok(Channel::from_shared(self.config.grpc_url.to_string())? + .connect() + .await?), + "https" => Ok(Channel::from_shared(self.config.grpc_url.to_string())? + .tls_config(ClientTlsConfig::new())? 
+ .connect() + .await?), + other => Err(anyhow::anyhow!("unknown url scheme {other}")) + .with_context(|| format!("could not connect to {}", self.config.grpc_url)), + } } pub async fn tendermint_proxy_client( diff --git a/crates/bin/pclientd/Cargo.toml b/crates/bin/pclientd/Cargo.toml index 69749becbb..eafbf96aa1 100644 --- a/crates/bin/pclientd/Cargo.toml +++ b/crates/bin/pclientd/Cargo.toml @@ -38,7 +38,6 @@ penumbra-view = {workspace = true} prost = {workspace = true} rand = {workspace = true} rand_core = {workspace = true, features = ["getrandom"]} -rustls = {workspace = true} serde = {workspace = true, features = ["derive"]} serde_json = {workspace = true} serde_with = {workspace = true, features = ["hex"]} diff --git a/crates/bin/pclientd/src/lib.rs b/crates/bin/pclientd/src/lib.rs index d198d4d533..5b4be2f4b0 100644 --- a/crates/bin/pclientd/src/lib.rs +++ b/crates/bin/pclientd/src/lib.rs @@ -298,7 +298,11 @@ impl Opt { .load_or_init_sqlite(&config.full_viewing_key, &config.grpc_url) .await?; - let proxy_channel = ViewServer::get_pd_channel(config.grpc_url.clone()).await?; + let proxy_channel = + tonic::transport::Channel::from_shared(config.grpc_url.to_string()) + .expect("this is a valid address") + .connect() + .await?; let app_query_proxy = AppQueryProxy(proxy_channel.clone()); let governance_query_proxy = GovernanceQueryProxy(proxy_channel.clone()); @@ -340,7 +344,7 @@ impl Opt { .register_encoded_file_descriptor_set( penumbra_proto::FILE_DESCRIPTOR_SET, ) - .build_v1() + .build() .with_context(|| "could not configure grpc reflection service")?, )) .serve(config.bind_addr); diff --git a/crates/bin/pclientd/src/main.rs b/crates/bin/pclientd/src/main.rs index 87e12a0723..377028314a 100644 --- a/crates/bin/pclientd/src/main.rs +++ b/crates/bin/pclientd/src/main.rs @@ -3,7 +3,6 @@ use std::io::IsTerminal as _; use anyhow::Result; use clap::Parser; -use rustls::crypto::aws_lc_rs; use tracing_subscriber::{prelude::*, EnvFilter}; use pclientd::Opt; @@ -25,11 +24,5 @@ async fn main() -> Result<()> { let opt = Opt::parse(); - // Initialize HTTPS support - // rustls::crypto::aws_lc_rs::default_provider().install_default(); - aws_lc_rs::default_provider() - .install_default() - .expect("failed to initialize rustls support, via aws-lc-rs"); - opt.exec().await } diff --git a/crates/bin/pclientd/src/proxy.rs b/crates/bin/pclientd/src/proxy.rs index 1a9077f1f4..7ae9bc2d85 100644 --- a/crates/bin/pclientd/src/proxy.rs +++ b/crates/bin/pclientd/src/proxy.rs @@ -1,19 +1,28 @@ use futures::FutureExt; +use http_body::Body as _; use std::convert::Infallible; use std::pin::Pin; use std::{ future::Future, task::{Context, Poll}, }; -use tonic::server::NamedService; -use tonic::{body::BoxBody, transport::Channel}; +use tonic::transport::NamedService; +use tonic::{ + body::BoxBody, + transport::{Body, Channel}, +}; use tower::ServiceExt; fn proxy( channel: Channel, - req: http::Request, + req: http::Request, ) -> Pin, Infallible>> + Send + 'static>> { tracing::debug!(headers = ?req.headers(), "proxying request"); + // Convert request types + let req = req.map(|b| { + b.map_err(|e| tonic::Status::from_error(Box::new(e))) + .boxed_unsync() + }); let rsp = channel.oneshot(req); @@ -21,8 +30,11 @@ fn proxy( // Once we get the response, we need to convert any transport errors into // an Ok(HTTP response reporting an internal error), so we can have Error = Infallible let rsp = match rsp.await { - Ok(rsp) => rsp, - Err(e) => tonic::Status::internal(format!("grpc proxy error: {e}")).into_http(), + Ok(rsp) => 
rsp.map(|b| { + b.map_err(|e| tonic::Status::from_error(Box::new(e))) + .boxed_unsync() + }), + Err(e) => tonic::Status::internal(format!("grpc proxy error: {e}")).to_http(), }; Ok::<_, Infallible>(rsp) } @@ -36,7 +48,7 @@ impl NamedService for AppQueryProxy { const NAME: &'static str = "penumbra.core.app.v1.QueryService"; } -impl tower::Service> for AppQueryProxy { +impl tower::Service> for AppQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -46,7 +58,7 @@ impl tower::Service> for AppQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -58,7 +70,7 @@ impl NamedService for GovernanceQueryProxy { const NAME: &'static str = "penumbra.core.component.governance.v1.QueryService"; } -impl tower::Service> for GovernanceQueryProxy { +impl tower::Service> for GovernanceQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -68,7 +80,7 @@ impl tower::Service> for GovernanceQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -80,7 +92,7 @@ impl NamedService for DexQueryProxy { const NAME: &'static str = "penumbra.core.component.dex.v1.QueryService"; } -impl tower::Service> for DexQueryProxy { +impl tower::Service> for DexQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -90,7 +102,7 @@ impl tower::Service> for DexQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -102,7 +114,7 @@ impl NamedService for DexSimulationProxy { const NAME: &'static str = "penumbra.core.component.dex.v1.SimulationService"; } -impl tower::Service> for DexSimulationProxy { +impl tower::Service> for DexSimulationProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -112,7 +124,7 @@ impl tower::Service> for DexSimulationProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -124,7 +136,7 @@ impl NamedService for FeeQueryProxy { const NAME: &'static str = "penumbra.core.component.fee.v1.QueryService"; } -impl tower::Service> for FeeQueryProxy { +impl tower::Service> for FeeQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -134,7 +146,7 @@ impl tower::Service> for FeeQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -146,7 +158,7 @@ impl NamedService for SctQueryProxy { const NAME: &'static str = "penumbra.core.component.sct.v1.QueryService"; } -impl tower::Service> for SctQueryProxy { +impl tower::Service> for SctQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -156,7 +168,7 @@ impl tower::Service> for SctQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -168,7 +180,7 @@ impl NamedService for ShieldedPoolQueryProxy { const NAME: &'static str = "penumbra.core.component.shielded_pool.v1.QueryService"; } -impl tower::Service> for 
ShieldedPoolQueryProxy { +impl tower::Service> for ShieldedPoolQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -178,7 +190,7 @@ impl tower::Service> for ShieldedPoolQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -190,7 +202,7 @@ impl NamedService for ChainQueryProxy { const NAME: &'static str = "penumbra.core.component.chain.v1.QueryService"; } -impl tower::Service> for ChainQueryProxy { +impl tower::Service> for ChainQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -200,7 +212,7 @@ impl tower::Service> for ChainQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -212,7 +224,7 @@ impl NamedService for StakeQueryProxy { const NAME: &'static str = "penumbra.core.component.stake.v1.QueryService"; } -impl tower::Service> for StakeQueryProxy { +impl tower::Service> for StakeQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -222,7 +234,7 @@ impl tower::Service> for StakeQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -234,7 +246,7 @@ impl NamedService for CompactBlockQueryProxy { const NAME: &'static str = "penumbra.core.component.compact_block.v1.QueryService"; } -impl tower::Service> for CompactBlockQueryProxy { +impl tower::Service> for CompactBlockQueryProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -244,7 +256,7 @@ impl tower::Service> for CompactBlockQueryProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } @@ -256,7 +268,7 @@ impl NamedService for TendermintProxyProxy { const NAME: &'static str = "penumbra.util.tendermint_proxy.v1.TendermintProxyService"; } -impl tower::Service> for TendermintProxyProxy { +impl tower::Service> for TendermintProxyProxy { type Response = http::Response; type Error = Infallible; type Future = @@ -266,7 +278,7 @@ impl tower::Service> for TendermintProxyProxy { Poll::Ready(Ok(())) } - fn call(&mut self, req: http::Request) -> Self::Future { + fn call(&mut self, req: http::Request) -> Self::Future { proxy(self.0.clone(), req) } } diff --git a/crates/bin/pd/Cargo.toml b/crates/bin/pd/Cargo.toml index 039f0d58d7..79f0b127db 100644 --- a/crates/bin/pd/Cargo.toml +++ b/crates/bin/pd/Cargo.toml @@ -28,7 +28,7 @@ anyhow = { workspace = true } ark-ff = { workspace = true, default-features = true } async-stream = { workspace = true } async-trait = { workspace = true } -axum = { workspace = true, features = ["http2"] } +axum = "0.6" axum-server = { workspace = true, features = ["tls-rustls"] } base64 = { workspace = true } bincode = { workspace = true } @@ -55,7 +55,7 @@ metrics = { workspace = true } metrics-exporter-prometheus = { workspace = true } metrics-process = "2.0.0" metrics-tracing-context = { workspace = true } -metrics-util = "0.18.0" +metrics-util = "0.16.2" mime_guess = "2" once_cell = { workspace = true } pbjson-types = { workspace = true } @@ -87,9 +87,8 @@ rand = { workspace = true } rand_chacha = { workspace = true } rand_core = { workspace = true, features = ["getrandom"] } 
regex = { workspace = true } -reqwest = { version = "0.12.9", features = ["json", "stream"] } +reqwest = { version = "0.11", features = ["json", "stream"] } rocksdb = { workspace = true } -rustls = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true } serde_with = { workspace = true, features = ["hex"] } @@ -109,9 +108,9 @@ tonic = { workspace = true } tonic-reflection = { workspace = true } tonic-web = { workspace = true } tower = { workspace = true, features = ["full"] } -tower-abci = "0.18" +tower-abci = "0.11" tower-actor = "0.1.0" -tower-http = { workspace = true, features = ["cors", "trace"] } +tower-http = { workspace = true } tower-service = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true, features = ["env-filter", "ansi"] } @@ -125,4 +124,4 @@ penumbra-proof-params = { workspace = true, features = [ ], default-features = true } assert_cmd = { workspace = true } predicates = "2.1" -prost-reflect = "0.14.3" +prost-reflect = "0.13.1" diff --git a/crates/bin/pd/src/main.rs b/crates/bin/pd/src/main.rs index b852a1d0ea..59bca2cdbc 100644 --- a/crates/bin/pd/src/main.rs +++ b/crates/bin/pd/src/main.rs @@ -21,14 +21,10 @@ use pd::{ }; use penumbra_app::app_version::check_and_update_app_version; use penumbra_app::{APP_VERSION, SUBSTORE_PREFIXES}; -use penumbra_tower_trace::remote_addr; use rand::Rng; use rand_core::OsRng; -use rustls::crypto::aws_lc_rs; use tendermint_config::net::Address as TendermintAddress; -use tower::ServiceBuilder; use tower_http::cors::CorsLayer; -use tower_http::trace::TraceLayer; use tracing::Instrument as _; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; @@ -55,12 +51,6 @@ async fn main() -> anyhow::Result<()> { .with(metrics_layer); registry.init(); - // Initialize HTTPS support - // We log the error and continue, so the node is operational. - if let Err(e) = aws_lc_rs::default_provider().install_default() { - tracing::error!("failed to initialize rustls support: {:?}", e); - } - tracing::info!(?cmd, version = env!("CARGO_PKG_VERSION"), "running command"); match cmd { RootCommand::Start { @@ -139,26 +129,15 @@ async fn main() -> anyhow::Result<()> { ); let tm_proxy = penumbra_tendermint_proxy::TendermintProxy::new(cometbft_addr); - - let grpc_routes = penumbra_app::rpc::routes(&storage, tm_proxy, enable_expensive_rpc)? - .into_axum_router() - .layer( - ServiceBuilder::new().layer(TraceLayer::new_for_grpc().make_span_with( - |req: &http::Request<_>| match remote_addr(req) { - Some(remote_addr) => { - tracing::error_span!("grpc", ?remote_addr) - } - None => tracing::error_span!("grpc"), - }, - )), - ); + let grpc_server = penumbra_app::rpc::router(&storage, tm_proxy, enable_expensive_rpc)?; // Create Axum routes for the frontend app. let frontend = pd::zipserve::router("/app/", pd::MINIFRONT_ARCHIVE_BYTES); let node_status = pd::zipserve::router("/", pd::NODE_STATUS_ARCHIVE_BYTES); // Now we drop down a layer of abstraction, from tonic to axum, and merge handlers. - let router = grpc_routes + let router = grpc_server + .into_router() .merge(frontend) .merge(node_status) // Set rather permissive CORS headers for pd's gRPC: the service @@ -171,13 +150,6 @@ async fn main() -> anyhow::Result<()> { // Now start the GRPC server, initializing an ACME client to use as a certificate // resolver if auto-https has been enabled. if auto-https is not enabled, we will // instead spawn a future that will never return. 
- - // TODO(janis): Is `axum_server::bind` sufficient to accept http1 (for grpc-web) and - // http2 (for grpc) requests? - // - // See also this (about axum::serve, suggesting that it just works out of the box; does that - // apply to axum_server::bind as well?) - // https://github.com/tokio-rs/axum/blob/c596deafe48ed608775e312eef7d12ddbb0fd424/examples/websockets-http2/src/main.rs#L57-L59 let grpc_server = axum_server::bind(grpc_bind); let (grpc_server, acme_worker) = match grpc_auto_https { Some(domain) => { diff --git a/crates/bin/pmonitor/Cargo.toml b/crates/bin/pmonitor/Cargo.toml index bdc18cbb05..f9799f9f91 100644 --- a/crates/bin/pmonitor/Cargo.toml +++ b/crates/bin/pmonitor/Cargo.toml @@ -31,7 +31,6 @@ penumbra-stake = {workspace = true, default-features = false} penumbra-tct = {workspace = true, default-features = false} penumbra-view = {workspace = true} regex = {workspace = true} -rustls = {workspace = true} serde = {workspace = true, features = ["derive"]} serde_json = {workspace = true} tokio = {workspace = true, features = ["full"]} diff --git a/crates/bin/pmonitor/src/main.rs b/crates/bin/pmonitor/src/main.rs index d8d448c0c0..372f425adb 100644 --- a/crates/bin/pmonitor/src/main.rs +++ b/crates/bin/pmonitor/src/main.rs @@ -20,11 +20,10 @@ use clap::{self, Parser}; use directories::ProjectDirs; use futures::StreamExt; use penumbra_asset::STAKING_TOKEN_ASSET_ID; -use rustls::crypto::aws_lc_rs; use std::fs; use std::io::IsTerminal as _; use std::str::FromStr; -use tonic::transport::Channel; +use tonic::transport::{Channel, ClientTlsConfig}; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; use uuid::Uuid; @@ -87,12 +86,6 @@ fn init_tracing() -> anyhow::Result<()> { async fn main() -> Result<()> { let opt = Opt::parse(); init_tracing()?; - - // Initialize HTTPS support - aws_lc_rs::default_provider() - .install_default() - .expect("failed to initialize rustls support, via aws-lc-rs"); - tracing::info!(?opt, version = env!("CARGO_PKG_VERSION"), "running command"); opt.exec().await } @@ -233,6 +226,21 @@ impl Opt { compact_block.try_into() } + /// Stolen from pcli + pub async fn pd_channel(&self, grpc_url: Url) -> anyhow::Result { + match grpc_url.scheme() { + "http" => Ok(Channel::from_shared(grpc_url.to_string())? + .connect() + .await?), + "https" => Ok(Channel::from_shared(grpc_url.to_string())? + .tls_config(ClientTlsConfig::new())? + .connect() + .await?), + other => Err(anyhow::anyhow!("unknown url scheme {other}")) + .with_context(|| format!("could not connect to {}", grpc_url)), + } + } + /// Create wallet given a path and fvk pub async fn create_wallet( &self, @@ -395,7 +403,7 @@ impl Opt { ))?)?; let mut stake_client = StakeQueryServiceClient::new( - ViewServer::get_pd_channel(pmonitor_config.grpc_url()).await?, + self.pd_channel(pmonitor_config.grpc_url()).await?, ); // Sync each wallet to the latest block height, check for new migrations, and check the balance. 
diff --git a/crates/cnidarium/Cargo.toml b/crates/cnidarium/Cargo.toml new file mode 100644 index 0000000000..bd039898d7 --- /dev/null +++ b/crates/cnidarium/Cargo.toml @@ -0,0 +1,46 @@ +[package] +name = "cnidarium" +version = {workspace = true} +edition = {workspace = true} + +[features] +migration = [] +migration-proptests = ["migration"] +default = ["metrics"] +rpc = ["dep:tonic", "dep:prost", "dep:serde", "dep:pbjson", "dep:ibc-proto"] + +[dependencies] +anyhow = {workspace = true} +async-trait = {workspace = true} +base64 = {workspace = true} +borsh = { version = "1.3.0" , features = ["derive", "de_strict_order"]} +futures = {workspace = true} +hex = {workspace = true} +ibc-proto = {workspace = true, default-features = false, features = ["serde"], optional = true} +ibc-types = {workspace = true, default-features = false, features = ["std"]} +ics23 = {workspace = true} +jmt = {workspace = true} +metrics = {workspace = true, optional = true} +once_cell = {workspace = true} +parking_lot = {workspace = true} +pbjson = {workspace = true, optional = true} +pin-project = {workspace = true} +prost = {workspace = true, optional = true} +regex = {workspace = true} +rocksdb = {workspace = true} +serde = {workspace = true, optional = true} +sha2 = {workspace = true} +smallvec = { version = "1.10", features = ["union", "const_generics"] } +tempfile = {workspace = true} +tendermint = {workspace = true, default-features = false} +tokio = {workspace = true, features = ["full", "tracing"]} +tokio-stream = {workspace = true} +tonic = {workspace = true, optional = true} +tracing = {workspace = true} + +[dev-dependencies] +tempfile = { workspace = true } +tracing-subscriber = { workspace = true } +tokio = { workspace = true, features = ["full", "rt-multi-thread"] } +proptest = "1.3.1" +test-strategy = "0.3.1" diff --git a/crates/cnidarium/src/cache.rs b/crates/cnidarium/src/cache.rs new file mode 100644 index 0000000000..f9817d7716 --- /dev/null +++ b/crates/cnidarium/src/cache.rs @@ -0,0 +1,125 @@ +use std::{any::Any, collections::BTreeMap, sync::Arc}; + +use tendermint::abci; + +use crate::{ + store::{multistore::MultistoreConfig, substore::SubstoreConfig}, + StateWrite, +}; + +/// A cache of changes to the state of the blockchain. +/// +/// A [`StateDelta`](crate::StateDelta) is `Cache` above a `StateRead`. +#[derive(Default, Debug)] +pub struct Cache { + /// Unwritten changes to the consensus-critical state (stored in the JMT). + pub(crate) unwritten_changes: BTreeMap>>, + /// Unwritten changes to non-consensus-critical state (stored in the nonverifiable storage). + pub(crate) nonverifiable_changes: BTreeMap, Option>>, + /// Unwritten changes to the object store. A `None` value means a deletion. + pub(crate) ephemeral_objects: BTreeMap<&'static str, Option>>, + /// A list of ABCI events that occurred while building this set of state changes. + pub(crate) events: Vec, +} + +impl Cache { + /// Inspect the cache of unwritten changes to the verifiable state. + pub fn unwritten_changes(&self) -> &BTreeMap>> { + &self.unwritten_changes + } + + /// Inspect the cache of unwritten changes to the nonverifiable state. + pub fn nonverifiable_changes(&self) -> &BTreeMap, Option>> { + &self.nonverifiable_changes + } + + /// Merge the given cache with this one, taking its writes in place of ours. + pub fn merge(&mut self, other: Cache) { + // One might ask, why does this exist separately from `apply_to`? 
The + // answer is that `apply_to` takes a `StateWrite`, so we'd have to have + // `Cache: StateWrite`, and that implies `Cache: StateRead`, but the + // `StateRead` trait assumes asynchronous access, and in any case, we + // probably don't want to be reading directly from a `Cache` (?) + self.unwritten_changes.extend(other.unwritten_changes); + self.nonverifiable_changes + .extend(other.nonverifiable_changes); + self.ephemeral_objects.extend(other.ephemeral_objects); + self.events.extend(other.events); + } + + /// Consume this cache, applying its writes to the given state. + pub fn apply_to(self, mut state: S) { + for (key, value) in self.unwritten_changes { + if let Some(value) = value { + state.put_raw(key, value); + } else { + state.delete(key); + } + } + + for (key, value) in self.nonverifiable_changes { + if let Some(value) = value { + state.nonverifiable_put_raw(key, value); + } else { + state.nonverifiable_delete(key); + } + } + + // It's important to use object_merge here, so that we don't re-box all + // of the objects, causing downcasting to fail. + state.object_merge(self.ephemeral_objects); + + for event in self.events { + state.record(event); + } + } + + /// Returns `true` if there are cached writes on top of the snapshot, and `false` otherwise. + pub fn is_dirty(&self) -> bool { + !(self.unwritten_changes.is_empty() + && self.nonverifiable_changes.is_empty() + && self.ephemeral_objects.is_empty()) + } + + /// Extracts and returns the ABCI events contained in this cache. + pub fn take_events(&mut self) -> Vec { + std::mem::take(&mut self.events) + } + + /// Consumes a `Cache` and returns a map of `SubstoreConfig` to `Cache` that + /// corresponds to changes belonging to each substore. The keys in each `Cache` + /// are truncated to remove the substore prefix. + pub fn shard_by_prefix( + self, + prefixes: &MultistoreConfig, + ) -> BTreeMap, Self> { + let mut changes_by_substore = BTreeMap::new(); + for (key, some_value) in self.unwritten_changes.into_iter() { + let (truncated_key, substore_config) = prefixes.route_key_str(&key); + changes_by_substore + .entry(substore_config) + .or_insert_with(Cache::default) + .unwritten_changes + .insert(truncated_key.to_string(), some_value); + } + + for (key, some_value) in self.nonverifiable_changes { + let (truncated_key, substore_config) = prefixes.route_key_bytes(&key); + changes_by_substore + .entry(substore_config) + .or_insert_with(Cache::default) + .nonverifiable_changes + .insert(truncated_key.to_vec(), some_value); + } + changes_by_substore + } + + pub(crate) fn clone_changes(&self) -> Self { + Self { + unwritten_changes: self.unwritten_changes.clone(), + nonverifiable_changes: self.nonverifiable_changes.clone(), + ephemeral_objects: Default::default(), + events: Default::default(), + } + } +} diff --git a/crates/cnidarium/src/delta.rs b/crates/cnidarium/src/delta.rs new file mode 100644 index 0000000000..1abc12d77d --- /dev/null +++ b/crates/cnidarium/src/delta.rs @@ -0,0 +1,513 @@ +use std::{any::Any, sync::Arc}; + +use futures::StreamExt; +use parking_lot::RwLock; +use tendermint::abci; + +use crate::{ + future::{ + CacheFuture, StateDeltaNonconsensusPrefixRawStream, StateDeltaNonconsensusRangeRawStream, + StateDeltaPrefixKeysStream, StateDeltaPrefixRawStream, + }, + utils, Cache, EscapedByteSlice, StateRead, StateWrite, +}; + +/// An arbitrarily-deeply nested stack of delta updates to an underlying state. 
+/// +/// This API allows exploring a tree of possible execution paths concurrently, +/// before finally selecting one and applying it to the underlying state. +/// +/// Using this API requires understanding its invariants. +/// +/// On creation, `StateDelta::new` takes ownership of a `StateRead + StateWrite` +/// instance, acquiring a "write lock" over the underlying state (since `&mut S` +/// is `StateWrite` if `S: StateWrite`, it's possible to pass a unique +/// reference). +/// +/// The resulting `StateDelta` instance is a "leaf" state, and can be used for +/// reads and writes, following the some execution path. +/// +/// When two potential execution paths diverge, `delta.fork()` can be used to +/// fork the state update. The new forked `StateDelta` will include all +/// previous state writes made to the original (and its ancestors). Any writes +/// made to the original `StateDelta` after `fork()` is called will not be seen +/// by the forked state. +/// +/// Finally, after some execution path has been selected, calling +/// `delta.apply()` on one of the possible state updates will commit the changes +/// to the underlying state instance, and invalidate all other delta updates in +/// the same family. It is a programming error to use the other delta updates +/// after `apply()` has been called, but ideally this should not be a problem in +/// practice: the API is intended to explore a tree of possible execution paths; +/// once one has been selected, the others should be discarded. +#[derive(Debug)] +pub struct StateDelta { + /// The underlying state instance. + /// + /// The Arc<_> allows it to be shared between different stacks of delta updates, + /// and the RwLock> allows it to be taken out when it's time to commit + /// the changes from one of the stacks. + state: Arc>>, + /// A stack of intermediate delta updates, with the "top" layers first. + /// + /// We store all the layers directly, rather than using a recursive structure, + /// so that the type doesn't depend on how many layers are involved. We're only + /// duplicating the Arc<_>, so this should be cheap. + layers: Vec>>>, + /// The final delta update in the stack, the one we're currently working on. + /// Storing this separately allows us to avoid lock contention during writes. + /// In fact, this data shouldn't usually be shared at all; the only reason it's + /// wrapped this way is so that prefix streams can have 'static lifetimes. + /// We option-wrap it so it can be chained with the layers; it will never be None. + leaf_cache: Arc>>, +} + +impl StateDelta { + /// Create a new tree of possible updates to an underlying `state`. + pub fn new(state: S) -> Self { + Self { + state: Arc::new(RwLock::new(Some(state))), + layers: Vec::default(), + leaf_cache: Arc::new(RwLock::new(Some(Cache::default()))), + } + } + + /// Fork execution, returning a new child state that includes all previous changes. + pub fn fork(&mut self) -> Self { + // If we have writes in the leaf cache, we'll move them to a new layer, + // ensuring that the new child only sees writes made to this state + // *before* fork was called, and not after. + // + // Doing this only when the leaf cache is dirty means that we don't + // add empty layers in repeated fork() calls without intervening writes. 
+ if self + .leaf_cache + .read() + .as_ref() + .expect("unable to get ref to leaf cache, storage not initialized?") + .is_dirty() + { + let new_layer = std::mem::replace( + &mut self.leaf_cache, + Arc::new(RwLock::new(Some(Cache::default()))), + ); + self.layers.push(new_layer); + } + + Self { + state: self.state.clone(), + layers: self.layers.clone(), + leaf_cache: Arc::new(RwLock::new(Some(Cache::default()))), + } + } + + /// Flatten all changes in this branch of the tree into a single [`Cache`], + /// invalidating all other branches of the tree and releasing the underlying + /// state back to the caller. + /// + /// The [`apply`](Self::apply) method is a convenience wrapper around this + /// that applies the changes to the underlying state. + pub fn flatten(self) -> (S, Cache) { + tracing::trace!("flattening branch"); + // Take ownership of the underlying state, immediately invalidating all + // other delta stacks in the same family. + let state = self + .state + .write() + .take() + .expect("apply must be called only once"); + + // Flatten the intermediate layers into a single cache, applying them from oldest + // (bottom) to newest (top), so that newer writes clobber old ones. + let mut changes = Cache::default(); + for layer in self.layers { + let cache = layer + .write() + .take() + .expect("cache must not have already been applied"); + changes.merge(cache); + } + // Last, apply the changes in the leaf cache. + changes.merge( + self.leaf_cache + .write() + .take() + .expect("unable to take leaf cache, was it already applied?"), + ); + + (state, changes) + } +} + +impl StateDelta { + /// Apply all changes in this branch of the tree to the underlying state, + /// releasing it back to the caller and invalidating all other branches of + /// the tree. + pub fn apply(self) -> (S, Vec) { + let (mut state, mut changes) = self.flatten(); + let events = changes.take_events(); + + // Apply the flattened changes to the underlying state. + changes.apply_to(&mut state); + + // Finally, return ownership of the state back to the caller. + (state, events) + } +} + +impl StateDelta> { + pub fn try_apply(self) -> anyhow::Result<(S, Vec)> { + let (arc_state, mut changes) = self.flatten(); + let events = std::mem::take(&mut changes.events); + + if let Ok(mut state) = Arc::try_unwrap(arc_state) { + // Apply the flattened changes to the underlying state. + changes.apply_to(&mut state); + + // Finally, return ownership of the state back to the caller. + Ok((state, events)) + } else { + Err(anyhow::anyhow!("did not have unique ownership of Arc")) + } + } +} + +impl StateRead for StateDelta { + type GetRawFut = CacheFuture; + type PrefixRawStream = StateDeltaPrefixRawStream; + type PrefixKeysStream = StateDeltaPrefixKeysStream; + type NonconsensusPrefixRawStream = + StateDeltaNonconsensusPrefixRawStream; + type NonconsensusRangeRawStream = + StateDeltaNonconsensusRangeRawStream; + + fn get_raw(&self, key: &str) -> Self::GetRawFut { + // Check if we have a cache hit in the leaf cache. + if let Some(entry) = self + .leaf_cache + .read() + .as_ref() + .expect("delta must not have been applied") + .unwritten_changes + .get(key) + { + return CacheFuture::hit(entry.clone()); + } + + // Iterate through the stack, top to bottom, to see if we have a cache hit. 
+ for layer in self.layers.iter().rev() { + if let Some(entry) = layer + .read() + .as_ref() + .expect("delta must not have been applied") + .unwritten_changes + .get(key) + { + return CacheFuture::hit(entry.clone()); + } + } + + // If we got here, the key must be in the underlying state or not present at all. + CacheFuture::miss( + self.state + .read() + .as_ref() + .expect("delta must not have been applied") + .get_raw(key), + ) + } + + fn nonverifiable_get_raw(&self, key: &[u8]) -> Self::GetRawFut { + // Check if we have a cache hit in the leaf cache. + if let Some(entry) = self + .leaf_cache + .read() + .as_ref() + .expect("delta must not have been applied") + .nonverifiable_changes + .get(key) + { + return CacheFuture::hit(entry.clone()); + } + + // Iterate through the stack, top to bottom, to see if we have a cache hit. + for layer in self.layers.iter().rev() { + if let Some(entry) = layer + .read() + .as_ref() + .expect("delta must not have been applied") + .nonverifiable_changes + .get(key) + { + return CacheFuture::hit(entry.clone()); + } + } + + // If we got here, the key must be in the underlying state or not present at all. + CacheFuture::miss( + self.state + .read() + .as_ref() + .expect("delta must not have been applied") + .nonverifiable_get_raw(key), + ) + } + + fn object_type(&self, key: &'static str) -> Option { + // Check if we have a cache hit in the leaf cache. + if let Some(entry) = self + .leaf_cache + .read() + .as_ref() + .expect("delta must not have been applied") + .ephemeral_objects + .get(key) + { + // We have to explicitly call `Any::type_id(&**v)` here because this ensures that we are + // asking for the type of the `Any` *inside* the `Box`, rather than the type of + // `Box` itself. + return entry.as_ref().map(|v| std::any::Any::type_id(&**v)); + } + + // Iterate through the stack, top to bottom, to see if we have a cache hit. + for layer in self.layers.iter().rev() { + if let Some(entry) = layer + .read() + .as_ref() + .expect("delta must not have been applied") + .ephemeral_objects + .get(key) + { + // We have to explicitly call `Any::type_id(&**v)` here because this ensures that we are + // asking for the type of the `Any` *inside* the `Box`, rather than the type of + // `Box` itself. + return entry.as_ref().map(|v| std::any::Any::type_id(&**v)); + } + } + + // Fall through to the underlying store. + self.state + .read() + .as_ref() + .expect("delta must not have been applied") + .object_type(key) + } + + fn object_get(&self, key: &'static str) -> Option { + // Check if we have a cache hit in the leaf cache. + if let Some(entry) = self + .leaf_cache + .read() + .as_ref() + .expect("delta must not have been applied") + .ephemeral_objects + .get(key) + { + return entry + .as_ref() + .map(|v| { + v.downcast_ref().unwrap_or_else(|| panic!("unexpected type for key \"{key}\" in `StateDelta::object_get`: expected type {}", std::any::type_name::())) + }) + .cloned(); + } + + // Iterate through the stack, top to bottom, to see if we have a cache hit. + for layer in self.layers.iter().rev() { + if let Some(entry) = layer + .read() + .as_ref() + .expect("delta must not have been applied") + .ephemeral_objects + .get(key) + { + return entry + .as_ref() + .map(|v| { + v.downcast_ref().unwrap_or_else(|| panic!("unexpected type for key \"{key}\" in `StateDelta::object_get`: expected type {}", std::any::type_name::())) + }).cloned(); + } + } + + // Fall through to the underlying store. 
+ self.state + .read() + .as_ref() + .expect("delta must not have been applied") + .object_get(key) + } + + fn prefix_raw(&self, prefix: &str) -> Self::PrefixRawStream { + let underlying = self + .state + .read() + .as_ref() + .expect("delta must not have been applied") + .prefix_raw(prefix) + .peekable(); + StateDeltaPrefixRawStream { + underlying, + layers: self.layers.clone(), + leaf_cache: self.leaf_cache.clone(), + last_key: None, + prefix: prefix.to_owned(), + } + } + + fn prefix_keys(&self, prefix: &str) -> Self::PrefixKeysStream { + let underlying = self + .state + .read() + .as_ref() + .expect("delta must not have been applied") + .prefix_keys(prefix) + .peekable(); + StateDeltaPrefixKeysStream { + underlying, + layers: self.layers.clone(), + leaf_cache: self.leaf_cache.clone(), + last_key: None, + prefix: prefix.to_owned(), + } + } + + fn nonverifiable_prefix_raw(&self, prefix: &[u8]) -> Self::NonconsensusPrefixRawStream { + let underlying = self + .state + .read() + .as_ref() + .expect("delta must not have been applied") + .nonverifiable_prefix_raw(prefix) + .peekable(); + StateDeltaNonconsensusPrefixRawStream { + underlying, + layers: self.layers.clone(), + leaf_cache: self.leaf_cache.clone(), + last_key: None, + prefix: prefix.to_vec(), + } + } + + fn nonverifiable_range_raw( + &self, + prefix: Option<&[u8]>, + range: impl std::ops::RangeBounds>, + ) -> anyhow::Result { + let (range, (start, end)) = utils::convert_bounds(range)?; + let underlying = self + .state + .read() + .as_ref() + .expect("delta must not have been applied") + .nonverifiable_range_raw(prefix, range)? + .peekable(); + Ok(StateDeltaNonconsensusRangeRawStream { + underlying, + layers: self.layers.clone(), + leaf_cache: self.leaf_cache.clone(), + last_key: None, + prefix: prefix.map(|p| p.to_vec()), + range: (start, end), + }) + } +} + +impl StateWrite for StateDelta { + fn put_raw(&mut self, key: String, value: jmt::OwnedValue) { + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .unwritten_changes + .insert(key, Some(value)); + } + + fn delete(&mut self, key: String) { + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .unwritten_changes + .insert(key, None); + } + + fn nonverifiable_delete(&mut self, key: Vec) { + tracing::trace!(key = ?EscapedByteSlice(&key), "deleting key"); + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .nonverifiable_changes + .insert(key, None); + } + + fn nonverifiable_put_raw(&mut self, key: Vec, value: Vec) { + tracing::trace!(key = ?EscapedByteSlice(&key), value = ?EscapedByteSlice(&value), "insert nonverifiable change"); + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .nonverifiable_changes + .insert(key, Some(value)); + } + + fn object_put(&mut self, key: &'static str, value: T) { + if let Some(previous_type) = self.object_type(key) { + if std::any::TypeId::of::() != previous_type { + panic!( + "unexpected type for key \"{key}\" in `StateDelta::object_put`: expected type {expected}", + expected = std::any::type_name::(), + ); + } + } + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .ephemeral_objects + .insert(key, Some(Box::new(value))); + } + + fn object_delete(&mut self, key: &'static str) { + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .ephemeral_objects + .insert(key, None); + } + + fn object_merge( + &mut self, + objects: 
std::collections::BTreeMap<&'static str, Option>>, + ) { + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .ephemeral_objects + .extend(objects); + } + + fn record(&mut self, event: abci::Event) { + self.leaf_cache + .write() + .as_mut() + .expect("delta must not have been applied") + .events + .push(event) + } +} + +/// Extension trait providing `try_begin_transaction()` on `Arc>`. +pub trait ArcStateDeltaExt: Sized { + type S: StateRead; + /// Attempts to begin a transaction on this `Arc`, returning `None` if the `Arc` is shared. + fn try_begin_transaction(&'_ mut self) -> Option>>; +} + +impl ArcStateDeltaExt for Arc> { + type S = S; + fn try_begin_transaction(&'_ mut self) -> Option>> { + Arc::get_mut(self).map(StateDelta::new) + } +} diff --git a/crates/cnidarium/src/escaped_byte_slice.rs b/crates/cnidarium/src/escaped_byte_slice.rs new file mode 100644 index 0000000000..de75839642 --- /dev/null +++ b/crates/cnidarium/src/escaped_byte_slice.rs @@ -0,0 +1,34 @@ +/// A wrapper type for a byte slice that implements `Debug` by escaping +/// non-printable bytes. +/// +/// This is exposed as part of the public API for convenience of downstream +/// users' debugging of state accesses. +pub struct EscapedByteSlice<'a>(pub &'a [u8]); + +impl<'a> std::fmt::Debug for EscapedByteSlice<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "b\"")?; + for &b in self.0 { + // https://doc.rust-lang.org/reference/tokens.html#byte-escapes + #[allow(clippy::manual_range_contains)] + if b == b'\n' { + write!(f, "\\n")?; + } else if b == b'\r' { + write!(f, "\\r")?; + } else if b == b'\t' { + write!(f, "\\t")?; + } else if b == b'\\' || b == b'"' { + write!(f, "\\{}", b as char)?; + } else if b == b'\0' { + write!(f, "\\0")?; + // ASCII printable + } else if b >= 0x20 && b < 0x7f { + write!(f, "{}", b as char)?; + } else { + write!(f, "\\x{:02x}", b)?; + } + } + write!(f, "\"")?; + Ok(()) + } +} diff --git a/crates/cnidarium/src/future.rs b/crates/cnidarium/src/future.rs new file mode 100644 index 0000000000..b5858972e3 --- /dev/null +++ b/crates/cnidarium/src/future.rs @@ -0,0 +1,763 @@ +//! Concrete futures types used by the storage crate. + +use anyhow::Result; +use futures::{ + future::{Either, Ready}, + stream::Peekable, + Stream, +}; +use parking_lot::RwLock; +use pin_project::pin_project; +use smallvec::SmallVec; +use std::{ + future::Future, + ops::Bound, + pin::Pin, + sync::Arc, + task::{ready, Context, Poll}, +}; + +use crate::Cache; + +/// Future representing a read from a state snapshot. +#[pin_project] +pub struct SnapshotFuture(#[pin] pub(crate) tokio::task::JoinHandle>>>); + +impl Future for SnapshotFuture { + type Output = Result>>; + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.project(); + match this.0.poll(cx) { + Poll::Ready(result) => { + Poll::Ready(result.expect("unrecoverable join error from tokio task")) + } + Poll::Pending => Poll::Pending, + } + } +} + +/// Future representing a read from an in-memory cache over an underlying state. 
+#[pin_project] +pub struct CacheFuture { + #[pin] + inner: Either>>>, F>, +} + +impl CacheFuture { + pub(crate) fn hit(value: Option>) -> Self { + Self { + inner: Either::Left(futures::future::ready(Ok(value))), + } + } + + pub(crate) fn miss(underlying: F) -> Self { + Self { + inner: Either::Right(underlying), + } + } +} + +impl Future for CacheFuture +where + F: Future>>>, +{ + type Output = Result>>; + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + let this = self.project(); + this.inner.poll(cx) + } +} + +#[pin_project] +pub struct StateDeltaNonconsensusPrefixRawStream +where + St: Stream, Vec)>>, +{ + #[pin] + pub(crate) underlying: Peekable, + pub(crate) layers: Vec>>>, + pub(crate) leaf_cache: Arc>>, + pub(crate) last_key: Option>, + pub(crate) prefix: Vec, +} + +impl Stream for StateDeltaNonconsensusPrefixRawStream +where + St: Stream, Vec)>>, +{ + type Item = Result<(Vec, Vec)>; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + // This implementation interleaves items from the underlying stream with + // items in cache layers. To do this, it tracks the last key it + // returned, then, for each item in the underlying stream, searches for + // cached keys that lie between the last-returned key and the item's key, + // checking whether the cached key represents a deletion requiring further + // scanning. This process is illustrated as follows: + // + // â—‡ skip â—‡ skip â–² yield â–² yield â–² yield + // │ │ │ │ │ + // â–‘ pick ──────────────▶ â–‘ pick ────────▶ â–ˆ pick ────────▶ â–ˆ pick ─────────▶ â–ˆ pick + // â–² â–² â–² â–² â–² + // â–² │ â–² │ â–² │ â–² │ â–² │ + // write│ │ │ │ │ │ │ │ │ │ + // layer│ │ â–ˆ │ │ â–ˆ │ │█ │ â–ˆ │ â–ˆ │ + // │ │ â–‘ │ â–‘ │ ░│ │ â–‘ │ â–‘ │ + // │ â–‘ │ â–‘ │ â–‘ │ │ â–‘ │ â–‘ │ + // │ â–ˆ │ â–ˆ │ █│ │ â–ˆ │ â–ˆ │ + // │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ + // ─┼(─────]────keys─▶ ─┼──(───]────▶ ─┼────(─]────▶ ─┼─────(]────▶ ─┼──────(──]─▶ + // │ â–² â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ + // │search range of key-value pairs in cache layers that could + // │affect whether to yield the next item in the underlying stream + + // Optimization: ensure we have a peekable item in the underlying stream before continuing. + let mut this = self.project(); + ready!(this.underlying.as_mut().poll_peek(cx)); + + // Now that we're ready to interleave the next underlying item with any + // cache layers, lock them all for the duration of the method, using a + // SmallVec to (hopefully) store all the guards on the stack. + let mut layer_guards = SmallVec::<[_; 8]>::new(); + for layer in this.layers.iter() { + layer_guards.push(layer.read()); + } + // Tacking the leaf cache onto the list is important to not miss any values. + // It's stored separately so that the contents of the + layer_guards.push(this.leaf_cache.read()); + + loop { + // Obtain a reference to the next key-value pair from the underlying stream. + let peeked = match ready!(this.underlying.as_mut().poll_peek(cx)) { + // If we get an underlying error, bubble it up immediately. + Some(Err(_e)) => return this.underlying.poll_next(cx), + // Otherwise, pass through the peeked value. 
+ Some(Ok(pair)) => Some(pair), + None => None, + }; + + // To determine whether or not we should return the peeked value, we + // need to search the cache layers for keys that are between the last + // key we returned (exclusive, so we make forward progress on the + // stream) and the peeked key (inclusive, because we need to find out + // whether or not there was a covering deletion). + let search_range = ( + this.last_key + .as_ref() + .map(Bound::Excluded) + .unwrap_or(Bound::Included(this.prefix)), + peeked + .map(|(k, _)| Bound::Included(k)) + .unwrap_or(Bound::Unbounded), + ); + + // It'd be slightly cleaner to initialize `leftmost_pair` with the + // peeked contents, but that would taint `leftmost_pair` with a + // `peeked` borrow, and we may need to mutate the underlying stream + // later. Instead, initialize it with `None` to only search the + // cache layers, and compare at the end. + let mut leftmost_pair = None; + for layer in layer_guards.iter() { + // Find this layer's leftmost key-value pair in the search range. + let found_pair = layer + .as_ref() + .expect("layer must not have been applied") + .nonverifiable_changes + .range::, _>(search_range) + .take_while(|(k, _v)| k.starts_with(this.prefix)) + .next(); + + // Check whether the new pair, if any, is the new leftmost pair. + match (leftmost_pair, found_pair) { + // We want to replace the pair even when the key is equal, + // so that we always prefer a newer value over an older value. + (Some((leftmost_k, _)), Some((k, v))) if k <= leftmost_k => { + leftmost_pair = Some((k, v)); + } + (None, Some((k, v))) => { + leftmost_pair = Some((k, v)); + } + _ => {} + } + } + + // Overwrite a Vec, attempting to reuse its existing allocation. + let overwrite_in_place = |dst: &mut Option>, src: &[u8]| { + if let Some(ref mut dst) = dst { + dst.clear(); + dst.extend_from_slice(src); + } else { + *dst = Some(src.to_vec()); + } + }; + + match (leftmost_pair, peeked) { + (Some((k, v)), peeked) => { + // Since we searched for cached keys less than or equal to + // the peeked key, we know that the cached pair takes + // priority over the peeked pair. + // + // If the keys are exactly equal, we advance the underlying stream. + if peeked.map(|(kp, _)| kp) == Some(k) { + let _ = this.underlying.as_mut().poll_next(cx); + } + overwrite_in_place(this.last_key, k); + if let Some(v) = v { + // If the value is Some, we have a key-value pair to yield. + return Poll::Ready(Some(Ok((k.clone(), v.clone())))); + } else { + // If the value is None, this pair represents a deletion, + // so continue looping until we find a non-deleted pair. + continue; + } + } + (None, Some(_)) => { + // There's no cache hit before the peeked pair, so we want + // to extract and return it from the underlying stream. + let Poll::Ready(Some(Ok((k, v)))) = this.underlying.as_mut().poll_next(cx) + else { + unreachable!("peeked stream must yield peeked item"); + }; + overwrite_in_place(this.last_key, &k); + return Poll::Ready(Some(Ok((k, v)))); + } + (None, None) => { + // Terminate the stream, no more items are available. + return Poll::Ready(None); + } + } + } + } +} + +// This implementation is almost exactly the same as the one above, but with +// minor tweaks to work with string keys and to read different fields from the cache. +// Update them together. 
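Stepping back from these stream internals: the layered read path they implement is what makes uncommitted puts and deletes visible through `StateDelta` prefix queries. A minimal sketch of that behavior follows; it is not part of this patch, and it assumes the crate's `TempStorage` helper (crates/cnidarium/src/storage/temp.rs) and `Storage::latest_snapshot()`, which are added elsewhere in this diff:

```rust
// Sketch only, not part of this patch. Assumes cnidarium's TempStorage helper
// and Storage::latest_snapshot(); the "demo/" keys are hypothetical.
use cnidarium::{StateDelta, StateRead as _, StateWrite as _, TempStorage};
use futures::TryStreamExt as _;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let storage = TempStorage::new().await?;
    let mut delta = StateDelta::new(storage.latest_snapshot());

    // Writes land in the leaf cache of this delta.
    delta.put_raw("demo/a".to_string(), b"1".to_vec());
    delta.put_raw("demo/b".to_string(), b"2".to_vec());

    // Forking pushes those writes into a shared layer; the deletion below
    // lives in the fork's fresh leaf cache and shadows the older value.
    let mut delta = delta.fork();
    delta.delete("demo/a".to_string());

    // The prefix stream interleaves the cache layers with the (empty)
    // underlying snapshot, skipping the deleted key.
    let visible: Vec<(String, Vec<u8>)> =
        delta.prefix_raw("demo/").try_collect().await?;
    assert_eq!(visible, vec![("demo/b".to_string(), b"2".to_vec())]);
    Ok(())
}
```

The forked delta's deletion of `demo/a` shadows the value written in the earlier layer, so only `demo/b` is yielded, matching the deletion-skipping logic in the stream implementations above.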
+ +#[pin_project] +pub struct StateDeltaPrefixRawStream +where + St: Stream)>>, +{ + #[pin] + pub(crate) underlying: Peekable, + pub(crate) layers: Vec>>>, + pub(crate) leaf_cache: Arc>>, + pub(crate) last_key: Option, + pub(crate) prefix: String, +} + +impl Stream for StateDeltaPrefixRawStream +where + St: Stream)>>, +{ + type Item = Result<(String, Vec)>; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + // This implementation interleaves items from the underlying stream with + // items in cache layers. To do this, it tracks the last key it + // returned, then, for each item in the underlying stream, searches for + // cached keys that lie between the last-returned key and the item's key, + // checking whether the cached key represents a deletion requiring further + // scanning. This process is illustrated as follows: + // + // â—‡ skip â—‡ skip â–² yield â–² yield â–² yield + // │ │ │ │ │ + // â–‘ pick ──────────────▶ â–‘ pick ────────▶ â–ˆ pick ────────▶ â–ˆ pick ─────────▶ â–ˆ pick + // â–² â–² â–² â–² â–² + // â–² │ â–² │ â–² │ â–² │ â–² │ + // write│ │ │ │ │ │ │ │ │ │ + // layer│ │ â–ˆ │ │ â–ˆ │ │█ │ â–ˆ │ â–ˆ │ + // │ │ â–‘ │ â–‘ │ ░│ │ â–‘ │ â–‘ │ + // │ â–‘ │ â–‘ │ â–‘ │ │ â–‘ │ â–‘ │ + // │ â–ˆ │ â–ˆ │ █│ │ â–ˆ │ â–ˆ │ + // │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ + // ─┼(─────]────keys─▶ ─┼──(───]────▶ ─┼────(─]────▶ ─┼─────(]────▶ ─┼──────(──]─▶ + // │ â–² â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ + // │search range of key-value pairs in cache layers that could + // │affect whether to yield the next item in the underlying stream + + // Optimization: ensure we have a peekable item in the underlying stream before continuing. + let mut this = self.project(); + ready!(this.underlying.as_mut().poll_peek(cx)); + + // Now that we're ready to interleave the next underlying item with any + // cache layers, lock them all for the duration of the method, using a + // SmallVec to (hopefully) store all the guards on the stack. + let mut layer_guards = SmallVec::<[_; 8]>::new(); + for layer in this.layers.iter() { + layer_guards.push(layer.read()); + } + // Tacking the leaf cache onto the list is important to not miss any values. + // It's stored separately so that the contents of the + layer_guards.push(this.leaf_cache.read()); + + loop { + // Obtain a reference to the next key-value pair from the underlying stream. + let peeked = match ready!(this.underlying.as_mut().poll_peek(cx)) { + // If we get an underlying error, bubble it up immediately. + Some(Err(_e)) => return this.underlying.poll_next(cx), + // Otherwise, pass through the peeked value. + Some(Ok(pair)) => Some(pair), + None => None, + }; + + // To determine whether or not we should return the peeked value, we + // need to search the cache layers for keys that are between the last + // key we returned (exclusive, so we make forward progress on the + // stream) and the peeked key (inclusive, because we need to find out + // whether or not there was a covering deletion). + let search_range = ( + this.last_key + .as_ref() + .map(Bound::Excluded) + .unwrap_or(Bound::Included(this.prefix)), + peeked + .map(|(k, _)| Bound::Included(k)) + .unwrap_or(Bound::Unbounded), + ); + + // It'd be slightly cleaner to initialize `leftmost_pair` with the + // peeked contents, but that would taint `leftmost_pair` with a + // `peeked` borrow, and we may need to mutate the underlying stream + // later. 
Instead, initialize it with `None` to only search the + // cache layers, and compare at the end. + let mut leftmost_pair = None; + for layer in layer_guards.iter() { + // Find this layer's leftmost key-value pair in the search range. + let found_pair = layer + .as_ref() + .expect("layer must not have been applied") + .unwritten_changes + .range::(search_range) + .take_while(|(k, _v)| k.starts_with(this.prefix.as_str())) + .next(); + + // Check whether the new pair, if any, is the new leftmost pair. + match (leftmost_pair, found_pair) { + // We want to replace the pair even when the key is equal, + // so that we always prefer a newer value over an older value. + (Some((leftmost_k, _)), Some((k, v))) if k <= leftmost_k => { + leftmost_pair = Some((k, v)); + } + (None, Some((k, v))) => { + leftmost_pair = Some((k, v)); + } + _ => {} + } + } + + // Overwrite a Vec, attempting to reuse its existing allocation. + let overwrite_in_place = |dst: &mut Option, src: &str| { + if let Some(ref mut dst) = dst { + dst.clear(); + dst.push_str(src); + } else { + *dst = Some(src.to_owned()); + } + }; + + match (leftmost_pair, peeked) { + (Some((k, v)), peeked) => { + // Since we searched for cached keys less than or equal to + // the peeked key, we know that the cached pair takes + // priority over the peeked pair. + // + // If the keys are exactly equal, we advance the underlying stream. + if peeked.map(|(kp, _)| kp) == Some(k) { + let _ = this.underlying.as_mut().poll_next(cx); + } + overwrite_in_place(this.last_key, k); + if let Some(v) = v { + // If the value is Some, we have a key-value pair to yield. + return Poll::Ready(Some(Ok((k.clone(), v.clone())))); + } else { + // If the value is None, this pair represents a deletion, + // so continue looping until we find a non-deleted pair. + continue; + } + } + (None, Some(_)) => { + // There's no cache hit before the peeked pair, so we want + // to extract and return it from the underlying stream. + let Poll::Ready(Some(Ok((k, v)))) = this.underlying.as_mut().poll_next(cx) + else { + unreachable!("peeked stream must yield peeked item"); + }; + overwrite_in_place(this.last_key, &k); + return Poll::Ready(Some(Ok((k, v)))); + } + (None, None) => { + // Terminate the stream, no more items are available. + return Poll::Ready(None); + } + } + } + } +} + +// This implementation is almost exactly the same as the one above, but with +// minor tweaks to work with string keys and to read different fields from the cache. +// Update them together. + +#[pin_project] +pub struct StateDeltaPrefixKeysStream +where + St: Stream>, +{ + #[pin] + pub(crate) underlying: Peekable, + pub(crate) layers: Vec>>>, + pub(crate) leaf_cache: Arc>>, + pub(crate) last_key: Option, + pub(crate) prefix: String, +} + +impl Stream for StateDeltaPrefixKeysStream +where + St: Stream>, +{ + type Item = Result; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + // This implementation interleaves items from the underlying stream with + // items in cache layers. To do this, it tracks the last key it + // returned, then, for each item in the underlying stream, searches for + // cached keys that lie between the last-returned key and the item's key, + // checking whether the cached key represents a deletion requiring further + // scanning. 
This process is illustrated as follows: + // + // â—‡ skip â—‡ skip â–² yield â–² yield â–² yield + // │ │ │ │ │ + // â–‘ pick ──────────────▶ â–‘ pick ────────▶ â–ˆ pick ────────▶ â–ˆ pick ─────────▶ â–ˆ pick + // â–² â–² â–² â–² â–² + // â–² │ â–² │ â–² │ â–² │ â–² │ + // write│ │ │ │ │ │ │ │ │ │ + // layer│ │ â–ˆ │ │ â–ˆ │ │█ │ â–ˆ │ â–ˆ │ + // │ │ â–‘ │ â–‘ │ ░│ │ â–‘ │ â–‘ │ + // │ â–‘ │ â–‘ │ â–‘ │ │ â–‘ │ â–‘ │ + // │ â–ˆ │ â–ˆ │ █│ │ â–ˆ │ â–ˆ │ + // │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ + // ─┼(─────]────keys─▶ ─┼──(───]────▶ ─┼────(─]────▶ ─┼─────(]────▶ ─┼──────(──]─▶ + // │ â–² â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ + // │search range of key-value pairs in cache layers that could + // │affect whether to yield the next item in the underlying stream + + // Optimization: ensure we have a peekable item in the underlying stream before continuing. + let mut this = self.project(); + ready!(this.underlying.as_mut().poll_peek(cx)); + + // Now that we're ready to interleave the next underlying item with any + // cache layers, lock them all for the duration of the method, using a + // SmallVec to (hopefully) store all the guards on the stack. + let mut layer_guards = SmallVec::<[_; 8]>::new(); + for layer in this.layers.iter() { + layer_guards.push(layer.read()); + } + // Tacking the leaf cache onto the list is important to not miss any values. + // It's stored separately so that the contents of the + layer_guards.push(this.leaf_cache.read()); + + loop { + // Obtain a reference to the next key-value pair from the underlying stream. + let peeked = match ready!(this.underlying.as_mut().poll_peek(cx)) { + // If we get an underlying error, bubble it up immediately. + Some(Err(_e)) => return this.underlying.poll_next(cx), + // Otherwise, pass through the peeked value. + Some(Ok(pair)) => Some(pair), + None => None, + }; + + // To determine whether or not we should return the peeked value, we + // need to search the cache layers for keys that are between the last + // key we returned (exclusive, so we make forward progress on the + // stream) and the peeked key (inclusive, because we need to find out + // whether or not there was a covering deletion). + let search_range = ( + this.last_key + .as_ref() + .map(Bound::Excluded) + .unwrap_or(Bound::Included(this.prefix)), + peeked.map(Bound::Included).unwrap_or(Bound::Unbounded), + ); + + // It'd be slightly cleaner to initialize `leftmost_pair` with the + // peeked contents, but that would taint `leftmost_pair` with a + // `peeked` borrow, and we may need to mutate the underlying stream + // later. Instead, initialize it with `None` to only search the + // cache layers, and compare at the end. + let mut leftmost_pair = None; + for layer in layer_guards.iter() { + // Find this layer's leftmost key-value pair in the search range. + let found_pair = layer + .as_ref() + .expect("layer must not have been applied") + .unwritten_changes + .range::(search_range) + .take_while(|(k, _v)| k.starts_with(this.prefix.as_str())) + .next(); + + // Check whether the new pair, if any, is the new leftmost pair. + match (leftmost_pair, found_pair) { + // We want to replace the pair even when the key is equal, + // so that we always prefer a newer value over an older value. 
+ (Some((leftmost_k, _)), Some((k, v))) if k <= leftmost_k => { + leftmost_pair = Some((k, v)); + } + (None, Some((k, v))) => { + leftmost_pair = Some((k, v)); + } + _ => {} + } + } + + // Overwrite a Vec, attempting to reuse its existing allocation. + let overwrite_in_place = |dst: &mut Option, src: &str| { + if let Some(ref mut dst) = dst { + dst.clear(); + dst.push_str(src); + } else { + *dst = Some(src.to_owned()); + } + }; + + match (leftmost_pair, peeked) { + (Some((k, v)), peeked) => { + // Since we searched for cached keys less than or equal to + // the peeked key, we know that the cached pair takes + // priority over the peeked pair. + // + // If the keys are exactly equal, we advance the underlying stream. + if peeked == Some(k) { + let _ = this.underlying.as_mut().poll_next(cx); + } + overwrite_in_place(this.last_key, k); + if v.is_some() { + // If the value is Some, we have a key-value pair to yield. + return Poll::Ready(Some(Ok(k.clone()))); + } else { + // If the value is None, this pair represents a deletion, + // so continue looping until we find a non-deleted pair. + continue; + } + } + (None, Some(_)) => { + // There's no cache hit before the peeked pair, so we want + // to extract and return it from the underlying stream. + let Poll::Ready(Some(Ok(k))) = this.underlying.as_mut().poll_next(cx) else { + unreachable!("peeked stream must yield peeked item"); + }; + overwrite_in_place(this.last_key, &k); + return Poll::Ready(Some(Ok(k))); + } + (None, None) => { + // Terminate the stream, no more items are available. + return Poll::Ready(None); + } + } + } + } +} + +#[pin_project] +/// A stream of key-value pairs that interleaves a nonverifiable storage and caching layers. +// This implementation differs from [`StateDeltaNonconsensusPrefixRawStream`] sin how +// it specifies the search space for the cache. +pub struct StateDeltaNonconsensusRangeRawStream +where + St: Stream, Vec)>>, +{ + #[pin] + pub(crate) underlying: Peekable, + pub(crate) layers: Vec>>>, + pub(crate) leaf_cache: Arc>>, + pub(crate) last_key: Option>, + pub(crate) prefix: Option>, + pub(crate) range: (Option>, Option>), +} + +impl Stream for StateDeltaNonconsensusRangeRawStream +where + St: Stream, Vec)>>, +{ + type Item = Result<(Vec, Vec)>; + + fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { + // This implementation interleaves items from the underlying stream with + // items in cache layers. To do this, it tracks the last key it + // returned, then, for each item in the underlying stream, searches for + // cached keys that lie between the last-returned key and the item's key, + // checking whether the cached key represents a deletion requiring further + // scanning. 
This process is illustrated as follows: + // + // â—‡ skip â—‡ skip â–² yield â–² yield â–² yield + // │ │ │ │ │ + // â–‘ pick ──────────────▶ â–‘ pick ────────▶ â–ˆ pick ────────▶ â–ˆ pick ─────────▶ â–ˆ pick + // â–² â–² â–² â–² â–² + // â–² │ â–² │ â–² │ â–² │ â–² │ + // write│ │ │ │ │ │ │ │ │ │ + // layer│ │ â–ˆ │ │ â–ˆ │ │█ │ â–ˆ │ â–ˆ │ + // │ │ â–‘ │ â–‘ │ ░│ │ â–‘ │ â–‘ │ + // │ â–‘ │ â–‘ │ â–‘ │ │ â–‘ │ â–‘ │ + // │ â–ˆ │ â–ˆ │ █│ │ â–ˆ │ â–ˆ │ + // │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ │ â–ˆ + // ─┼(─────]────keys─▶ ─┼──(───]────▶ ─┼────(─]────▶ ─┼─────(]────▶ ─┼──────(──]─▶ + // │ â–² â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ │ â–ˆ â–ˆ + // │ + // │search range of key-value pairs in cache layers that could + // │affect whether to yield the next item in the underlying stream + + // Optimization: ensure we have a peekable item in the underlying stream before continuing. + let mut this = self.project(); + ready!(this.underlying.as_mut().poll_peek(cx)); + // Now that we're ready to interleave the next underlying item with any + // cache layers, lock them all for the duration of the method, using a + // SmallVec to (hopefully) store all the guards on the stack. + let mut layer_guards = SmallVec::<[_; 8]>::new(); + for layer in this.layers.iter() { + layer_guards.push(layer.read()); + } + // Tacking the leaf cache onto the list is important to not miss any values. + // It's stored separately so that the contents of the + layer_guards.push(this.leaf_cache.read()); + + let (binding_prefix, binding_start, binding_end) = (Vec::new(), Vec::new(), Vec::new()); + let prefix = this.prefix.as_ref().unwrap_or(&binding_prefix); + let start = this.range.0.as_ref().unwrap_or(&binding_start); + let end = this.range.1.as_ref().unwrap_or(&binding_end); + + let mut prefix_start = Vec::with_capacity(prefix.len() + start.len()); + let mut prefix_end = Vec::with_capacity(prefix.len() + end.len()); + + prefix_start.extend(prefix); + prefix_start.extend(start); + prefix_end.extend(prefix); + prefix_end.extend(end); + + loop { + // Obtain a reference to the next key-value pair from the underlying stream. + let peeked = match ready!(this.underlying.as_mut().poll_peek(cx)) { + // If we get an underlying error, bubble it up immediately. + Some(Err(_e)) => return this.underlying.poll_next(cx), + // Otherwise, pass through the peeked value. + Some(Ok(pair)) => Some(pair), + None => None, + }; + + // We want to decide which key to return next, so we have to inspect the cache layers. + // To do this, we have to define a search space so that we cover updates and new insertions + // that could affect the next key to return. + let lower_bound = match this.last_key.as_ref() { + Some(k) => Bound::Excluded(k), + None => Bound::Included(prefix_start.as_ref()), + }; + + let upper_bound = match peeked { + Some((k, _v)) => Bound::Included(k), + None => this + .range + .1 + .as_ref() + .map_or(Bound::Unbounded, |_| Bound::Excluded(prefix_end.as_ref())), + }; + + let search_range = (lower_bound, upper_bound); + + tracing::debug!( + "searching cache layers for key-value pairs in range {:?}", + search_range + ); + + // It'd be slightly cleaner to initialize `leftmost_pair` with the + // peeked contents, but that would taint `leftmost_pair` with a + // `peeked` borrow, and we may need to mutate the underlying stream + // later. Instead, initialize it with `None` to only search the + // cache layers, and compare at the end. 
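+            // As a concrete illustration of the search bounds constructed above
+            // (the values here are illustrative, not taken from this change):
+            // with `prefix = Some(b"dex/".to_vec())` and
+            // `range = (Some(b"a".to_vec()), Some(b"m".to_vec()))`,
+            // `prefix_start` is `b"dex/a"` and `prefix_end` is `b"dex/m"`, so the
+            // first pass searches `(Included(b"dex/a"), Included(peeked_key))` and,
+            // once the underlying stream is exhausted, later passes search
+            // `(Excluded(last_key), Excluded(b"dex/m"))`.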
+ let mut leftmost_pair = None; + for layer in layer_guards.iter() { + // Find this layer's leftmost key-value pair in the search range. + let found_pair = layer + .as_ref() + .expect("layer must not have been applied") + .nonverifiable_changes + .range::, _>(search_range) + .take_while(|(k, v)| { + tracing::debug!(?v, ?k, "found key-value pair in cache layer"); + match peeked { + Some((peeked_k, _)) => { + k.starts_with(prefix.as_slice()) && k <= &peeked_k + } + None => k.starts_with(prefix.as_slice()), + } + }) + .next(); + + // Check whether the new pair, if any, is the new leftmost pair. + match (leftmost_pair, found_pair) { + // We want to replace the pair even when the key is equal, + // so that we always prefer a newer value over an older value. + (Some((leftmost_k, _)), Some((k, v))) if k <= leftmost_k => { + leftmost_pair = Some((k, v)); + } + (None, Some((k, v))) => { + leftmost_pair = Some((k, v)); + } + _ => {} + } + } + + // Overwrite a Vec, attempting to reuse its existing allocation. + let overwrite_in_place = |dst: &mut Option>, src: &[u8]| { + if let Some(ref mut dst) = dst { + dst.clear(); + dst.extend_from_slice(src); + } else { + *dst = Some(src.to_vec()); + } + }; + + match (leftmost_pair, peeked) { + (Some((k, v)), peeked) => { + // Since we searched for cached keys less than or equal to + // the peeked key, we know that the cached pair takes + // priority over the peeked pair. + // + // If the keys are exactly equal, we advance the underlying stream. + if peeked.map(|(kp, _)| kp) == Some(k) { + let _ = this.underlying.as_mut().poll_next(cx); + } + overwrite_in_place(this.last_key, k); + if let Some(v) = v { + // If the value is Some, we have a key-value pair to yield. + return Poll::Ready(Some(Ok((k.clone(), v.clone())))); + } else { + // If the value is None, this pair represents a deletion, + // so continue looping until we find a non-deleted pair. + continue; + } + } + (None, Some(_)) => { + // There's no cache hit before the peeked pair, so we want + // to extract and return it from the underlying stream. + let Poll::Ready(Some(Ok((k, v)))) = this.underlying.as_mut().poll_next(cx) + else { + unreachable!("peeked stream must yield peeked item"); + }; + overwrite_in_place(this.last_key, &k); + return Poll::Ready(Some(Ok((k, v)))); + } + (None, None) => { + // Terminate the stream, no more items are available. + return Poll::Ready(None); + } + } + } + } +} diff --git a/crates/cnidarium/src/gen/penumbra.cnidarium.v1.rs b/crates/cnidarium/src/gen/penumbra.cnidarium.v1.rs new file mode 100644 index 0000000000..7a04181967 --- /dev/null +++ b/crates/cnidarium/src/gen/penumbra.cnidarium.v1.rs @@ -0,0 +1,815 @@ +/// Performs a key-value query against the nonverifiable storage, +/// using a byte-encoded key. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NonVerifiableKeyValueRequest { + #[prost(message, optional, tag = "1")] + pub key: ::core::option::Option, +} +/// Nested message and enum types in `NonVerifiableKeyValueRequest`. 
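+///
+/// For example (the key bytes are illustrative), a byte-keyed lookup wraps the
+/// raw bytes in the nested `Key` message:
+/// `NonVerifiableKeyValueRequest { key: Some(non_verifiable_key_value_request::Key { inner: b"some/nonverifiable/key".to_vec() }) }`.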
+pub mod non_verifiable_key_value_request { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Key { + #[prost(bytes = "vec", tag = "1")] + pub inner: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Key { + const NAME: &'static str = "Key"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for NonVerifiableKeyValueRequest { + const NAME: &'static str = "NonVerifiableKeyValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NonVerifiableKeyValueResponse { + /// The value corresponding to the specified key, if it was found. + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, +} +/// Nested message and enum types in `NonVerifiableKeyValueResponse`. +pub mod non_verifiable_key_value_response { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Value { + #[prost(bytes = "vec", tag = "1")] + pub value: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Value { + const NAME: &'static str = "Value"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for NonVerifiableKeyValueResponse { + const NAME: &'static str = "NonVerifiableKeyValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Performs a key-value query against the JMT, either by key or by key hash. +/// +/// Proofs are only supported by key. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct KeyValueRequest { + /// If set, the key to fetch from storage. + #[prost(string, tag = "2")] + pub key: ::prost::alloc::string::String, + /// whether to return a proof + #[prost(bool, tag = "3")] + pub proof: bool, +} +impl ::prost::Name for KeyValueRequest { + const NAME: &'static str = "KeyValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct KeyValueResponse { + /// The value corresponding to the specified key, if it was found. + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, + /// A proof of existence or non-existence. + #[prost(message, optional, tag = "2")] + pub proof: ::core::option::Option< + ::ibc_proto::ibc::core::commitment::v1::MerkleProof, + >, +} +/// Nested message and enum types in `KeyValueResponse`. 
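+///
+/// The nested `Value` message wraps the raw bytes of the result; when a proof
+/// was requested, the accompanying `MerkleProof` is an ICS-23 style existence
+/// (or non-existence) proof for the key against the JMT root.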
+pub mod key_value_response { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Value { + #[prost(bytes = "vec", tag = "1")] + pub value: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Value { + const NAME: &'static str = "Value"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.KeyValueResponse.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for KeyValueResponse { + const NAME: &'static str = "KeyValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Performs a prefixed key-value query, by string prefix. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PrefixValueRequest { + /// The prefix to fetch subkeys from storage. + #[prost(string, tag = "2")] + pub prefix: ::prost::alloc::string::String, +} +impl ::prost::Name for PrefixValueRequest { + const NAME: &'static str = "PrefixValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PrefixValueResponse { + #[prost(string, tag = "1")] + pub key: ::prost::alloc::string::String, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for PrefixValueResponse { + const NAME: &'static str = "PrefixValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Requests a stream of new key-value pairs that have been committed to the state. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WatchRequest { + /// A regex for keys in the verifiable storage. + /// + /// Only key-value updates whose keys match this regex will be returned. + /// Note that the empty string matches all keys. + /// To exclude all keys, use the regex "$^", which matches no strings. + #[prost(string, tag = "1")] + pub key_regex: ::prost::alloc::string::String, + /// A regex for keys in the nonverifiable storage. + /// + /// Only key-value updates whose keys match this regex will be returned. + /// Note that the empty string matches all keys. + /// To exclude all keys, use the regex "$^", which matches no strings. + #[prost(string, tag = "2")] + pub nv_key_regex: ::prost::alloc::string::String, +} +impl ::prost::Name for WatchRequest { + const NAME: &'static str = "WatchRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// A key-value pair that has been committed to the state. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WatchResponse { + /// The state version the key-value pair was committed at. + #[prost(uint64, tag = "1")] + pub version: u64, + /// The entry that was committed. 
+ #[prost(oneof = "watch_response::Entry", tags = "5, 6")] + pub entry: ::core::option::Option, +} +/// Nested message and enum types in `WatchResponse`. +pub mod watch_response { + /// Elements of the verifiable storage have string keys. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct KeyValue { + #[prost(string, tag = "1")] + pub key: ::prost::alloc::string::String, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, + /// If set to true, the key-value pair was deleted. + /// This allows distinguishing a deleted key-value pair from a key-value pair whose value is empty. + #[prost(bool, tag = "3")] + pub deleted: bool, + } + impl ::prost::Name for KeyValue { + const NAME: &'static str = "KeyValue"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.WatchResponse.{}", Self::NAME) + } + } + /// Elements of the nonverifiable storage have byte keys. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct NvKeyValue { + #[prost(bytes = "vec", tag = "1")] + pub key: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, + /// If set to true, the key-value pair was deleted. + /// This allows distinguishing a deleted key-value pair from a key-value pair whose value is empty. + #[prost(bool, tag = "3")] + pub deleted: bool, + } + impl ::prost::Name for NvKeyValue { + const NAME: &'static str = "NvKeyValue"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.WatchResponse.{}", Self::NAME) + } + } + /// The entry that was committed. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Entry { + #[prost(message, tag = "5")] + Kv(KeyValue), + #[prost(message, tag = "6")] + NvKv(NvKeyValue), + } +} +impl ::prost::Name for WatchResponse { + const NAME: &'static str = "WatchResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Generated client implementations. +#[cfg(feature = "rpc")] +pub mod query_service_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct QueryServiceClient { + inner: tonic::client::Grpc, + } + impl QueryServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. 
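+        ///
+        /// For example (the address is illustrative):
+        /// `QueryServiceClient::connect("http://127.0.0.1:8080").await?`.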
+ pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl QueryServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> QueryServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + Send + Sync, + { + QueryServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. + /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the JMT storage. + pub async fn key_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/KeyValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "KeyValue"), + ); + self.inner.unary(req, path, codec).await + } + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the non-verifiable storage. 
+ pub async fn non_verifiable_key_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/NonVerifiableKeyValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "penumbra.cnidarium.v1.QueryService", + "NonVerifiableKeyValue", + ), + ); + self.inner.unary(req, path, codec).await + } + /// General-purpose prefixed key-value state query API, that can be used to query + /// arbitrary prefixes in the JMT storage. + pub async fn prefix_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/PrefixValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "PrefixValue"), + ); + self.inner.server_streaming(req, path, codec).await + } + /// Subscribes to a stream of key-value updates, with regex filtering on keys. + pub async fn watch( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/Watch", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "Watch")); + self.inner.server_streaming(req, path, codec).await + } + } +} +/// Generated server implementations. +#[cfg(feature = "rpc")] +pub mod query_service_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. + #[async_trait] + pub trait QueryService: Send + Sync + 'static { + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the JMT storage. + async fn key_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the non-verifiable storage. + async fn non_verifiable_key_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the PrefixValue method. + type PrefixValueStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + Send + + 'static; + /// General-purpose prefixed key-value state query API, that can be used to query + /// arbitrary prefixes in the JMT storage. 
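+        ///
+        /// Implementations stream back one `PrefixValueResponse` per matching key:
+        /// for example, a request with `prefix: "dex/"` (an illustrative prefix)
+        /// yields every `(key, value)` pair whose key starts with `dex/`.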
+ async fn prefix_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the Watch method. + type WatchStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + Send + + 'static; + /// Subscribes to a stream of key-value updates, with regex filtering on keys. + async fn watch( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + } + #[derive(Debug)] + pub struct QueryServiceServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + struct _Inner(Arc); + impl QueryServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for QueryServiceServer + where + T: QueryService, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/penumbra.cnidarium.v1.QueryService/KeyValue" => { + #[allow(non_camel_case_types)] + struct KeyValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::UnaryService + for KeyValueSvc { + type Response = super::KeyValueResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::key_value(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = KeyValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/NonVerifiableKeyValue" => { + #[allow(non_camel_case_types)] + struct NonVerifiableKeyValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::UnaryService + for NonVerifiableKeyValueSvc { + type Response = super::NonVerifiableKeyValueResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::non_verifiable_key_value( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = NonVerifiableKeyValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/PrefixValue" => { + #[allow(non_camel_case_types)] + struct PrefixValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::ServerStreamingService + for PrefixValueSvc { + type Response = super::PrefixValueResponse; + type ResponseStream 
= T::PrefixValueStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::prefix_value(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = PrefixValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/Watch" => { + #[allow(non_camel_case_types)] + struct WatchSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::ServerStreamingService + for WatchSvc { + type Response = super::WatchResponse; + type ResponseStream = T::WatchStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::watch(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = WatchSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } + } + } + } + impl Clone for QueryServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.cnidarium.v1.QueryService"; + } +} diff --git a/crates/cnidarium/src/gen/penumbra.cnidarium.v1.serde.rs b/crates/cnidarium/src/gen/penumbra.cnidarium.v1.serde.rs new file mode 100644 index 0000000000..8751e80e6e --- /dev/null +++ 
b/crates/cnidarium/src/gen/penumbra.cnidarium.v1.serde.rs @@ -0,0 +1,1433 @@ +impl serde::Serialize for KeyValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if self.proof { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueRequest", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if self.proof { + struct_ser.serialize_field("proof", &self.proof)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for KeyValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "proof", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Proof, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "proof" => Ok(GeneratedField::Proof), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = KeyValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut proof__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Proof => { + if proof__.is_some() { + return Err(serde::de::Error::duplicate_field("proof")); + } + proof__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(KeyValueRequest { + key: key__.unwrap_or_default(), + proof: proof__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for KeyValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + if self.proof.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueResponse", len)?; + if let Some(v) = self.value.as_ref() { + struct_ser.serialize_field("value", v)?; + } + if let Some(v) = self.proof.as_ref() { + struct_ser.serialize_field("proof", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for KeyValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + "proof", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + Proof, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + "proof" => Ok(GeneratedField::Proof), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = KeyValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + let mut proof__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = map_.next_value()?; + } + GeneratedField::Proof => { + if proof__.is_some() { + return Err(serde::de::Error::duplicate_field("proof")); + } + proof__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(KeyValueResponse { + value: value__, + proof: proof__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for key_value_response::Value { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueResponse.Value", len)?; + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for key_value_response::Value { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = key_value_response::Value; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueResponse.Value") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(key_value_response::Value { + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueResponse.Value", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for NonVerifiableKeyValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.key.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest", len)?; + if let Some(v) = self.key.as_ref() { + struct_ser.serialize_field("key", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for NonVerifiableKeyValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = NonVerifiableKeyValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(NonVerifiableKeyValueRequest { + key: key__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for non_verifiable_key_value_request::Key { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.inner.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.Key", len)?; + if !self.inner.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for non_verifiable_key_value_request::Key { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "inner", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Inner, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "inner" => Ok(GeneratedField::Inner), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = non_verifiable_key_value_request::Key; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.Key") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut inner__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Inner => { + if inner__.is_some() { + return Err(serde::de::Error::duplicate_field("inner")); + } + inner__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(non_verifiable_key_value_request::Key { + inner: inner__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.Key", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for NonVerifiableKeyValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse", len)?; + if let Some(v) = self.value.as_ref() { + struct_ser.serialize_field("value", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for NonVerifiableKeyValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = NonVerifiableKeyValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(NonVerifiableKeyValueResponse { + value: value__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for non_verifiable_key_value_response::Value { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.Value", len)?; + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for non_verifiable_key_value_response::Value { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = non_verifiable_key_value_response::Value; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.Value") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(non_verifiable_key_value_response::Value { + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.Value", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PrefixValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.prefix.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.PrefixValueRequest", len)?; + if !self.prefix.is_empty() { + struct_ser.serialize_field("prefix", &self.prefix)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PrefixValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "prefix", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Prefix, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "prefix" => Ok(GeneratedField::Prefix), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PrefixValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.PrefixValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut prefix__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Prefix => { + if prefix__.is_some() { + return Err(serde::de::Error::duplicate_field("prefix")); + } + prefix__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PrefixValueRequest { + prefix: prefix__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.PrefixValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PrefixValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.PrefixValueResponse", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PrefixValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PrefixValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.PrefixValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PrefixValueResponse { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.PrefixValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for WatchRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key_regex.is_empty() { + len += 1; + } + if !self.nv_key_regex.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchRequest", len)?; + if !self.key_regex.is_empty() { + struct_ser.serialize_field("keyRegex", &self.key_regex)?; + } + if !self.nv_key_regex.is_empty() { + struct_ser.serialize_field("nvKeyRegex", &self.nv_key_regex)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WatchRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key_regex", + "keyRegex", + "nv_key_regex", + "nvKeyRegex", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + KeyRegex, + NvKeyRegex, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "keyRegex" | "key_regex" => Ok(GeneratedField::KeyRegex), + "nvKeyRegex" | "nv_key_regex" => Ok(GeneratedField::NvKeyRegex), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WatchRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key_regex__ = None; + let mut nv_key_regex__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::KeyRegex => { + if key_regex__.is_some() { + return Err(serde::de::Error::duplicate_field("keyRegex")); + } + key_regex__ = Some(map_.next_value()?); + } + GeneratedField::NvKeyRegex => { + if nv_key_regex__.is_some() { + return Err(serde::de::Error::duplicate_field("nvKeyRegex")); + } + nv_key_regex__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WatchRequest { + key_regex: key_regex__.unwrap_or_default(), + nv_key_regex: nv_key_regex__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for WatchResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.version != 0 { + len += 1; + } + if self.entry.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse", len)?; + if self.version != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("version", ToString::to_string(&self.version).as_str())?; + } + if let Some(v) = self.entry.as_ref() { + match v { + watch_response::Entry::Kv(v) => { + struct_ser.serialize_field("kv", v)?; + } + watch_response::Entry::NvKv(v) => { + struct_ser.serialize_field("nvKv", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WatchResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "version", + "kv", + "nv_kv", + "nvKv", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Version, + Kv, + NvKv, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "version" => Ok(GeneratedField::Version), + "kv" => Ok(GeneratedField::Kv), + "nvKv" | "nv_kv" => Ok(GeneratedField::NvKv), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WatchResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut version__ = None; + let mut entry__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Version => { + if version__.is_some() { + return Err(serde::de::Error::duplicate_field("version")); + } + version__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Kv => { + if entry__.is_some() { + return Err(serde::de::Error::duplicate_field("kv")); + } + entry__ = map_.next_value::<::std::option::Option<_>>()?.map(watch_response::Entry::Kv) +; + } + GeneratedField::NvKv => { + if entry__.is_some() { + return Err(serde::de::Error::duplicate_field("nvKv")); + } + entry__ = map_.next_value::<::std::option::Option<_>>()?.map(watch_response::Entry::NvKv) +; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WatchResponse { + version: version__.unwrap_or_default(), + entry: entry__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for watch_response::KeyValue { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + if self.deleted { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse.KeyValue", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + if self.deleted { + struct_ser.serialize_field("deleted", &self.deleted)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for watch_response::KeyValue { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + "deleted", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + Deleted, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + "deleted" => Ok(GeneratedField::Deleted), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = watch_response::KeyValue; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse.KeyValue") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + let mut deleted__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Deleted => { + if deleted__.is_some() { + return Err(serde::de::Error::duplicate_field("deleted")); + } + deleted__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(watch_response::KeyValue { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + deleted: deleted__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse.KeyValue", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for watch_response::NvKeyValue { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + if self.deleted { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse.NvKeyValue", len)?; + if !self.key.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("key", pbjson::private::base64::encode(&self.key).as_str())?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + if self.deleted { + struct_ser.serialize_field("deleted", &self.deleted)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for watch_response::NvKeyValue { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + "deleted", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + Deleted, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + "deleted" => Ok(GeneratedField::Deleted), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = watch_response::NvKeyValue; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse.NvKeyValue") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + let mut deleted__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Deleted => { + if deleted__.is_some() { + return Err(serde::de::Error::duplicate_field("deleted")); + } + deleted__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(watch_response::NvKeyValue { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + deleted: deleted__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse.NvKeyValue", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.rs b/crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.rs new file mode 100644 index 0000000000..7c9f774f27 --- /dev/null +++ b/crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.rs @@ -0,0 +1,671 @@ +/// Performs a key-value query, either by key or by key hash. +/// +/// Proofs are only supported by key. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct KeyValueRequest { + /// If set, the key to fetch from storage. + #[prost(string, tag = "2")] + pub key: ::prost::alloc::string::String, + /// whether to return a proof + #[prost(bool, tag = "3")] + pub proof: bool, +} +impl ::prost::Name for KeyValueRequest { + const NAME: &'static str = "KeyValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct KeyValueResponse { + /// The value corresponding to the specified key, if it was found. + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, + /// A proof of existence or non-existence. + #[prost(message, optional, tag = "2")] + pub proof: ::core::option::Option< + ::ibc_proto::ibc::core::commitment::v1::MerkleProof, + >, +} +/// Nested message and enum types in `KeyValueResponse`. +pub mod key_value_response { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Value { + #[prost(bytes = "vec", tag = "1")] + pub value: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Value { + const NAME: &'static str = "Value"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.KeyValueResponse.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for KeyValueResponse { + const NAME: &'static str = "KeyValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Performs a prefixed key-value query, by string prefix. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PrefixValueRequest { + /// The prefix to fetch subkeys from storage. 
+    #[prost(string, tag = "2")]
+    pub prefix: ::prost::alloc::string::String,
+}
+impl ::prost::Name for PrefixValueRequest {
+    const NAME: &'static str = "PrefixValueRequest";
+    const PACKAGE: &'static str = "penumbra.cnidarium.v1";
+    fn full_name() -> ::prost::alloc::string::String {
+        ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME)
+    }
+}
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct PrefixValueResponse {
+    #[prost(string, tag = "1")]
+    pub key: ::prost::alloc::string::String,
+    #[prost(bytes = "vec", tag = "2")]
+    pub value: ::prost::alloc::vec::Vec<u8>,
+}
+impl ::prost::Name for PrefixValueResponse {
+    const NAME: &'static str = "PrefixValueResponse";
+    const PACKAGE: &'static str = "penumbra.cnidarium.v1";
+    fn full_name() -> ::prost::alloc::string::String {
+        ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME)
+    }
+}
+/// Requests a stream of new key-value pairs that have been committed to the state.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct WatchRequest {
+    /// A regex for keys in the verifiable storage.
+    ///
+    /// Only key-value updates whose keys match this regex will be returned.
+    /// Note that the empty string matches all keys.
+    /// To exclude all keys, use the regex "$^", which matches no strings.
+    #[prost(string, tag = "1")]
+    pub key_regex: ::prost::alloc::string::String,
+    /// A regex for keys in the nonverifiable storage.
+    ///
+    /// Only key-value updates whose keys match this regex will be returned.
+    /// Note that the empty string matches all keys.
+    /// To exclude all keys, use the regex "$^", which matches no strings.
+    #[prost(string, tag = "2")]
+    pub nv_key_regex: ::prost::alloc::string::String,
+}
+impl ::prost::Name for WatchRequest {
+    const NAME: &'static str = "WatchRequest";
+    const PACKAGE: &'static str = "penumbra.cnidarium.v1";
+    fn full_name() -> ::prost::alloc::string::String {
+        ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME)
+    }
+}
+/// A key-value pair that has been committed to the state.
+#[allow(clippy::derive_partial_eq_without_eq)]
+#[derive(Clone, PartialEq, ::prost::Message)]
+pub struct WatchResponse {
+    /// The state version the key-value pair was committed at.
+    #[prost(uint64, tag = "1")]
+    pub version: u64,
+    /// The entry that was committed.
+    #[prost(oneof = "watch_response::Entry", tags = "5, 6")]
+    pub entry: ::core::option::Option<watch_response::Entry>,
+}
+/// Nested message and enum types in `WatchResponse`.
+pub mod watch_response {
+    /// Elements of the verifiable storage have string keys.
+    #[allow(clippy::derive_partial_eq_without_eq)]
+    #[derive(Clone, PartialEq, ::prost::Message)]
+    pub struct KeyValue {
+        #[prost(string, tag = "1")]
+        pub key: ::prost::alloc::string::String,
+        #[prost(bytes = "vec", tag = "2")]
+        pub value: ::prost::alloc::vec::Vec<u8>,
+        /// If set to true, the key-value pair was deleted.
+        /// This allows distinguishing a deleted key-value pair from a key-value pair whose value is empty.
+        #[prost(bool, tag = "3")]
+        pub deleted: bool,
+    }
+    impl ::prost::Name for KeyValue {
+        const NAME: &'static str = "KeyValue";
+        const PACKAGE: &'static str = "penumbra.cnidarium.v1";
+        fn full_name() -> ::prost::alloc::string::String {
+            ::prost::alloc::format!(
+                "penumbra.cnidarium.v1.WatchResponse.{}", Self::NAME
+            )
+        }
+    }
+    /// Elements of the nonverifiable storage have byte keys.
+ #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct NvKeyValue { + #[prost(bytes = "vec", tag = "1")] + pub key: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, + /// If set to true, the key-value pair was deleted. + /// This allows distinguishing a deleted key-value pair from a key-value pair whose value is empty. + #[prost(bool, tag = "3")] + pub deleted: bool, + } + impl ::prost::Name for NvKeyValue { + const NAME: &'static str = "NvKeyValue"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.WatchResponse.{}", Self::NAME + ) + } + } + /// The entry that was committed. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Entry { + #[prost(message, tag = "5")] + Kv(KeyValue), + #[prost(message, tag = "6")] + NvKv(NvKeyValue), + } +} +impl ::prost::Name for WatchResponse { + const NAME: &'static str = "WatchResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Generated client implementations. +#[cfg(feature = "rpc")] +pub mod query_service_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct QueryServiceClient { + inner: tonic::client::Grpc, + } + impl QueryServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl QueryServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> QueryServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + Send + Sync, + { + QueryServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the JMT storage. + pub async fn key_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/KeyValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "penumbra.cnidarium.v1.QueryService", + "KeyValue", + ), + ); + self.inner.unary(req, path, codec).await + } + /// General-purpose prefixed key-value state query API, that can be used to query + /// arbitrary prefixes in the JMT storage. + pub async fn prefix_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/PrefixValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "penumbra.cnidarium.v1.QueryService", + "PrefixValue", + ), + ); + self.inner.server_streaming(req, path, codec).await + } + /// Subscribes to a stream of key-value updates, with regex filtering on keys. + pub async fn watch( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/Watch", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "Watch"), + ); + self.inner.server_streaming(req, path, codec).await + } + } +} +/// Generated server implementations. +#[cfg(feature = "rpc")] +pub mod query_service_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. + #[async_trait] + pub trait QueryService: Send + Sync + 'static { + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the JMT storage. + async fn key_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the PrefixValue method. + type PrefixValueStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + Send + + 'static; + /// General-purpose prefixed key-value state query API, that can be used to query + /// arbitrary prefixes in the JMT storage. 
+ async fn prefix_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the Watch method. + type WatchStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + Send + + 'static; + /// Subscribes to a stream of key-value updates, with regex filtering on keys. + async fn watch( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + } + #[derive(Debug)] + pub struct QueryServiceServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + struct _Inner(Arc); + impl QueryServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for QueryServiceServer + where + T: QueryService, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/penumbra.cnidarium.v1.QueryService/KeyValue" => { + #[allow(non_camel_case_types)] + struct KeyValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::UnaryService + for KeyValueSvc { + type Response = super::KeyValueResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::key_value(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = KeyValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/PrefixValue" => { + #[allow(non_camel_case_types)] + struct PrefixValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::ServerStreamingService + for PrefixValueSvc { + type Response = super::PrefixValueResponse; + type ResponseStream = T::PrefixValueStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::prefix_value(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = PrefixValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/Watch" => { + #[allow(non_camel_case_types)] + struct WatchSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::ServerStreamingService + for WatchSvc { + type Response = super::WatchResponse; + type ResponseStream = T::WatchStream; + type 
Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::watch(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = WatchSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } + } + } + } + impl Clone for QueryServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.cnidarium.v1.QueryService"; + } +} diff --git a/crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.serde.rs b/crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.serde.rs new file mode 100644 index 0000000000..7a58283430 --- /dev/null +++ b/crates/cnidarium/src/gen/penumbra.cnidarium.v1alpha1.serde.rs @@ -0,0 +1,1047 @@ +impl serde::Serialize for KeyValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if self.proof { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueRequest", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if self.proof { + struct_ser.serialize_field("proof", &self.proof)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for KeyValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "proof", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Proof, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result 
{ + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "proof" => Ok(GeneratedField::Proof), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = KeyValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut proof__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Proof => { + if proof__.is_some() { + return Err(serde::de::Error::duplicate_field("proof")); + } + proof__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(KeyValueRequest { + key: key__.unwrap_or_default(), + proof: proof__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for KeyValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + if self.proof.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueResponse", len)?; + if let Some(v) = self.value.as_ref() { + struct_ser.serialize_field("value", v)?; + } + if let Some(v) = self.proof.as_ref() { + struct_ser.serialize_field("proof", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for KeyValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + "proof", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + Proof, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + "proof" => Ok(GeneratedField::Proof), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = KeyValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: 
serde::de::MapAccess<'de>, + { + let mut value__ = None; + let mut proof__ = None; + while let Some(k) = map_.next_key()? { + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = map_.next_value()?; + } + GeneratedField::Proof => { + if proof__.is_some() { + return Err(serde::de::Error::duplicate_field("proof")); + } + proof__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(KeyValueResponse { + value: value__, + proof: proof__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for key_value_response::Value { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueResponse.Value", len)?; + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for key_value_response::Value { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = key_value_response::Value; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueResponse.Value") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(key_value_response::Value { + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueResponse.Value", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PrefixValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.prefix.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.PrefixValueRequest", len)?; + if !self.prefix.is_empty() { + struct_ser.serialize_field("prefix", &self.prefix)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PrefixValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "prefix", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Prefix, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "prefix" => Ok(GeneratedField::Prefix), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PrefixValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.PrefixValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut prefix__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Prefix => { + if prefix__.is_some() { + return Err(serde::de::Error::duplicate_field("prefix")); + } + prefix__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PrefixValueRequest { + prefix: prefix__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.PrefixValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PrefixValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.PrefixValueResponse", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PrefixValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PrefixValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.PrefixValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PrefixValueResponse { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.PrefixValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for WatchRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key_regex.is_empty() { + len += 1; + } + if !self.nv_key_regex.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchRequest", len)?; + if !self.key_regex.is_empty() { + struct_ser.serialize_field("keyRegex", &self.key_regex)?; + } + if !self.nv_key_regex.is_empty() { + struct_ser.serialize_field("nvKeyRegex", &self.nv_key_regex)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WatchRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key_regex", + "keyRegex", + "nv_key_regex", + "nvKeyRegex", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + KeyRegex, + NvKeyRegex, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "keyRegex" | "key_regex" => Ok(GeneratedField::KeyRegex), + "nvKeyRegex" | "nv_key_regex" => Ok(GeneratedField::NvKeyRegex), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WatchRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key_regex__ = None; + let mut nv_key_regex__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::KeyRegex => { + if key_regex__.is_some() { + return Err(serde::de::Error::duplicate_field("keyRegex")); + } + key_regex__ = Some(map_.next_value()?); + } + GeneratedField::NvKeyRegex => { + if nv_key_regex__.is_some() { + return Err(serde::de::Error::duplicate_field("nvKeyRegex")); + } + nv_key_regex__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WatchRequest { + key_regex: key_regex__.unwrap_or_default(), + nv_key_regex: nv_key_regex__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for WatchResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.version != 0 { + len += 1; + } + if self.entry.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse", len)?; + if self.version != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("version", ToString::to_string(&self.version).as_str())?; + } + if let Some(v) = self.entry.as_ref() { + match v { + watch_response::Entry::Kv(v) => { + struct_ser.serialize_field("kv", v)?; + } + watch_response::Entry::NvKv(v) => { + struct_ser.serialize_field("nvKv", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WatchResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "version", + "kv", + "nv_kv", + "nvKv", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Version, + Kv, + NvKv, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "version" => Ok(GeneratedField::Version), + "kv" => Ok(GeneratedField::Kv), + "nvKv" | "nv_kv" => Ok(GeneratedField::NvKv), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WatchResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut version__ = None; + let mut entry__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Version => { + if version__.is_some() { + return Err(serde::de::Error::duplicate_field("version")); + } + version__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Kv => { + if entry__.is_some() { + return Err(serde::de::Error::duplicate_field("kv")); + } + entry__ = map_.next_value::<::std::option::Option<_>>()?.map(watch_response::Entry::Kv) +; + } + GeneratedField::NvKv => { + if entry__.is_some() { + return Err(serde::de::Error::duplicate_field("nvKv")); + } + entry__ = map_.next_value::<::std::option::Option<_>>()?.map(watch_response::Entry::NvKv) +; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WatchResponse { + version: version__.unwrap_or_default(), + entry: entry__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for watch_response::KeyValue { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + if self.deleted { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse.KeyValue", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + if self.deleted { + struct_ser.serialize_field("deleted", &self.deleted)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for watch_response::KeyValue { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + "deleted", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + Deleted, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + "deleted" => Ok(GeneratedField::Deleted), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = watch_response::KeyValue; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse.KeyValue") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + let mut deleted__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Deleted => { + if deleted__.is_some() { + return Err(serde::de::Error::duplicate_field("deleted")); + } + deleted__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(watch_response::KeyValue { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + deleted: deleted__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse.KeyValue", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for watch_response::NvKeyValue { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + if self.deleted { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse.NvKeyValue", len)?; + if !self.key.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("key", pbjson::private::base64::encode(&self.key).as_str())?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + if self.deleted { + struct_ser.serialize_field("deleted", &self.deleted)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for watch_response::NvKeyValue { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + "deleted", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + Deleted, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + "deleted" => Ok(GeneratedField::Deleted), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = watch_response::NvKeyValue; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse.NvKeyValue") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + let mut deleted__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Deleted => { + if deleted__.is_some() { + return Err(serde::de::Error::duplicate_field("deleted")); + } + deleted__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(watch_response::NvKeyValue { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + deleted: deleted__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse.NvKeyValue", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/cnidarium/src/gen/proto_descriptor.bin.no_lfs b/crates/cnidarium/src/gen/proto_descriptor.bin.no_lfs new file mode 100644 index 0000000000000000000000000000000000000000..383649ad97d9ab3daa8efd3942653231e8e3d5a5 GIT binary patch literal 102050 zcmd?S3wWJJb?@!{?yFzPvb^Oaj*^f!%FRw}C2>+n94?h@IT5ujA4yI~PkWSYY1>wi zwAH;O9CN<%9nw$;ffK?hH02TsUrP!nKxx0yrhWK&fEGx~C8wkW3guP`w2(9{kP--R ze*ZNy@4L68To3u4KF{&v$KEsV%$hZ8)~s1;&6=6`NAD=a$&p6m$ZY+Bg=S;1acJrA z1=ID`RC8uwvC$l~yy|*a!lfuNurJ=QXJ)p(J1sa~U#wml7Y>uTDVO9ocWpmwa9O3n z&Kd{Q+L+ZCxUCrXt@5>6jSKU&IYG>AUOQ&LtDEBTLT&2BwIlUSk!6Qu^;vN|U0bddyWA)mL+lzKt(H>>D z72Fkf&(&Kk@R?jZzEIy(u-+#tdY{qC^}Ng1nxIm3C|+BiUz&3eiUwgN{e~&A97;vi zJcx3u-h9c-RDDy)0Gzu5g>mU#!o!W*YOG$_C`@6+rgP)MuxcgUISt zpBzD?oT^VwHKyy6GxLWVo7NhD(^mi( zw}QJUH$t(o9#=7>t0%^#*75np+Vz{djYE8@x;-w}r)PxQrXD`@Y~Qp3^050p777mB zRf^Xy>+Py+d3jtsEXu%?;{GQ|H&>^V@NV>;B!D);55-;c_10p2+HsNiziayw^I_~N ztz<*T$WN6GUlgxTd1$g(n`fFje!gG@t(TE?a5(VZn5vESq;M=<-5tk``Fi6pQ*vq+ z-ddf^qbjlt^4MCdxSM*_%QE&XKL&U0bJKYRhaQZ3nsv=FNbGvH){q!{!k&%!YOZ7J z0uYlTPbYb*8{=YYvDREP;aD7ViE3}0M*x~+ERN}C z_2;t==dHu%tU$LD^74N7KPO(-(Sax1^uQ+y@d>NKuD&SVxHLaAzgTZBH0xqnCo(pD z(@C9`On0!ow8L<$uLFe3tX>m$i9c%1n$2@J8@E5XE)R-Be9fKtSLLFyc39Qt#FfMK z+Ts!evuV4L@{|>D_qYNqhhu5s>RjCYl6rII@XVA$O5@P8{n8bcSEbe9Rc%$r7i)|4 z9r3~aR}GJi>=_x}?a=H1-~SuAxT~!v5{E1jhw4qwDn3;`C0=`YX?E5zom)#Q$`Hdz z#@TWvTv>YcyV@UD0#h=nbDJw;Y1(~Tp!fA@)*t`PrC(U66ncvTm&6-Zbfvl;REKx% z9U2>&7#ZE)yQbO`xAm?!ci!5{x5~Z$g+I~2hf49r)mEQX<#E<&^`JNwIz1E@XKRP* zv#iq^g!5Mn!fGuH?p2jBSF`$jPa2V+XZv|i4ua&XCnIzhIUU_atF>blWfm#+p}#HUFGUTDaBwwAH(}Tu4Y?GF|avKb32K)5Wvs_#y9%}0fe%D+LB!C| zPh4|gcyjmX!JT`Ddvh9k%hE&Cp(Gm*$NP9~d5*7~T!N+VcLx!I3fYqJgP+<7zA9s#Bxsvnr`G zk*ML!MB?SyoOcvFV^tE^AS@^2z#X~x#MSMJRdINJT$-yd9&H3Yd8IP4&pt1s_n5EM zuD&^3S1}Hpm9qlVo1*xHRe`UH=xK30GrzF3=#*)mg8ByFz%h2ovF_+PPP`C_1fIb{1La>?kFCv&9>@e>)q$^@KrTT zEp4{dur)ZX;XbD|2Hvwa?vlan8?T-bcVAa~No_J%*3RJWQr-b)Y}3d_D?%UZ%}Mm6 zR%>3ixt58wV^w_{)u&zIOZDgCKC3auf;F=+Tc4D7ptUKcLM33fK_&M20>Y?;o`0HE zJ5ryoqs7%H>%X!@zb9++)00PQt)rVZ3c${2Q*P{J6}-Zi8@6IY^V3&SsJbIQ(V##> z2sk-)v_AFX$)&}^mu@TR0rYWsmc0$?GOWBYx1MD>2xbxSZpp~-Azpm+s7gMRy{A?&^UD6lw+((z#N{r ze$!c0SUa{}AB?RISax*_uv~n8S}<*W)7h@5%aQ`Cv%LD;_#}qG{BEuK z;^eWJrcKtlnzM*g3A53c!X&$zGE7B^RQJZG&o>a42$foM8ka)wcuv-)n0GB^y<~n0 z<<$8GwwD#omZl+JwAwi*v>2on+vfzD_grm(NwL^GZn`@L6dA)xSFeiC$)?ZbZ2d@W z>iDE7>gHF%D>i=^-{oR!lVpdJe*;;Ky;k9c=<#O2>YJ2IyU0R8E zl$>rq=4#3sMqs=c*<}Mf=2O)x;?i|3gRP~rXI1|-{ymS$;0wnM_}UkaPwpQb+c&g# z%vZ0T92aJ5FFoG5W?L3QasAS~a|Ph@uu|*WnWZQDx)%^nmak}0PU+5-)v~IaW30nN zOGhS~_37HwV&{5-cY{x3_NDp@@mjfJ=FM%Q)uU(ow#O&0BgI&#G?vw`UKyA1O&zJj 
zt$padJ=<6M$T~*Kfts)?qU%_wUcEZ*^*&oBsoB0Q6ae}Dwmv>~-jloART^9Gq4;#G zV_ux_ROZFOc)fQ6O}3U678};$KXWdz{$Ip*=-k@A^X-ms z39BdL2$xwB3^gsvF46zSDDLU(7!#=)Mhp*MvoXCi#blF4`FQPY@O4JgD(!$WIcKx> z$pG4&=^_`zeE|ScWb#<8i8t{GJ4B|rwtc$VeL>6wc46MkHE{TPchO^S81#7j$6Wb= z3*w@QpK2U?0fcj_5*K!jj_qc7WIc3ZY4X6x@Ge%!fuD;@PNrz-Or;8yT+%xzY+=>c zgZp+4k5LV7Ts9A@C&U}Zh9`zuXh-)?9^C&6`$w;qXRRYUtiw6RwKpq0K6-G>uz5`s z_b#85)sy1BiJ`G8h9@S?p^Q)aAyt$NE;50RGmUa9RKfXu^srIQQXz$3z4g^HapumHd&jU zK|OctVQg7L%6YQu&y!bO%qlh0?Ww9RZnNg!m+FGjGVnPjO6r9PY|yC>iV;@T2irAz z5&ubZ%kovdL*wjN)mK@poIB#*>0;G3(+2(~7x#EWHxRa+VgpZwVx1#$#$%1h{~tQ< zz;ET_bsfH+W%KMYvHO*H!_0JjZlQsihH)@ke+l~-B16yimAk7QEewui_4caTj=quI z!}|`5B9eDq6BdcF-Zt=7tV2sW^QjR08KdODfw*^hZzRxH=?{xuU%Y<*C`Krj=rL{YiAL|WY($aVu zrW=I8)IACk$vHMUEzbdz)5N*W9F~&3-D6!Y&lO?2r*`B>Q_TiI&Joy?W?RAj7si!9 z2kkt~Llc}Fwrw4uMpl#|BFR%T)0(usFN;5kl4x^ftZSx~?uriF5yx>*Hi* z>5+Xt{Y{rnzkjRz2e~|3lVj@)g8;8O+aAaxnZBwanPH=)Lk7>zErv+R5b3IxcI_P< zAKq-c^wTGBl=xcH6W4qLoHc*7%sOY9tx< z6?bl1L6V{UyFD&oc3^1NFOW*HIUyj>0j2sfOkI4bVzL?hV2ZgB|E{`UnqegOD}3W? zc%rz`feic6_>3f=VH@DrWN7rLwX7=b_XhV+N)oqWd;a5*#5&SOfw5uR%mW!eChL7;pobcLXvEB%<*VkEe`G7J9>3Sh*+&UFW0~?#p`TTQuahCHf35p7)SIE z2hoqH?2c3RM@Etg%#Wsy81)E@4g0;s`~Zt_U|*@`reQBL3@~`ayTHQ?ay*UAa#ei7 zs;tKb_vvB(a?;@mj-$KTnQg-pNR9^`mP|?GPvDNkJ)I1M#5w8U#GXqhuNvArvfDxo zIsrK@qJtnR?&{4C6e_vi+`xFeei;4y( zD&H*keycq2&-u8TaaQW{cu`!L4o(yorTN^)nGB)YUcfut4lu#;3R>)R7( z>wRq%Jx;CaPOVtiOIQa;>f#eXVu_(jfPrf?*BkL~)SJ&@j0Wz&5z`qK$wpdbw&6pL z+-PrJG1{F~2KV~%V`m(Nyo?qahxGuu&COUBx<-tZwUydyq2z@26S9VyDaT4%IkKheq?6es792ZCGZ|ul%J^Mqx{C=N~=CsL%W%FneqLOAF7#p1!-QN3+m6;d!ezQ!xOn0ch{le`RBjJ{1UDErH=G6(M zE}A4ZXJT@6|K4lB6iFLCY;XGzu_t>{_;4A2p40a6t**oXzjQFZcM)#S;mU_hIf!L%0_s-Rh zCx`0d6o}YJh^QnNKusYrhYLmjLz6|g8|?7$Cx`hE1DDZgH4ZPbPf!mWl(ZJ=Q{o_M z&4{s@;+=WNMJ*YugRsTKl_TTH_~@RAtB1yh6MiGtuEN!@JK1>+ABU4&qX(`T8@b}j ziR8-By}O6U#uIc`a`8)#>^wLzIyN3B14H9f9k4G$`>#n*a8cIBlhLunG;*K;9v}L| z$nf~qWMu!Yy$7WtZ%u%Z>>r(o6R&b66Qf(L*_HK@(LKq&;jvv;0%mCE2z&F_Sc7{; zCibf_l+rjEN)8N-O^ob1NPKQ`;NaK+Y|=zM+dVQ)%r5)*y9a5XHk0A2@U17~SE9If zb~H}deHb1S_U+wFb`FE(5P_d+il(mt=otPPbtwC_3*v#`-mP&$NX@PhenAf$S73C_ zRu6O>lSI26Lwl3mL;DEQ98Wes5mgwNT?fa8_X%rAF@A98_{7M>!HMDIiqX;CMr^+; zFn)Q0El$p!}OpFv?d$8M;11-Q7mv z(f#VJcx8BW>>7a;9c%=)CRbm{jsxW(s!?l51ku*du8H`8`pUj>lep)?#Y zKH}MhO5+E2UFm5)7;pbVlngBq+t=KYycj{ynE!ddf+5(G&A(t-$yJ0~)V3hJcGfgA zk>?H82!!|S$_0q^ZRQB#>^Q~8YxCDJqpvuMo^q_VxHWlU{qW&rcfB@`I7d#Jq-ohC zL}qwvyyb)w^I4&1%<~rK+7Jp79T-9wELu_c+E@Yvg3O(8v(ms~0+{=vJv*7Ife|1$ z0n(b43W)?G=xK5GxWr5?k=nLwHIGv%Qky^SWuneEX$jSewz)odWH2p5%%2qI1d*1^ z;OuQKwzkl(N}P+T`ORxK=~tziKX1+Mcx@b2y4+8fdH$NMmYJ*O_0wf;U2~CTQb|8u z=C(D@vCOlpdHr;mgKN&P%(JR_{dAcZtU28>Pp{_n(`8<~=G6HAruDVeJU{yvJ#Diy zgKpu0Pz@g#hj>M9B1WOuA)?2$gcHu?6_ag;#13&%n>_+QEFPUhDmUiOT}+NOnlDbK zmlU9v9BMQcQ3PrW3#1^)b#YW!Q!PAa&1Jd%|CCHHsEG8%hIANMZh?Vl+Hj!(5UIIh zY={W5C4==@(8UyP{A0%ej6i~`vQ-yb#c{r{2C_b^G&n4o7?(!eU-Lo)Rd6KvzbAKt>V>1j9lf7;hvx$pi z@O_R0VCMa>*4uN*aL|5DH)eqAbQ!&;S;xF)#yWCj+L5uD>B{KKhh~_pDCNlKIB6Yi zEX__!*A&qRy3iRHEdY)vQ0+J(kQq>$Jytv3QXwL0>)RSlxSE2J9yG~Xpt)UZ&fw-u zyIr6+`=cJ;w*tM{zu^Sm?P4`Q8lC7r6)8BH7A#V5l!Cu%^kLGDNB#W^$$_@%6nMwl zU*vqHSU!v(W=&((tdf*1I=UG<<^Xf(b=-QJAdFgA8=B+-C)wh-xddwC(Fq|1YU7(s z)-ZJCYJMW>_n*oXoJb3nDL9c9M8+SC27JK^1rJ6ihZF@5o_dC-3WEEpwBT9_UX>PH zOTnwsf^ju}by_f{;MHltn1WYR@J}4quoC@JbV~oba~%ZqQa?zGrTIfzY@tP%BR+`A zC@9U_@kwKTmZ8Q*Y^HPU5L07z30#DTE)QOMuX<{-{7HI zlb`j)Dc5mo=4j(k+D48x4yA3VBZr7)bZz8l-w@1^n{LZwl$@)%F$%D#~6qP)v9O*Kt2~i{(k8H3>q?B4e``q}Yh6>fL zi_YnvTaJq2FP|JDsFiyV2+o?_Wuoz;_UIkWH%fQYp}SaI2Ww(z#au~ zzAnXC0`R&Gi6>tHoUc3c?D*M+GX>|Ot^HU6NY};2!Zr&W=vaQthWBc6+Fp_N=299J 
z5s!0SCWD}grdLU(hG*TdCfHb<;FE-|vG;9oP2`PXywr=x4kcJB>kP1K+QXmz8EOedj zt4UHWBmqbTAPXrFBxN&l+FLGK3y@|CWGz6NU11tZiZ)MjS|23=(mZ1z{vN^+f+0q) zk1pu{M$V}`wpkmrPbQGXH^T<)W&s?FP9jE6O(ZWS!N=zOZ(TwOnX@>(X^<%jO$8#N&5G)Q%;KqjT^eFg_q}| zq+befhqkBzXzyRV&X~NrU=`w!EGgMl4auC6ef=SsSMs#e<1H*;QML4n+?wBI1v{l< z1@jhiB2O|izKfL$5nOmht|ASwP>5K%3csF~QRyRxh& zN(M*>9+D{}b?2rWliC*s(oH#LwSz?J=3I2X|HyKnWPp?h(#<)9ovX@%K=PDxLelDP z5$6ggs=g+NymMs{(nj%kC>p^SL|AV32MhtR>3;;4Qhn zO<4vA+>$%x)U*W(ye?Nbg%*adU)h3&j?Y;OXbfbuwV>g9T@FhkAcfvQ2CUHI`@j&s zK3BLPEg<@lk-^k3zCO2ZAS(gP*XPbWFUtVt>vISTHvr)98yQouYAG28fMts$LdQ{@ zb&F(wU&goArx!t{#XanpE;a#`= zi$uHJv$X}-(DAGo;H`uud5%Iktrl*7ES}{nl;Lw2eKHu$+cSz)`EEUZS5@x<+o)#OBGspTPD6L)xz6z(N;e^s1Ztru|$s`*|jNr z1=ibhC!ZUVKze%)&4|O=k`)9o{Z1}AyMHMgDYrz+GSI%&(HUG1+Pdf*urOY$jg8!t zN00>bZl^?*r9#SVN|~fVa9kK7=`g|~^k6OY_?=9Yqs5RUHr!1Z)EJTjXEE0z)XLHw zxiw$N5!=LiA+dNzu5wyDO5gGt=y&C!9sNVLrkUdme;;au6n1xhOCGi9M!hA^2|m-t zk$FzcCT#T72b1x--y!pO0uVfb=w~oDh z))t|wIr9G8#?!J4@PB_UIU|01&UXO8{sAU>|LdI2w!8;YG#&Z4ih9J*MWC&Yv#yGn4!fBC(vcBq)cZ9lfp9IFZrJrcYQ1u?dg9<&YS@1SyR?O>vFTT zBiTKw{afwH4JKDJlbO|?XRrx$ELoG7mGfGvlMEEQc&X1OgT`FF(bDBg7e=LX1tL1?XS&vQ{2ubMpvE12C z@8K&;=f`sA4m*O8+@SR4-SP8`8W886Ty$3d1<4T1@13e*QGEaKgztK~^X~1m6f@0c z>IclEH{O%$I>DnNTHlj9(P`d!o5%O$&Nwsvc{RBMxJ#dO4vP?@+t;qpvP3r8XvOu^< z@&<&$u*uf?Y_986KWvgUpUs`-6m*b>p*|}G-LYF9hPppj+7Xhd+4tu*oZ|T_FaEke zciLs~FG@XO*8goTI=O#evdivMVb27QvE!~}ir^f!rBlU3u4Do_6v^R73t^e6zwOG2 zjE=mv?*2Bn&Z&K*z?!5pheQ=wY=Qf|#hqg?;l9TvD#{JefqOWSg` zxFpnz8xUPrW)rn$KU(mAm3`xWa8l7QQCt+m&1>H%7YTXiX%362_3DHqg*my9m`{cZJS-W;CF*0#wJ0mO;P17MM{P6!`nVjFxs8ju|b3 z8h7MsJoujc@1rHm2QY z9)732h74RXvKl37e5btzv+n_xhZLj?Su)mT8Ps?n*SjHYgX#NVJBjhTDkCFtg&Gfv z#2=0PsIU}&Pn`WnQ8H0GvJL%(dv+9dksdUHv#)42mJn7_F-^z8jVEo@rZF5{%lm@h=`%-KGBJ!kW6A&&n%nTiqlSd2* zT|}bILSj4AI`L#oti{BAv^mcPQY+n`cR4Wj$b7olD(_?6*JuOB&L{GVrroVNcsplm zR`Oaer_#ee*M`NkZ8FQQgOC}^u+2xthCU(@9RJ7F!uNB96WKZ1Tc1C&cyx7Ir-O_C zR^>DPU^|eEz7ygzJ;1JN;RhPJzGOd}ZY<{zSOPZL-i8-$QkBZ^V*8B@@Cx%1XXzvD zjM+^MGBT#=rp6;W$sTH~s}_FPE|%6&gN%$Z*HPn#x!!E<_f!i%$`wvZJJ&-EGMo-9 zVbDX3ALTZln6|OLT6na*#(HXyk=0mFjYn1ETu+YPYT?K9W8fqSu~vudLrH5R2_efW znRSo@DrC^X0E%_)Cr=9020&3V1GNEAKXJ4|=pjc+FOQ;|BIj}>P5GBcHwdjOEG;`J52sBS-($pX$t0Cp(M%DO)EDHf8 z<5fc7&zxE5H+ZKLyTwzp-JBihAxvP?8GX>1dC_fZYihYI022qpKBR#uzDQ^@JIGwC zGo$z|3LPb&Lgvvb^YL1ns_9w7bdIId(llr!0@t1}Ex{~A#H2$@Z|E@H$vW)%M~9_W z6h!WuBky!4#R5apsY)af>NiK;=}r<@H%H#-PLlQOX4D-IiI1;|BBxg2+6NNJ07>@! zYa(_JrB?CP3U4?mB!TprsQ(lXi4k~hh7-su86ZjPer*Ox{d;Y+!I=j*ynyst;q(^i zqBt|FgZ;5QZ`yKI;PAm4t71>ZnJqr{N1h3%cJKZ4DO`R{gny z=i+F(Q;+ob&@Eke!BL~Jf;Z(fRdU{MS#7g}yh=H{6}+IG=d z);e)141vD6*3A-=gCXq|uv+yRrvd8Iwpz`%JIYuRZ4V7euT{BWomHd*s+#(O|^_B=XgTq~$ z4R48XGTiDo2KIVKg!y}e;~1EPIP|H&C<C9DcsG9PNE+k42 zDJe@IIt<7`H6Vr`u-hYvrLz+5FgFD{+DT14HKQ+#V=LJ#XEf0-wFO5SxvGfKVhCMh zQq7kE+Le}fXeDM8n-ZTq|9Qzp+b=bK^gvU3YRd(h1bUXB=Q}c9kVg8B$h)Qs3K$?c zaAtg?6E3Ku?~I5_d5PqOi3GXD-Bg3$>(@&n)L1XLuqBMJ@4eVTqsw|=pY4wM*aAo3 zjtL2KXRh%>rkb`}oPz+>&T6Dx&|VK{gf5in!aFnhC>8mg(S{(Fwb%2`h(MPgx-Os} z->EKqJD2EeXq$ux7?`)$?Q?snsaP9Z-rD0{iOaX$eDmux6YkBl$kJLepyRyh!+BB)pfYt4_O(VrbMFSBT?=}|KyW(}QhK#> zx_S*GWs^`sGFwIfftw+`0KbWDPH0Xemd)AVDv1H1h6CuprI%@ItevabaB&su{J*yH(#5j*+xQCaKPH#8>svI*VN4`|NRt(zFYtI^u7MWkEik3>_F*4Opr?_>fa)D$2z*fDJe6dnyl! 
z$-S@`+F&36mk(Lk>6(%Oh`~t1;f$t_O~m!zBDxq&pfSHKxR=vK*1-+&u*N~@sHFz0 zrjyyEj8NMqgqCM_92AEa>&UF6725-hnL9bfMAj)LorB7lS<3{n9=UEK@6An$qHq{< zVByYBZ{v`ah2Yy%Z#yzEXk=low-^+BI;;{nmL-H&^lACv(#uwZ*hFn(Dp={rQFRQS z)6x@ke&grmbTCXQh+4r3Ee7kjkHh0IEDOCmXds#7cO>c5xtfr5hIK~kftyP}NPTc3 zgJMfrk+;IT!o*m^M<$2yWbiuqQH*gwClI!K`H5G}52EKnRwcSeTZdB+sJCnTwQZ|Dh0+P^pSq@yRxNCAmt zfTW)Mb<}mHtxbHj!e=@EHc24;b>st3N-_rTiwHK9!bl28Bm*RwTK7eM>xHjCx-TN$ z&=&^MeGvhN?~`^}(hm3+qv*>~zW?@Um9UYZZ911}TY!hlKCZGV9@i6QGAL;u8;R+hmK{axgvt&~B5FGUt@wLG57zS07p zvle8vf2q9%G1HeKA8ln-XyHp@%y&zlEXm&fdK6u_dgRb!vOwpDbTIKeO8w&y3Om)5 zak}A3c3{QHcI=Zdjv>6rGZxYfjWCNG0#~{VvVq&RFZA`u?+fvju6{i_%?0d{w4M!K z5YJjasroIMQvWGyF2m3novQ@bH0w^CT{V7&Zc0t_@l zha!%`Nf40&63GBbQtP3p%Ng)|1=2&&sZPX^1kyth$KlR`I+bead(oOlpw5Zyk{ngW zZo~HkktOiGsG>OQ0=J+*knabAkfL#tu5prdPoDWe6_-7xHvD8KzcYL&dcEhL4FYV=)Z~)gk*pug8U%z(SLje(hnjZ z{YMf=KZuC_)9I77)zYKUnpfv@{R15Y;T8g(&~WB#CvjxlJQ@*`B}CSWI6n@=Aw`!+ zIz&i9oF50`kfiF5193=#$d5&w-;^j>D>Pr8$2284cSH|km}kqMtd!;AW|>*bOd7N9 zS9Pc0IKY>J2CeJLmJ9vV!{QEwnbyT2QR%F*Z-U0{EhX_Ah|w!B3%wHOzC7=JFnl$< zIauNg)2o-~F}NLl7^oZaoNjU-#knK{Bn1cEkUfh%XGJ8}bA|IY^{#%ttP4 zd9BGZk^z!r*^PN0B*<4F-Izbw@e)a^d*)g3un`(0UYU=c)Bl`gBwQDWjOAho+r&oM zM@{5-Y1aHa^EP~)o zLhyY(%$m4bx--A#_X@e~`;+JW@Bd-n+(j?O+wWbM49j@+9ybdm@JGn)c#$oY7MbR4 zs@6bKTXgpmwa<(=xlVV3WET$MW=$hMoyB4|h`{tLLcw&MPFl0NY6Z21%S@OQs53ew zhqag#kgXrV$bXw0eXDX8-aCiF$IL zCrukH#h5Vv+sX!2@;Js1-ErrV<$>UAwK?86i(+UclUyIQ={!x@Dpe{4L3fq8@F(PGnK zs@cGGBiBHHo{>PITa?H7Z2@eu_9W{h!WJ5C6!g6)~(y`#$pv>Z6hGpzmZ)-Jsjd((ijWlmCkq=jTvJ# zb+o~882P^4l{l>(G56lc!|WV4fp5({7^04)RHeF%92`6@EVZ0%Ht(?JAaerS9)T-g zYFS&w#6m;oIh*hUzHeL>Zr*J*LOL3vcH(+%B|mo|e*|UmC_V|i1-ifKGI51@4cxZF zan46UbfS8AvyqYzQd!;>cca5FBz|NKv|*9TR*wM=k;m*cRI+o3Ew(~zO|Y6mX094p(7Ye&Esu1YFOtv zK-i(_uz-Hde8(lB11dMs@`uYS*dZ=S7`Ke6U4? zGdOBkjD2kH5BGxrrQP>k8ngp%vhjT2TuRx%DaLgU5HK0h$=Uk#H3f@kawJdnek%cSNBz(A%qAe_ zyhm{G*r>wMbPUjz0w+Ht6A%`Mf?}L{=jC2nS}^>O0o;S63q~aDq;lC^Dz1%`N1S6s zE}}d0l|FY?F_zZ&2l?pC{(s0Rwpjk+dh$Ys93eBMhqooIm{1sbEd?@U>y!8YgSht! 
z`fUTmlaADDr){ULFM7k$_6;4+r+}%c=@YI5Ajw~@Vye%}z1e2kBb?kiX_7G1Z-u$d z{ZMn(ZQDY1Y>{4_5G;SMu{s5WUZaoD?KFu#@&`ft<0~}!gFJ7HOG6@wKJo{7yecTPIT!JcVq%`8SlMdmh3hnMxxnYr6uBeeR zcp%;0;PMJgK^*9rPQ7^xi;`IiQn-)F<0_VQDN5alf+}^>IZ`~8rjIiB|pxS=b2qa?$KsX}=OPCu1>HW|= zO)_40W#c8i|E_%1MItjkjMrUx&g{K2Z{vmH|Dk+zdH-+bHP?JVmEXx$`)-VMi<{?e zhQ~%AFgr7_RY!x7XMa$XaaVk}MNB$kMq)2n5c8~FZd1Y=ZL5>qK{9=DiNyhQq(}J8 zcxfDHM(1gDUP-n@&L_pu0kDlNEKk~Y5$88>n z0_Mw1w+oQ|+Wpfi0HV@y`Nhq~+b%^#-<| zB(=baTWSz}UimftHP^2rHK3&uLheO2eiYQRv|v~AIDWUqwKg@y^)FH&bsoWoW*W;$ z_jcPQNK|@m+s;fAqXu!ptXpA?_sRj-rGZ|$5yYY)Y-4;L716FA+96v4{etn=^fE=x zQ`|=8V&AmZV>)K^C;CeRpiM!?EV8>9(KHFO+?z3r?CyK>y+JI=?!Gs_#d$T&14eS| z^8zS1_TKz+o*VBK$0`W?Gx_Lq`F#I#lPl8sXj2_3N-qLw5m)a}y6|py-4={treKTw zfTx9!e?S1*~b@3Ix9SVGw8P#8MH<1inU@_g<9i^ijPl4)b{H+&&ZY2xh>4Gc~471N~i}$(b1NTa)wV*P@85beo^>z0nKq>`~3$oUC zAh}pq%P`PHPs{tmO1FLe zee4H^2QU%ZghYBxswuwPWchPJ6$Yciro7S0u~|V-z{&BN{%Qon|t2U>6(G_(bF!VJOh~sg&^m zmJg)G2_mD+Cba*ii$0d4k!;qUv*=TscJZ><DRn#wVe&z zMOX3yrR$Pg?{Q)=x?1m3mQ+v~Y!z5iL1o{`78Tnf%G4kQm3=2ugUU#h?_?6CY#fS2 zagvLV59Fh;;wT1-WN1e*SPx|FXy@gDe6MqIbNM6fJdodfo|k$R19kp$Ly~qL$UpmX z-wqL44`vcl5n3ceJ34~?VAhT{F&@nNqX?}Bv;M55od>i2tfid?B_Ypei$A8Bf6hle z1JA-S;tp=E_MWWnP-3#`{N_LByVg03?eu1Q5)p4n#SX-Tg*~L50;?EP@u56d5PesB zBQclz6duk;+xx%H-iYiz6e`oqugi z&rY#0_svm;D9|#jqbkQuNi|&%U~132Xw)>t4=EFtZ)O@uMl-j z)A&uzZuYCXwG_JG)IOXXDNicwgkFa+RM^SG*%->F@Nk}6)SOnNu#<=L+)3k;jN!xi zVbf{XxYkD)gsyR~4@tjw0(x}qhhb}iBwhPqp4+xu*ShH1 z5A)}^y%WCDwIAj$`nmY^j#W6!@{@eDrT^F3d@GIMeoLmjvCqi!=!4he=?-asU!vqb zHeg$ocH=DVkPF15rd#tw3JGX3(00AtO1rDug+MBFx0~+%BwO%w+~y~F-ez(A>!!Ou z(QF*CvEr!B%L`o2cDcI>j)mJ!KdmJ4<)K;A?ri9s(vqUF4m5xj&{zi=FE99wyg1Ss zo|hM@Zm*6cXuQ0@y=)dt9Y-ACd1b)`Q)e!wWL$552VAtyIh#ml3Nz*j%7%2nT_G&h zB2Az=ek-v)!S~c94qtZ{Hi@iHiPgwrbvsY9y= zdcLy2`Dg{!#}TKSZq~3aWNIdY^vQCkimsStNUjxJx!b))Bfhj##3?VV2X_z%xUUEI zn={;X*6QYr7uJLO&Ekds&&@FoTD`772=DuHW?Hn-TlAZ9FBoW;q^R0y1PPW^k>(;k zhoXfYIUHSpZadU~zEhn8KbaaFWU(2O$QdyMzAU|c16y?0~2}Y#*^+mwpI;WoO>>vfJEV2F;Y)R z1^F`EGT2TCf6;&>?h_MY1B9S6fe;%Y#2Yd~=xo~?3OsS`1k(lx@rJ_r+u{#9LR71T zH%TJ=;S&)ej2Ar6TtZ7;!MRVG2~0zG=a@1ZroH#hr?4s;SkoSrloP5DfKCJgR3X5d zG6GZ~z?-ruT!jE{(iGP13USoOMNI1d)4E-O>NHLL&~zV`=UWT@b_KrD^ji!5b_J3& z{Z=*oe_7KTx%H^PRY3D*5}I%6DO?4_UH^Uy1}9M4n-by~O_+gEw=XFB$nD95yFDEv zYy@+f514NR^S2lL6>M>|5zOCS@b?Ch1oO8SxHssPIjMON_;(9Ly}UHt5qJAdlHX75 z&VZ-i3sW(5!NBhp`~?HCT`)k>T`)ir-Sl@0Trgm|DN64h1-sOkahLLZMeVI@A6RKD^S8!2q zq^MNeab>5dhRtU5MwkIX$=|TxS&4L8W}DsX9B#CBtF4CHyJ-S+Q-Imm!q~coUBkBf zmeVy;j3VRMZ9(8Bp@kXX^I9rbev zB{R#>`e??n6q~f)@}imR!S@5$cIPmoMmUQ3Q7wwn;%&9f&He}fF*?i7I>@R>cBawr zj!iIx9G%><>LP3wkJ+9#erTKe7%hyEjDML&xBa4rPss4LIsxsHW>dw&+x z`P3N{bu^7jF~5(a-5jU?Oevj{uA5#i>~%0B%|$q0T0BL2Ir~U*rQjF!lL_YudjpPL zOh@WKk`siU7TSTMw`meOemJ9}B+rKnz3w(eC<7foT-fgJYaUl2?>y^Dw6O#ISeA5{Fexdu z&qap=DdkZ9ct%RG)yE6onC`NDu8$WuNa|Qp`&=I{oPJ?QLduU9&bz?YhA!5IdkRsX z`7OHS@VKYY)$K}}!=pV3(JiS3s7vd}J%t{3`kBVWB1IVk%wt=y%M9Cgx zOqSyUGZ4hJh3Ec4{rChjY-@6r9>mbk=PTd6o{QkU2-!Lz>7j=|xGGns8PIKiir z)#mQbC6p_ZHz-mOihTHv*_EqS_!_;o?78pQe9d}FfKniS*!3~axi0n-*|dlF!l{5x%UciHKH zLUf@2Q+e;*GwRQB9+vZhnC8=Z0aW^i9e{S@X(m)y19c0OX_C6}qdCQX6T>fd(&jXS zlX+;`x2iNjwe^Brb~>GVK}7s&tKr3_3VB}Cf*3CEup6ja$+hXZY3>QnzL94rMyL4pZ zVHvbT+1l_F3|g~i+ig?;f(#G?GP+y(;b5ck6=WU`HY!Pwc~~~8uCWn=td&GK#;k2e;HuKB~f zZnQ+&PMPHZ>t*Vj!awWDwc*~!!OaVY2B#JlwhX!`lS$RkWOFjsEz1r}Dyzdzwy8inwAwSa^+bXuTQ~6J&MO8a4IezFH;a%r!66Yg3&Yu5 zNfg9TkVpY9XIEt*49s;<32)E{B}I|`N9)Y~VR1~!7{a3r zVw!|UZ!G#dgZRpb-&piF{g8x5Z!B_LNGc#nrt*!&Q+9_WI_ZtY=j~!N2O39@ywzybi7HbRvRJiXz{^&<`K-W_h% zj-(gV8BQvsgY?=WBI@f9K~N~yK0>Odiw5kRfwT=RvgVH`a8XE^#nbWCnVPdR7>Sfk 
zLnK+W#9v-NR#n4|JnU4|4F5fSvcx^)+ctEvBp8@VI+nWg>y{!`iLP(c;F7+z=ZcRa z)D@&J&i|9TidIMtH__y`;Q(|E0?3Y`u>f736}=NH@`+@+lV5s@O;@eg7Wl#3)F3*} z;fJmdtTmgAB%|&x#x!*TP5}~;fwRkR=R3r40meUQG7P15IzwCmUQ!nehhWPB-)vvP z8Tvj(!V%3{q@9%_4K>N%jt2k6ZI>15J81;B^|LEAhp}p!$r&%ejMDGzQl1z#Q1+ulA zDQ$A^HsyJQeGq5v(ZLb%fnMzPi=j*)m@`eXuHI4f z+k0>>jD1IucZ1wQsyO94irg9CldP+E6i;)}#S~^;y`#AKBEPP3o8#{lqxYxlswT%eg|ZhD&~ zVoZyy557a=F1;+wB5VPAcA&*1$c)`2=V7xMApr=_`LG0eu`~Q6RfC?8X%K!v044m3 zp6@%L;vvR8lB#tXJVRDeNp3Q|!|EK(VOK*_0;bo(QL`hY>g_T|kzDPEd7)73&dESI zjDSkdf~Ku_J~&+-e#q-OF9Fec+m$HhWQs}Fbgt22IFP`>&oqEL{qDA)T1!WGrN&Mr zDeMARsiPmCA+JwEL&D14#cvX8Zh7m##{#f`>v)16xRLC7Nt?qAot{L~C3-J@alP*D zP}FS|M|Ip3j_^mS{4;0}BNZkX56cBeI8ngG!+PS@VZ3w*r;sfW&BKbAks`=WP4K(9 z7(%>z3ia_vc>faIS!LO@HSB-CTT3<-&4rZ zMs!?Qv@@Cz)2$xzD&eIL zA;e54L)bt8gK5f82jO7gprM*R&mgxMKC=76_4ckma#VqFpoJV&pUP+deHjtuQ@gL|x2?L(r*>b_Z(EUsi1!u!wiQW; zcwdois}~qQp%mR;$oHUya;IyYcKO_eUK+$`aP7^JJab$0Wi3${P|lApWm;!7^Q zEL=9+ZEswDrC4@1l9QCD>8r(pd)mm-5Qz-;aDru^UVgR61KZ9!Vfy*k+G|M3CL^mM z?e=T!HAw$`vEbhR5=g1AWVn|mEW=75YPFl%t^M3@6#aXdq!?6^VNmtp+&7B;ZA!iZ z>l?*Wf@G4*{2Rq{x5V#>1PZxT`c5(WH-_ykyq{pZ(`|k$R&*JpqP{#2x{P2sBb!Uf zo~38BJ=f|EFvbj8yMe~{z>z{zEtlp|+>8m_a_QLD``M&BvQC>UxB9`2s&zp6PdGMW z$nXfVG=L|~9#U~0n?GEP%M-DOmcZ{6%P0B4=2ogFWQ6EAAgC za#y-z58gcDI6^kqq+LcBY%&<^2eqz-8l?ZZSU4+fLm4)v2cb%O=(7n40qvQw=!t%(c(H! zL1h5q(c%f7AC*Cqj~2PlYCvW=85H<&vG6Q*YbOZbGSkQzTPzahJXaaop-|S2d}}{0 z`e*PAihq$VHrVBWWxdh{Q9`eDxQc@fBDW8MgX!?edk5c6;P2W)RQK}?q`i2sldb=eXnKY$W zl%n4(<@+zedFs|*1!%ET=RKNX5juvmAAh>6v=Aj2*4(`2_bW<_>V0~#0;rN1H=5#)zn@T5~>-mff=zUX(&^f)AB+5Xl*NQSnmJz0nCfw`Mz>aC#CUhh@ zXy%%A-(Ab>RMtRTtuSo=;~L^-_xh^5S?}&XCu2ey?puUc>m>cI3tJuNCqy z@yH`4ZWZ#pDuD_URCzIsZ;&l=Zp}A4u&qP*X&(fJ`UXnd{UMUvY=O zUGi~<-4=J)o&=I5FY}PzUg8AjRv_v4Fd|9Yp;9XC|H;N7T_;am({y7J%W4wda)C(Q zT8`gd@}cjfmL+M%zAAcQUA6Q(r8V#I(F-E+@05sM(8-B)65#JFasScNIypgghLiek zI7u>`?+j}aN!og6Sd&OHobN1gk&psS)=5IX>u1PY00bEz24p1n-W7O@uhyRDEs`Mf zuE1M5e|P6I#9QmcTXzQDA_X2K9dD6jsO}VRX{dUtrT3NAe9{kwZ(CDs` zcLR|`rN67>-9RL<3GOQ422$){4`SudBvu|P_OM5i<%=-sN1wLzr8<~VKVT+3;iEzN@)gWJ z8l*2tYs^dE9(C&OpP}>xpo18YDS*t~LHhC)WbO_tJ4ukaJ4oLiDEf(KNc#3j`hFrv zUsB*f(n(*EOp{Lp>ARlS6-sNq;iaz*-hR4N*%beq;LN?hE`+5;ENvBpyiy_P!wTNW%a31&K!z ziFcnQo|KREnmYGCZT6=+h*3WvruF{*!2WzS=-wbE31at4`FO7zP6qyqC8B2D#&CK! 
zAz}l^wz=`-@k8ON$%k$GFSguf)CtFjI^Es)_*U_EKUH@((J3qZ-Rn&XfpF-oq|g_$ zQ4lG=Sn~dE3e(vymT;A5T_?#Xe6e(fi=-gQD15QB^`iJn8wJXKxx_U-JD$2{phJIH zZ}JFG0Y}NBFPFO91z>yy$1j)8bpg>Ntz9Igw88a~M_>CH$|C?ehyfYNqpt;d#8;5{ zT98L1LFQ{g9_a-BKRiS7XuagoKLmM13OqxmDc>&%OjnI|5k}J ztC~B#5=Y;D+Rh}ZLmlcn>X3vw-wtz!ByD{=EFC1F&bQ@E`n+Ngd&O)Im$(C_?ZjHR z{pcB$gT9exS_t}Pkbv6}_RcZy?(h$n$tD*eE8JzwJY}uzd2hh6xsEdGw(tmgu|NdH zS8!pK4lWVKI#<^@9volXoLyUJ%r=g&pKY~{HaH?FtsTXda1$R6WL~Nz4ruXMBRghx z5-H&dD*n=JxEauGuTgQ%oEpT1fQQQ9rbi+lPag;j4|jS8qwe7Y~>GO*bSFGY^;iO*bSFGY^-z=|&U0R|@GPKf?(QKu`r@ zKvf499tjhiub}!!nBXKq^^q{ab<4z~&(H+#)dYVuOmI@*LDEfdl91uiFu^xeOFu2G zd0W|?E6^PeKP^>Gh(D`Ywm~!e)n#TG=W9nW+hp(g_?TyOlDRgX={e>@ZvM}kVvH+% z%fLFmB?&XWy6mlNl5oMR%YKTHgl}J6_EU@`Q|#4cKgCEg#a>-ziajKyW`hnJ+)|GI zTRGn@nmyC#zTs_d!+4uTwrxgIS*R0xiC0cc98mlX2cvke)(!r&PHQP4w-aQOL~<+cQhl%w5NUGVB2@F^UZ}q zNKZQsqrC{(`oZIRV03)Ko8$Bta@|t)yO<<3YhG9OyO@@y02yxA%rcmPuPd{Qc^3X+ zWl-SOa$(cJMvHB;N54{BoLk*Wxnh0hXSWRC+*&^Ir1&`=4(ZpI3w;CUCRtxhVB4Vq zJ56SHKD#mC@e+Bx1xXlC(>)7s83u|&#oAokz~|C$l%qG4ogzgV1|lOcko4Z)DEo~% zz5?wx%BQ+JdP%}Szftyg^pb>u-cT;N8@x!uKyN5-ILnlZ4U*Bfm7neo71cqE`T;RX zkmw$GiW&nZ=5Cbw&M&Dfao@Ty+%$v*J8%WZc z_nv0my>ol{nejAl(DHD5*?XEvfd@&qJdk7rZZG2vyg83Muv#tMQC{JYLR3pzB1tkKmox`F79GEXZ0l^kqU zZF#?^9Q{7B_D>aVg2IMw7i3k(b1S}!=zR8~jtd%xX#Q}XtnyafjyEwRI*c>jSlkwF zT<3Xs;8J&{HJM&gq6i`L_;S{-t#&&|0AhT~b?|@={8>i(p>SP* zI!u(-*8iOR$AQC*2748DoAfVlC4H#Y96VHeDS1N1vgX=~B){6$NluheeQ>j{vt>{v zy%>j>5cfUhSQ3w5Hf{FvQgOL3U^3Fu%w{s&rkG_gN#9%cJA;-1i1(H^o|aW%PQSOz zt5pFe1>RRKoS7Do93>-zDQ)+Cnv9anf+<=0zA_ibr5V6{Ul}igHrlGf^8Mw=J(EQW zcqE;~A&JC!f4QsAjBt{`dVhJNi!mh$r1zIO?C=&X-Bk(4KPpG(_P^HcTDAp^6A{|Z zo}Qby5?E(fn6BbtZ9v%~xu|p0On5deS%nC6!kxh;Du*_F8jy#NP!c{04WcoKQ8+-n|uY*AD1_r z={q0+|How>-*W>;)juiw$49H?Dj*pkDL(d3GMwZp_!BXKBZXWAf0D5PW$!9;S*rV} zAW@P5l7d9<%5YL#(Ou;YfnRj+`Yti3!wGiz)3Sd_uWF~JNd`!Y>HO0SC&e56X}RhG z8Yv8_SnT-fFMgjgwkz8w8sI&_kvpD&l!d$JN!R4x2%Hc~F=hzu8&*ry|I0Q}pskJnWO z5WZ0M=fy1TGTixN%b>;=%KrSZGDv^1Tv+c1!o}B+;fxb2U?n(g?gj#+zf_LCnGS>& z1~S4x$QJohSo8X9Vfd2PWH)MB7``Nd=LP~>1YiT-Oa~&Tf%tMZ5IGx&uZZqifPrTN z@zrb~auo*RYvsb%(}Bp@K>UNq9O~#A#BY=fL!R!0cmHEKdLX5{sP>QL@~NKg7AO5r z<-%r96VenQ!zm7Z3Z#Z6|D?sl_=gM%d^@8eX_pbGNCq{&T{bGhaf*v3_%@p|E>4<^ zFlETF5;n;+0YLh}a`bO0-DPT#5$G;c>%p>rvX#=b|6uuyOFiASa6VYR{JEa)YzF}B zdH8?5dNV$B1r=kTQ|FB$0Jl$OYG8rzOwa*72|FBHRfup+xApa<%y9+=jBhcLiApa<% zdtOw1G@Bu$L4u4hL&&faK7NNrq4ST`gl`goOPYZ+=Hj!MqcZ~lmfOjUQr2>D{lk$>lNmQn=oWh=arRIP!+1BS68C{N~<=F z3sQ()UGWDC`U(XJs+&_Jl%@a~E_BxlK$Dv*>_<5&+R=p9R8sj*ni^yT`A-HlUQ;po zuL;A9Aw-em0iNz|&XM8H###X@!P!_xcaXlN68&aMcWnoe5$LY%pj#^bXhENC2i;OR z#hrp73Egk0@Z`3myIyO)rE=kKrgSfe?ypO`%=JB@`>h$>$)HEC&*;u&qt()Xs}$al z(w!S*sPo23VXwm&t+Vu7mFTT0nWZiMR;8>Ci3CNF`Ar#BNmGCf=i0IYkl;-fAE2)c z3cR`E9ZQyW8Scdz%b>=aE4)|}+93UwN;>6=Hb2O4A>^u|fp|-W5OPOmkiNYV{a#9D zDZpd|GVA7{+biA$)Mqyj-CjA-sjVa-^X(PXR!3$|NdfGf-%H6{6q(yib6ZP` ziwmtC7hG@z^?Qk%hZ=Jia94@BXxxL0dWlmSrb8S^|D&?bU2)iFT-2TnU+;{tH8!Gu zR@QtKTjQ;AQF)IM7nIqh2;mh}ZHl4q2Ny1ZYtC(9Z%d_@BAGEjgkx}Y;TS<*&khRP z9WGuQRtQ|`-_UM7=ZG;g!tqH|_f}nJcY-})E~=$PLYUnRc3HY@f8TkV075tLjp!B< z9Sa9ktvQAD61p+PwGxaBe%-|>y`C1robin`6o2PDR4(>>u1-X!_SfV?5~F>rQt6L> zUp}Ob@*#bq65U_P_rFcl)1`veNm&&f5t`>tvH;tmS(7i|S9l$bqemj5_la9n-^?QC z8bgL1D6%uvA-dkY(9p-9uLDOjQ`}uVwd4pVNz=ygqje0IKkxTg{bqZBmjZxDIY!!B zM;JxnXsaIlC6^?*NV3Cwi z{A3UamPP=P;S?#$Km>iV;$!zM0}!9A_=6>uff)LvR;T?QCI#-T6fWw2ez=m!7&h=J zq@kal+x!6nTrKK{ixP3i89-SwNexIz24IGh)Z}|B>zw4a64vCVtyvXty0>z{h4BH8 z6SM9!mBRV`KmYhRoogh0Qdmia1Z0Mll*rGtW2N=uGnGx}q*zIj{7i+XC8Y>}H=^=y zv?%+*kg)ulidT;GS)f~-~s4)n$HSn zNv@6JhR;=^i(Fx|5=pwnh>!H|bCoXV5+X_4pR1g;H58_ApA*GzkV9^x`u@cV4!K2h 
zQRoqaxuv;qRuf(iM;o}{#-HcMudKUdj0tqu!R`Xk^JKbsBE3W?Ob=JgF8IX5V%tOM zn_Wq|Oj@>>NIsoCms}4K}9net<9jFy5%WHhrb`GcgX$P~|>}_>AWNpW| zV9MS#Eb4^#jE1D05SM+o+6mG5yQQoX;;`?k6aTot>ardSm{Q&EmJ2O!a)vd5KT;3u zKK0NyOqnCDdE!4SI185-L7)cz(wxzp;-~7Jz{W)`InP{0AlMc|Nrkep@fr{YtsGt>s@m#!4_a<319yLtHIKvs077fkrJNUvfwG zFyWLID|10GKRct)lq`peXq?4QV~ot1f>uMggTc%b)RGEdtp${#L3(rYrNlsAKi9Y3 zm#dQxFdk{=!Xh8Mz-ehu^tCXvit@fj&uF+T(o$$jjz^N_`7<}h7^)Z-VV|fm%RCRK zS6++!$&0Kk8mztCDr9bi@eVvVX|C__XQI{UO|3Cn%EFRU?YH?g4);b6hk9LZhWY39 z@Q41Ve_wxHRx{)FsZzNzVSn&`ta7bVxqf3RmRXPW#40syK#jcR;~CsD6NhD*f(j7m zKE-G>;En@czZGqyRf3p4BqWt^Q{B{@C#?W}?>+LrdWj>H)4&ln% z_LDihlNXsfm7fFXv6fDvQA?2d=zw~@MsxIDkD&;D z<-<)L;HOZ9GA3uhbsM7tb&vMWEi2u-S!tWaN7>XBKwUEvzhwSqG96c%ozGcL<|TWD z6ebP#xE3@zBNwc}RgUqOQOzWPII=avn{UmiD4;+{5l1GhwAQEvXLMA~;htqmYH$j# zXMFNstoKS_nooY!l@c^KR%3w(p49?%a`dAsTn7+NS`P{lHuGGBVt;QHPt3_;|y|Uo@a!(3BPi0YKHTEC$3$FW_`Qb>U15KYvD>qhuX+C!r$em$igvN_%PV* zNY)#>>cBn4IMFK6vcu|O>)k_oUCXLi{n_-UU*bfMVh)eoz2aO2w~8%c2hSULCA4T< z8xC_S;A3@la1}?{(S>ql@;a`0#72H86GpvC8QpWx%o{Yq?)qWr zD!3MQO%Wl9AjYM}2xo)BZocf``jj1VJVhfK zAwtyq3^stO0)C!Slwr)3rWr(6grsuc8lN>n^(w$bU>eq_@a8HrFsN3V-nFt(yn^ba zYbA9)ecr6j@weU%k{;k*=*i$iSENANEBBi<1P#pUjrDUc`FJ%sD zn-x|MyA$)>6*>s28cIk|)kvbMeWmO-*d)Twn)23L_TGXtgcPgqp2_Z)-Py8*VWvsa<#2^UlpMYaiAEX9$x>5^XjtcBJ8A zIkl^S1!_b;lSvc)&JEK!Y%xDzi}{gu?1*2}jiyX+;W*161*p^Y9`g&`c7UaG?_MbZ z2Q-ZOr_(yuc7!kf4;j!8j$|3M0=HE^8^ZoLTk$yK80R(V!+Kg%IbuHDuRA3VS}U^^ zc%d?#Z#jC|L3ws#(k#dp+P@gQs}}r}X7YlfKD|Ro2thwAQ>Q5)i`u^5;8VpX)gUde zg~JeiQrQq<@ECXKY=rSa9Hbm)64_g#;z$+z!l1?alifDwszGtu7T?7IrVVES)FBD%RdJ(^9Xr&$hY9OlLBN&AYcyiyzB0)SVHf)tt-RfpwP}|rA$oWI~n-1K)JaSRar?Y`?<;PBTpx?@mT|r~_Esfnz zuk^9whwqhfr0|uMVs7KrQQKH+eUldvxHf0QrWH|-)$`hkX)+oYh+-N!ndAbRb>55+ zwbFX0HI>uTg&GWBAwmI>N-jRe@7#f4j5KM@hx`i9YS4oUZ!0g8Lu{nwdSSgNpE{wL zoZwg(RaV*9%9@1=z9EdKv}MJfD&Ja4h1EFAkMhLAEsuV(u!HF`Ch|(PJYl4wgU!qM zW43l*&KhKDrF?H{4IIab^6U*eZp<>+vLAXcdlh&R*N8HFk$y=Jrg1cAgxGDAaJQ7{ zY{JY-oRwiSg{r27I$*s$i|5P*osmVD*Pr5~8bd1h-L<3oHf+I$8HBWmHZlydmSJ=o z3-~Hmn>GR{;&V5(+y#@)y?fovNPA0I!nOgf_l5mYN{9B8hT;bC$8Z{Y?3~sT+l)$vvW{eSxo}jVDRoiL&#UdVtChr?fjC1XTFQ+xJQ<=SM-cS1< zn8MC}RSR#nupmo#7nGh%*xK9c-On>F8K~-!pJgif5LkX@VI?3^%{M;{!5ov12P|3= zD#62QB}-as4=jY}3{xcEGRyNJO~ZK8F+9^Fkk22pecBe$D@>C@RD9Cjc< z5ws!E#zO3EiAOiDI<+cYA$!(U9~0{ZojnvrWl!ZKYQ)X7)tM+$O_e}}f=vZPGz%0_2}7qx2UqmaRLaKsC{`LsaR(7>~n8KNK6x#cfJzdYUdr zI^SiTs4hpc>HJ>VO>?C==#2M!<#oYdjs3uK^u026ojS$W52QaR*YC+NWhKE6RZLlT zevrqM^~eY1b-Fta8jWlvJ}Be9HpK+u`{nwsEQ2fn_@Rm^JJ0XuF=g2B{qj1QTLetm z0eru_Z@)tI73;Z4{=IVDm*Rq6*95KfitB}(e%o=z{Eewu`0dX9NiG7iGi3&p4WVR} z_Wlb_sU=9!_*t?}bN*eNB%)wJ#=6;>46KF}6gR`)bMQtQG`&_1L@m@WfA* zf`-YtRoISZQDV|UxfA5{wz3&={9d`VF@C@Sb+fi_KP(e!`prtuH2*Rg!wHTrK`2|= z@!R2tR8meWv}R<@3q*jsk+iPqneHl*cqYxkTsz<~UHVWnBS+O)l5Ob^%WhP_SDN|5 za&vg>Op-DCVR`#g@ByvXw)FSQ#UG2qwv;rvf4>~Bi@zj)QKZ$cA4x_15AwTGtp(X` zGAs=X#;vBMVP9oRqxlp;cyT#w%Q8TzHd?Dabq@NWaHk!fg$ZR1qPw&xFsl6kqy8w> z@9YOU@uO6~vmfa6k5c{4en7wbQL5jCA5iChB>isK`whk)mg}EtIkB+c;>Y>7SyFE! 
z+vF=es8|p0zY79YA-xpDYP+SE{$V=7EQi&(F1UNNA87X< zaz_Lj4AbPrAT!sl=^zc(vIsS)UQ)|v+FEf;VAC=%yfCuM6ts*hZ=yEk8ZRkE&DF+B ziv3ZZ=#pZ8RDL`dty)`Y{2!Gu;r}NoEAhi8IsMOlHY4Dr}5Ty9Sa~+ zKCJQD!`Po5n@Z3ND-PZnP#-9p-&J!X&{NHIBa@=YHgTjB93MHjP50rMSE{KE?Kw)&yhd&&LgGaDoWw&;?AV$Z82TDqbg_ zbOf$_^i69y^-zfqlb5KQO*x*fKy zx9^&(iJCgJhog8oSFKEZ-i@)=I-1qNFYm>;-`FXk>>-K5&}PzWp0N+_rfTrT&J=CR z;cNE27Zd4JOQ^<=5r02+!&gg(AF3=MGrITVRly|La)A1N%p*T7q4onT@P5pTnV+19 z*Il2vK4WGvd-Yck^+`GVwZ$Td+?vd6lJc7|Or3i2FdSZTZTRLtk1B<>(#Tpbs&D75VE0J8z6LL-Y?E_KApW(S*>EdS*h;sxTIgS zL0$skSG*xv{_dCJbt-F*kuK+2vhAVdeo0|wQ<{J0)Ai82Jo0!3kZV(J6mfUQ-fjLe zcMuP0o(^JD#Gm|^wWD}ClSM}-UiMP?0Ipl{eC$e z%Vl8xhmM(e*>p&Scs5?kJ;4=D^Dj@nMKro)!2OiNt?`2WDkrRG;*w%qECb>%IYguK zt7uQW@;aAkx<6k=@xScFo6<54F*`GB<`~O>_-YD~mT`!WK-RCN5NR1foQRvG2(#WT zqY;0_5n?YQfM?_KatGq4y?DtAs4#Oh6;NDdE~5qiu|urR74vS$)Ds8R@Vksw{AZ4_GAra1xjUWxvJ9BNmQo~FOh8OM zG(Ohd%O3^puRB^2%AQO6BSc1eUJoOe(Wt-SFeNLX!k6NWsU(m*`zYez8xGc-6EO=! z!bgStn+|C!L>>#O;gudw3m=JUe=DV0C|g6dvAEe0a|5&t68>$+^!8k+GKI+bO5Bo> zGriQFr_Hm=K>ns9rxL$E12{K6EAW@&4Jr8D>Ffpk)n!2c9fwYb>O!u#kO$(&GF{~| zrop^gTw=}o&mD11R&KzGb@#`sZDCn1OMc6NR#i)>MaVx+WMoaSjP8BgF=8(w00f7w zxTPH|1L0>Jp(!on5MtnELi}tBk(Lp}i`9je_*qj(csp8Hg1A5DP^l6&k}E3N?58Kv zotDwJ|H9E$=ZdLqISo;RHKF7CWx)OXg2s)eWrdm3f9_5bBYtu@-1%QRVyeYn2qlGg zHip51qT>2gvUQP)c8*vgge*(dsZahvP^VUVNmr-32gT&IO3%_@pLVdc8L3d}J{7O3 zQFxYy`iw)Z%Rk=88GUo@JwQY;Pe6^97<)r^H2!O|FiK5fPVg#fKAS*#5#jrSKEZ*3@? zQwF*(#_3{P^=@LCz|yEc>8R-*tws^(9;+@Xc`aaR*uUto>8g@f0$48N>H@qp)=xQB zu8^jNufI!||4UvzEupEAF1<^a{>xrEDNHQSYuX9-o7$< zb*Ym7tCtM9kvDNT8qvrsRrX(4+44xPNEN_Nd6sDQzpMpN5l#Xc-<((2wChV%|G#R);Bd>M`Q{hN0zjx=h)5(t0b z2)QCm;+5)sYKXBE#E+Z+HHA)|i62J_Eu0}I2i8mF97kK7>ng{giCar(?CG_?YvwYqA>+Jh&lGu)W^b5^WvxjU!^LuW-W z8<%kBq`Qpp3{K|WY6fTR&;z4% z+_(zj8;a6PmNvd z8R@U^_xX|Gmw2_*S2=l!kNuV2;q#Y922P(DtDG4g?CT#Ht@I4_@$1mo$iT@9W5Xk( z_$uoerD&^t=^46I>3{kBNdM?)Wq71AaPIuz0BFFE^bilSf3&?aFw{GEp>JU5bbAGa z%Fyr_50M7W4UCa*Y#4T?+QsE6!>20e`bT=t0Mm1FU~pjUl2v$WU~EX0of;mICENL) zk+Ff^3xho)c+4EZ`(=McJ?k47?H%kHIM?4tOu`}Rtn|M`e9Fq`nV!MH+KytPb#aKG z^t3+T&C1DsTG?}QuwPYC^(8<@`g_OJq3U0~5RV29VmUT?zQ1>Xzo17yWat^W)b608 z{V!fnbW9@7_w}6XIo&^6XaGw!-w14EK zf!_YnXDfrlBLAt%h0%T*($_QAV^xCzx%f^8PF@%tFv1NCjrEU=TsW`bV7nRRi%^EH z_fWRaC_Fr*-ilZHhes|6R&=ltXs=v6)6ee=^iZHyj|egfbM%hovr!?u!a!I1SsCg- zJveZ>f2g-#?HLx##evcO-3-_OZ96SYk@;c|mBJ`gEY_f143YlMu`WbrJSzjIDm{HK z0n(S|h66?ioLwk1dZG7>(;W6M#at3bo9mLe92YTuh_=@4(T}Ah+E&-D9|z=kB`K~|1kK5~>vmcw1DEuimp*!lHF6h@rKzEk9;y8{PabghdXlxCC<1ue%>9O$l z3jSWZKKxC6JDYchza#!`-)}JdysLE3epZuH7>o$`J-}c;5bbgpu0x|;%}L}Xdt-;_Z+QVh3Xo;;+ zv}fB+U_|YXy1jwg91(AGQJ1$7S94L9j~{;KqAnl5#w0pWI&2jYx-x?i z2@W_6L3yJC4nw@l=smqC?Grl!UU34*B@SNp#r9FV4Xb^^xC0 zYhqvyI}Gt2qr*Oa1bd7Q+xQh5nK(yEfmfSyFhp?V_W;ub%n^qnc4Ks;IZPu0H%3Q@ zzpPH7N*#5Wl{pv!y77B}Au3vQ)L{ta79DjM;xK|axjEqr$qoVXg8ZUU&QQ)`mw zxl)_`ye0>;2AJm@h5!uFa}Glomgu?Wtsy@!&$aF{n8%XnSP6j#`eQkm$ACHJFt~Y- zjyVj`6QX0yJ3@Y7jZ@X;O}_fD+^=D2q%Num=@ z;v@%?0CU1&5@1d^48CNe6Hej;m=jLo$CK!J7iN#=U>*nNd56JOO!U0N;A1Fy-i6uY zz&!85Y+VxdIBnMDVAcWC<1p)h>2Vmml0-dDn{~kSIBnJ^(Mg}0>vJ&cfjQ|g>w!7x zFg$3FPWseb56nrQn$1bn=L6oHgJ}k)&taN@>2nyY4x>IF@Md88e88VbqEn@#Hh@p$ zV4eWxl*2p$%qfR?0+>_HdqaL;PIVoz0pE~Br(IBN$iZv?=Cs3X0OqvAYyjr83yKZE zoOVI6F^LA8HXCy=8-W>cn2o>;ILt<12AnnV1{>@y3@EdiO%~B z-Uc?`h{SC~E z_>8r`fqBtahHXhS;xOBCFx!9`ahPquj5y3TU`8Bf8!#gd)0#wM&VH>qm{wrMyi={f zjCrS8ff;l5YXxS^*>8IiU2s9MJqNQLmlOqN{Q{lIUeuOm^g8b^!CT!|VX&Wrx`T%*(Er>;UFv zQ%s~hy;Aq@2UTZh620pE+X><;jbQ=U3F51cxD&)z9dRd!uX_J>g7~WUZ&wn%<_x_n z2eS*9*BoXSFt0hxE?{1BhTa9tYtGPZN%Xo)_qH5N8!)dsOdBw-J4_oeue)?_1Lk#? 
z?z@xdaw%xAyK^wRfw}B3yMejvFuQ@d+}vWExEq+un_F#q?Mb3HT-Di=gV_Vj8xFGv zm^U0|4=``IsoM;V>P*Of+vc+H?RjQQ2X% z=}e-@QV=DbIhamhCLN{|m`R7}1ZJ{%lfiTXGr48E!R$?$bIoD)0&~q_ z_5yRwMbBPfuDR&hmqgdSQ~Po-`+&LbF#CYH?lAj+x$d3X2h4Tv)cz#8;UwOlgV_(v z4Tsqe%ngUx56lfG@qS=#IElNGXv&xKt{hAkFjEfG1GvT z49twT<}fhwmZFvKND|GK_8U7L$-x`}X4YYj05j__M}V1a4l4B#U}gyjBbcK}H0R32 z(HzWCVCEd=C@^yla}=04S1yhMGv~_1Gf6b>Yt1t`m}h{QcbI2@nRl3HfSLET<{4n- zeXZ$EqPwNt)~W6sOgAuh9i|(YyAIP0%v~OJXaVg8=I-`3>(sMJbk8TlvpJY&fw|`} z&jNGLVV(u%o==8nfw||C;khJw;FICG9L#gTJaCxjfO+6B&jItmC&P2VJn+eIEQvnu zTzo7Ca}1b|JIpa)KJGBbfcdy{@iAaN?p%C4iQaLg^mq>DI56)x%yD4eahT)4yyHsg zabVtYrSwD+eZrT!6FHa@zhdBYvCw#d(0n8^@?wWM^(D0`UcAOu>M9(Mn zpDaKr+!r=JkA3)`Dl~2n2|WiW0Bc|L6rcdEC9nbiWC3bfLQen+kV{}aN&Tk^%rkxH z$%FL(#+Lxr1K6ihupYoZRbZ}J8NfbOV6qA9WK#d>0-G*( zRF!>sus*=}62STZ`)mr<2iRu|8@7frfPJ=rpj6BHllspUm^cFK&x7>?#+Lxr57_5Y zuztWkSJ+SqWdQqJf!QOlQ%U{jQ?OHcuv38XC4ij*?DHwuDZoCTf}H~F^C{Trr2Y$O zH&5rmP6Ni50CpO%FQj0n0sBJQ&C`H=A?@awr2gFkQ%T)ClLtEk7+(U|8Nl96!Oj5o zZehc=PzJDf3(O~Vb0DdIuYi;g*gzg^05HA;umQl{OZjX7u=ff`3M&KHdj&*=z|P`{ zIBnV4JlI*l_!7X*0`|oe>?~kkEHKdpb{4QNrY(CRseeBmfEV&$F961u0QLf4@26ld z0QP`=o#I zTKuG6oJ~#4cOBS!Z~tCyd}glAhX)nOn((#LzINW*-}>c3yt@AZS1)=|$c5p|wdAol z;xczb0V}4JBl~3X6)TXZ1 zMU7uX0E%a&k^}LID{@=wz1Xy<_(^@WW~y4to1DhP0W(uBUh#RxSg7J1i%QZ<&r*Yt zD|)gqUSWLI`iDH33j-~Fz$eHJqs8in@WUBCYJ`<3IT|-nhyJ`QW)lrQqzra9miGmi z@wph=*-$btrTFhd%C7X>FX>fFvOjLPWpCxI_p7@$F6#ZDeWszFF=>t)fZ%n~h85Jb za>P@u*WyO!+K<-tDqfgQj;DI&iQ-8;>JjyXRO`>M2exq>#TRqJHGpQ_WjSVcwtM8% zD=DXy;!FuwVi?U^vA}UHi@I*zQT~y#<+h@mWQO+uhZnOeQmTz6XY(9Z$dPL^fej1kB;e_p5$RKMZ+b)NsZLcH!_av?df zu!~E|liJQE$KqAD@bYnbHbU5Gb4s_K6;Yj2WUdsp}h`?m`5rfMq|HT@wCAzXECtzAAj zebqftyXt;~@?rhmGZ{C86(nhj8;6E3pXnJr^BDKzaRZhTU55^HGZg+lw7-j+B>sM( zzqhyNg~y^~Ra_n!INyJ+Z~x(aTr5%k-3GT6sIRko6&yMf3q^MaMa4a6s9S?9ilY^{ zTZkG~G{x8AdO{vxI#4REZMlTCij9LFL(4!3&wlbtQklAjkC)2T@jG(MCtbtb*vslj z&#`fWfxpmOwgMkXBkPvSw`6nU_0R%WoZy+8QD5FWf}2HLMqKDTY{!6Bm3}`Sw`LcuPin> zqO1j0l5xAWkxlxl;##6X;1g-`_Ep%gjfZ(KTHAx&>~KOSb=AKr>!geWR%JDD$Gnyv9rSh9h>S7MuHq9d6P1N4h@`H(u`?Tn`h!y; z^FDPA*IN?TV#}#%yrC(h{BxzQ9W~Hz^P;EXl1uKL$>!3lP?S9omuXIUSrF}7-|Vfy zCTMp}YtohLkKGzkBx?p$1CXq5;ygu z2r&lCMoanScLbYg>U^7FB*KY2k+a^K)Hj1)3R4&0w=giCm;JP?xFrL9b|c8H#uIaI zR+2|zS=Q)Iw9 zDMZsqnY%6b)J_T9jOxEQGqk7|#3B_+SO{@hdKZt2ID!lv)u7@GYYv^p_L`>Ow8vrW zsy=(Q$21OVITI>IB&e(8J4iHVhzs<*XE+mHrVvzuIomZVx48`TM1I$AJR zi2%KfHkbBzHM@92-kqV;{Tp)kiU)d~ErgAA{N;oYu;>-6oC3Fb!+$7=jvW8wEX=-yl6L<2{;&n*{%tHf2J}$$zzQx7TrTLTARC67VvPC z5C>PqjO}>YK)~6%4wn?Uub8Y~3!d8z8X?My3Wj^?s@Z2VSPUpXRqKlR%~*tAZK?6> zp=pvy8sQMdit2q(Mf7ucu^kMjjm4y$^ycu4*94Ct#4@UI-@fwC_99^D=~wju|~tiP5z0HPozxK&?kN4 zx=h1EiekfAF___eR!q!rJ}V~3^J@d^p_)n;i=l3>KS|SmmSFLb%9B;NPOe~hlfkSGTL4r?#>gHIy zSjDL?#2C!2d2#>=is><->6(jSM-E(PWmJG6-~L3KDAzRDgy>7&?DANX> zuS~lu9mie&Vr|XuFQ?EI%mJ@fN?+G)gQuPU)v&J79}WqK?T46Y(_4xVh@4{sD`3kA zZQzC6m z4%#fxsaZ_qb(&A&vYFCqV}1p~jb>f0lYkv(eL1k0 zxY2Brf>W+gKwQ{y)|Z1w4E|PDOmpU~teED^TTMZ5kdb0Rr+A8&a_gChNf!BWVM z2d4LE!{EA{Lb+$UscC71S<|RH-&LcWz}}fVQ+Y_mJ2>MouCpXmW^(|I=(AH|`w8w2 z9nw>bdoIdp7_Gj?82C3@eQ#|N|ML~hbVa}|)cshY(DFa8*c00ca_wXvca;%sLo1~; z_s|b|CzU8Ps52h8lNS-Fe1!QTM7@H1+S+P&2`Hh)?_w?ca05@AbW-v{n*gyScYuP@ z_JQuev7Sg*Vu%}pQjPb%4~Oa ze$32v+7#jc)mdW5ngskBZmXDf)tDOgLHhxOl;(1kOl&_OufI=}JZ+bHTM2k%6=|xQ zs+54fr80dUzm`PkBqO;+8>rWV7?ifn3&WtBhaE4?)k%{^AmC@`R6(<}WippZbtCyv zfN~O0Wn$Zq*mT!0*6?az&x~7EgC-C4+WW~xJ*PD0_&xY12@&a}3;yXsDMY@lM+#vBQCw#}cuC>ukz^@NlJ~tr zc~zQZEqt$#JkI2)XZhyi7pq0lgOzObyvQPwGT2xM0Nv1^w{C>44 z0*NHG{?}TL3BUVOg}R?+lKyQLIi~lbfN2Ej<}IhW1VzFGvkn{ZGG~se5oaqR}oAQ@3$DRAu{HZ)4_oQ6fwf6KzF!D8vx}hbF`?U_0H;G=A$Ya|y}^ 
zA}esEmWtWADO4S{eB*awwO^&#`BRcN;d6FW*2#;@sEl?OR_0V()F(uRyLHUo5^Tp1`g^Xgc7l0%PVek=t(blAVJl4Yw`$9cCPWSmvTT>y)NE zwS_4k0+4@QgA%%|DgX7ty7k^NQh(K3mYNeRZW-~J6#4W|+_H$4QCr$Fv>cLI%TRMj z(z3s*xY!Lz!#4|c{}saMCyGH~utN(yZkA0WU|i5F*h?rl#nk*D+w^nz{4{>*4uhj6v5L)la(H$( z8Ar5bUsg^*4*6lgVpyFEWWY>7!DeYjTQiO$>H2SqgkRT;YtWMO?E)cdzg(#F%7%hX z4d<3Ai&;X_6y_4k*{UpMNqSy;@KivsGX%2feEV=MFn&)PCWHjkc!dy}`(LZH*?|S+ zr$d`|%PGU$%`^u=w9-u8=ynT~?fgj_v8|(#VGfdnDZX83dd&EfBt80eVQq+!MiQp@ zc7brVnu85mzJ7-Od0wrHBm+P3(Y~$?Gn6Dnf2LZLIry{HqM9Tm)1m_Y*}~co3AjNA z10=IrN&Q^4sJNJ9T2x&8bJe2a;-6E|>(*fU`tyb225xMFx=^ENA=*vw2l;NR17x;^ z&_X}jd9Mv4KTmUC&^CF!69*qv8MiN^Dyz3@N?RpWwW@&Z4F>bDIHNgVg_B zUgsNwG-?9;QlS{ekrd=785&EH#{N>l;~tQt=r74)Rg#e;js2xUWgG6!i}j_X;g<__ z{{xLZTDibgf}Q2`%UZ~`BMZ)mPyskjzc<6_8~!ry&gqyxG>?0c`d?P_6ilJSK~KZ4 z6pH_SAp}z(&F5b!aMmOCE~)juTEJbn8bpfbk_^$RNs{+hWytMcY3{EU@CzP_^6}TI zMYTGS3@cg5X2!3{pga`S%=k4GJx)){Ny9%V)csF|Lf5`M`p3?Ji4Sy}la4gpOO@!R zEX-Fd=N=mXUV_*EgF@*^Ye!jK_)ekNWi^pv07!-bAW5^nQ)t?nex>Ml3YA@Hl4gCU zK(G+?sH`dZ>s2J^pk#`q4*q(i)@}cqnJ^js4v{>JiH2kwd-T#8QiPi=l#n=C{LaAb?O={3@6^cP1 zkU|8I4B~<$d4H==UTLT#LH(@)n|F;wlN$8f)uNKRB-5g5&~F#YtG%ci^xM^OC=i-OzLhsH=j>u}ggZ%0 zxTwzaOcBVHNug}s6dTscm3t5mbyT{%+bm++bF4tt(GPO>pBS!Nl{ z%T~5L(x|K?yX*gg&`-xrDLTp|2Gt@9TBTc>R_JK0JwD8szfh1Jv+|!j{;}sCVncRi zqT~9^-g`&);xX$k%4eW**MW{I{q|~GJ%9Gw<(P4+t806hdD+&?ylhm@zJhs~{VAt5 z4Ru@etIio4>vj^K$gb&|Fgk0wRjJZAvt&w-=j=Avlm<3r1L4N8*ScF7?R&v?!j+!c zS=%SGnvZ)Z>2Q6HGC?2gfRaNdh7dJ7y*O-QiL}2lq*z&$*leh;YsAHFNQTQn8tz6} zey6yaiK5fSV6e8jWF}flQA{RUf5R{6*3~r^TE1HFo`qqT;&D4%Q|sx2_SxYOm8D$D zF@sJY$D}6P5dvEaw%hy~lw)NPQ`p&s=jUP~*jOpp;PllteeZlBFb_VL7XIoRSyj~8 zDYG;Y0C5_vE3HF<`-R*3Vz|2u7ia6e$Km2^z3Dx=0aK6HV{!3|>M@ghQ{C1=%eS%~ ztD*E)?X=Ejhb?xRLJw084SE+xB$;J|R6vd_5Wm$5^d(~8z5UYZ;8LAy(Xe@C$p&K%f+;znxH{5y-z$z zovWxP6~(C55y%a;pwd=*GrHLd!}(N&1M1wNlUPjKc?)ZX3OzkD-I4o6TD49RJ+cj{ zVDrLZNBD_>umi8DbYX)R-*mf>VAg=~4Cgv_-Qq$fOs#MjM!>u3aVXc26C9{9?eg73 z{bpgctzkMG8U_uAh9wM#1jSBv6Qa3Yuk0!wHcFC!=DVEcq`1e})wC!5MzLK_OQB0I zTXr2hlr1BFzdhc;JmI`!ZhY^=G^c)ZQ+IFMDv~CA5m}#R>&&ug4O#9Y)cUzX{3N%u zFNxcYluXK(bY(Td85u`{$w2k#Ra4iI&WBdl`C$l}<|BHuMR?zczx8y>t^s_x2?;$g z*!&2Xd7`1uPbTNnCtrF7*3{Mg(b^I~Iz!*WbFr4q@d`^)Tu(n0RgUKk*$_7(AGF;x zxcZSa$@7rn&3pE!rm5>uV^D)74mO(b41=P{sd$B&b8my5-!ygYeZ)A{VhpLVDM~#} zzt=Jtm(t(zj~hOWZVBDGiUefAb%slmEkki!ts{^3p)V#MiFcCsbX-8pz1{UUvA%fm z?&RFtqa1_dj5B#9*Y2H<=zQ8SAK#W;OXl-&`&1!b|4=!Q9C!r8LvyF~9{gy9yb;&p zrW|u8yB`6P%ahi(XE|9zEjZH{i&q$zB-+MNJgEWB#RIK|kzp-!Q0mHKdPZ zVRIR+>U|^rOlU6O))rrAxxc8nv#E{^ZT>{Gb)XNWuc)@R!Hg(qu*~q+CfYfR_OjAg zyZ2CvB>rugpXr>d@XmecXw8ImC~1G*=d5E ztC?F|JJ>n+Jw=n&tu*5hnj{O5b=!BR-_(jdM|k#QJF|^a=Ra0C zA1t7CVp^qO;LGvrx)zanCl`Y=(~Ad^4anVjHHu9c<~W%AhnE}x<@RA4aW0|5g*tYL z^)2;Ju26fFqU9|sb>+;yjKGG>noXq*9ECN8B&zl%jxn<|**C3zyqcyvn>c>X_HkS4 zgC!qG^m|Kvejm4`WEAp!+!o$sFoSiVy{&GS&0yOkZ7c1tmKM~~*5XsadX5xTw-P;3 zEhVLvZm(ZwEwLmm-Ckd=rpdmY>}t7vr={Du5kHsizwmq2@_c0svf2XAUTKc=XOy+@ z3dc=BxClbE3>!qV*TziXK6V1vxK6nm#$UTM`nt1b2e;GCe$yS?PL9g|y|966uiIZ} znOW|LBVXE6V+QtMjHyfz{V&}y&;;KR1a)86i1T-66@iIHhm6h!nm6s3CLV$UY@2X_ z%8Ro$*_*OC30r*OGbOG~qT7TWT#dBwy;mm(ed3<5_v++y(NmDPlihinRX}a~s9+O2 z&PkY;oz@KVNbxq|R8uYPG~ zs*gISkfP$FaEb;9TQod_7WXkgJ$`3`d@`FvS>lTeB-gVaoE0esD|T~ss39RG6x^eE ziVC_r_j0f)%dPt3bv?QU$qAo_$Z?ZGyl=7Nfnc$r^AzVPnT8!NwGekz)-4eF`Qo~k zr)^dRtSURr-@O8RF>~f-Zr84vD#@5U?`%a1)aS8-QL9PO>gO2`u>hy2y}UW$Z?j5> zdQ0+u6KoQ&4G)%eT=)Z*T8i8Cf7It(N(#6>I4TsQCVavAiev=KKhB44+?OCFGP&K(bBb$#ft(+m0`&OviXASD;m?$m7sY{6Ex4w(j)+< z=Uic)S|EX;8B`?Gi?jmeZKFbQ7ExJ|sX3fKjLpcLL*|ItukE^MwB;+*(fZWu*#n*Q zVG2g7!QRT3RNZ(p7Q@aFc6%HRm%}*O<#9C3q1gWw2n4wKG8T9aLupGaOIeSpdJljHqV1c{v*DyhYQUaz;Ie1lyj2exF 
z1C1M@?+IBoqB`!i~WejMwZF?iQ-h$a%9mkJ#jF{pNS(8cTqwq{y;ZL(S2a~zwYU5c3Mq=cwI+q+#yEQG Ki4EzA^8W{UHplJ& literal 0 HcmV?d00001 diff --git a/crates/cnidarium/src/lib.rs b/crates/cnidarium/src/lib.rs new file mode 100644 index 0000000000..91c90a000c --- /dev/null +++ b/crates/cnidarium/src/lib.rs @@ -0,0 +1,91 @@ +//! Storage and management of chain state, backed by Jellyfish Merkle Trees and RocksDB. +//! +//! This crate provides a versioned, verifiable key-value store that also +//! supports lightweight, copy-on-write snapshots and transactional semantics. +//! The [`Storage`] type is a handle for an instance of a backing store, +//! implemented using RocksDB. The storage records a sequence of versioned +//! [`Snapshot`]s. The [`Snapshot`] type is a lightweight snapshot of a particular +//! version of the chain state. +//! +//! Each [`Snapshot`] instance can also be used as the basis for a copy-on-write +//! fork to build up changes before committing them to persistent storage. The +//! [`StateDelta`] type collects a group of writes, which can then be applied to +//! the (in-memory) [`StateDelta`] overlay. Finally, the changes accumulated in the +//! [`StateDelta`] instance can be committed to the persistent [`Storage`]. +//! +//! Reads are performed with the [`StateRead`] trait, implemented by both +//! [`Snapshot`] and [`StateDelta`], and reflect any currently cached writes. +//! Writes are performed with the [`StateWrite`] trait, which is only +//! implemented for [`StateDelta`]. +//! +//! The storage system provides three data stores: +//! +//! * A verifiable key-value store, with UTF-8 keys and byte values, backed by +//! the Jellyfish Merkle Tree. The JMT is a sparse merkle tree that records +//! hashed keys, so we also record an index of the keys themselves to allow +//! range queries on keys rather than key hashes. This index, however, is not +//! part of the verifiable consensus state. +//! +//! * A secondary, non-verifiable key-value store with byte keys and byte +//! values, backed directly by RocksDB. This is intended for use building +//! application-specific indexes of the verifiable consensus state. +//! +//! * A tertiary, in-memory object store. This is intended for use implementing +//! accumulators, like lists of data to be batch-processed at the end of the +//! block. The object store clones on read to prevent violations of +//! transactional semantics, so it should be used with immutable data structures +//! like those in the `im` crate that implement copy-on-write behavior +//! internally. +//! +//! The storage system also supports prefixed "substores", somewhat similar to +//! the Cosmos SDK's multistore design. Each substore has a separate JMT, whose +//! root hash is written into the base store under the prefix. This allows use +//! cases like storing IBC data in a subtree. The substore's non-verifiable +//! store is also stored in a separate RocksDB column family, allowing storage +//! optimizations. +//! +//! Remember that the chain state is a public API. Mapping from raw byte values +//! to typed data should be accomplished by means of extension traits. For +//! instance, the `penumbra_proto` crate provides an extension trait to +//! automatically (de)serialize into proto or domain types, allowing its use as +//! an object store. +//! +//! With the `rpc` feature enabled, this crate also provides a GRPC interface to +//! the key-value store using Tonic. +#![deny(clippy::unwrap_used)] +// Requires nightly. 
+#![cfg_attr(docsrs, feature(doc_auto_cfg))] +// We use `HashMap`s opportunistically. +#![allow(clippy::disallowed_types)] + +mod cache; +mod delta; +mod escaped_byte_slice; +mod metrics; +mod read; +mod snapshot; +mod snapshot_cache; +mod storage; +mod store; +#[cfg(test)] +mod tests; +mod utils; +mod write; +mod write_batch; + +#[cfg(feature = "metrics")] +pub use crate::metrics::register_metrics; +pub use cache::Cache; +pub use delta::{ArcStateDeltaExt, StateDelta}; +pub use escaped_byte_slice::EscapedByteSlice; +pub use jmt::{ics23_spec, RootHash}; +pub use read::StateRead; +pub use snapshot::Snapshot; +pub use storage::{Storage, TempStorage}; +pub use write::StateWrite; +pub use write_batch::StagedWriteBatch; + +pub mod future; + +#[cfg(feature = "rpc")] +pub mod rpc; diff --git a/crates/cnidarium/src/metrics.rs b/crates/cnidarium/src/metrics.rs new file mode 100644 index 0000000000..4eef4590dc --- /dev/null +++ b/crates/cnidarium/src/metrics.rs @@ -0,0 +1,33 @@ +#![cfg(feature = "metrics")] +//! Crate-specific metrics functionality. +//! +//! This module re-exports the contents of the `metrics` crate. This is +//! effectively a way to monkey-patch the functions in this module into the +//! `metrics` crate, at least from the point of view of the other code in this +//! crate. +//! +//! Code in this crate that wants to use metrics should `use crate::metrics;`, +//! so that this module shadows the `metrics` crate. +//! +//! This trick is probably good to avoid in general, because it could be +//! confusing, but in this limited case, it seems like a clean option. + +pub use metrics::*; + +/// Registers all metrics used by this crate. +pub fn register_metrics() { + describe_histogram!( + STORAGE_GET_RAW_DURATION, + Unit::Seconds, + "The duration of a get_raw request" + ); + describe_histogram!( + STORAGE_NONCONSENSUS_GET_RAW_DURATION, + Unit::Seconds, + "The duration of a nonverifiable_get_raw request" + ); +} + +pub const STORAGE_GET_RAW_DURATION: &str = "cnidarium_get_raw_duration_seconds"; +pub const STORAGE_NONCONSENSUS_GET_RAW_DURATION: &str = + "cnidarium_nonverifiable_get_raw_duration_seconds"; diff --git a/crates/cnidarium/src/read.rs b/crates/cnidarium/src/read.rs new file mode 100644 index 0000000000..30d1135432 --- /dev/null +++ b/crates/cnidarium/src/read.rs @@ -0,0 +1,247 @@ +use std::{any::Any, future::Future, ops::RangeBounds, sync::Arc}; + +use anyhow::Result; +use futures::Stream; + +/// Read access to chain state. +pub trait StateRead: Send + Sync { + type GetRawFut: Future>>> + Send + 'static; + type PrefixRawStream: Stream)>> + Send + 'static; + type PrefixKeysStream: Stream> + Send + 'static; + type NonconsensusPrefixRawStream: Stream, Vec)>> + Send + 'static; + type NonconsensusRangeRawStream: Stream, Vec)>> + Send + 'static; + + /// Gets a value from the verifiable key-value store as raw bytes. + /// + /// Users should generally prefer to use `get` or `get_proto` from an extension trait. + fn get_raw(&self, key: &str) -> Self::GetRawFut; + + /// Gets a byte value from the non-verifiable key-value store. + /// + /// This is intended for application-specific indexes of the verifiable + /// consensus state, rather than for use as a primary data storage method. + fn nonverifiable_get_raw(&self, key: &[u8]) -> Self::GetRawFut; + + /// Gets an object from the ephemeral key-object store. 
+ /// + /// This is intended to allow application components to build up batched + /// data transactionally, ensuring that a transaction's contributions to + /// some batched data are only included if the entire transaction executed + /// successfully. This data is not persisted to the `Storage` during + /// `commit`. + /// + /// # Returns + /// + /// - `Some(&T)` if a value of type `T` was present at `key`. + /// - `None` if `key` was not present. + /// + /// # Panics + /// + /// If there *is* a value at `key` but it is not of the type requested. + fn object_get(&self, key: &'static str) -> Option; + + /// Gets the [`TypeId`] of the object stored at `key` in the ephemeral key-object store, if any + /// is present. + fn object_type(&self, key: &'static str) -> Option; + + /// Retrieve all values for keys matching a prefix from the verifiable key-value store, as raw bytes. + /// + /// Users should generally prefer to use `prefix` or `prefix_proto` from an extension trait. + fn prefix_raw(&self, prefix: &str) -> Self::PrefixRawStream; + + /// Retrieve all keys (but not values) matching a prefix from the verifiable key-value store. + fn prefix_keys(&self, prefix: &str) -> Self::PrefixKeysStream; + + /// Retrieve all values for keys matching a prefix from the non-verifiable key-value store, as raw bytes. + /// + /// Users should generally prefer to use wrapper methods in an extension trait. + fn nonverifiable_prefix_raw(&self, prefix: &[u8]) -> Self::NonconsensusPrefixRawStream; + + /// Retrieve all values for keys in a range from the non-verifiable key-value store, as raw bytes. + /// This method does not support inclusive ranges, and will return an error if passed one. + /// + /// Users should generally prefer to use wrapper methods in an extension trait. 
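As a concrete illustration of the `StateRead` trait above (and of the delegating impls for `&S`, `&mut S`, and `Arc<S>` that follow), callers can stay generic over the state type. A minimal sketch with illustrative key names:

    use std::pin::pin;

    use cnidarium::StateRead;
    use futures::TryStreamExt;

    async fn dump_demo_state<S: StateRead>(state: &S) -> anyhow::Result<()> {
        // `get_raw` resolves to Ok(None) when the key is absent.
        if let Some(bytes) = state.get_raw("demo/key").await? {
            println!("demo/key holds {} bytes", bytes.len());
        }
        // `prefix_raw` streams (key, value) pairs for every key under the prefix.
        let mut entries = pin!(state.prefix_raw("demo/"));
        while let Some((key, value)) = entries.try_next().await? {
            println!("{key} holds {} bytes", value.len());
        }
        Ok(())
    }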
+ fn nonverifiable_range_raw( + &self, + prefix: Option<&[u8]>, + range: impl RangeBounds>, + ) -> Result; +} + +impl<'a, S: StateRead + Send + Sync> StateRead for &'a S { + type GetRawFut = S::GetRawFut; + type PrefixRawStream = S::PrefixRawStream; + type PrefixKeysStream = S::PrefixKeysStream; + type NonconsensusPrefixRawStream = S::NonconsensusPrefixRawStream; + type NonconsensusRangeRawStream = S::NonconsensusRangeRawStream; + + fn get_raw(&self, key: &str) -> Self::GetRawFut { + (**self).get_raw(key) + } + + fn prefix_raw(&self, prefix: &str) -> S::PrefixRawStream { + (**self).prefix_raw(prefix) + } + + fn prefix_keys(&self, prefix: &str) -> S::PrefixKeysStream { + (**self).prefix_keys(prefix) + } + + fn nonverifiable_prefix_raw(&self, prefix: &[u8]) -> S::NonconsensusPrefixRawStream { + (**self).nonverifiable_prefix_raw(prefix) + } + + fn nonverifiable_range_raw( + &self, + prefix: Option<&[u8]>, + range: impl std::ops::RangeBounds>, + ) -> anyhow::Result { + (**self).nonverifiable_range_raw(prefix, range) + } + + fn nonverifiable_get_raw(&self, key: &[u8]) -> Self::GetRawFut { + (**self).nonverifiable_get_raw(key) + } + + fn object_get(&self, key: &'static str) -> Option { + (**self).object_get(key) + } + + fn object_type(&self, key: &'static str) -> Option { + (**self).object_type(key) + } +} + +impl<'a, S: StateRead + Send + Sync> StateRead for &'a mut S { + type GetRawFut = S::GetRawFut; + type PrefixRawStream = S::PrefixRawStream; + type PrefixKeysStream = S::PrefixKeysStream; + type NonconsensusPrefixRawStream = S::NonconsensusPrefixRawStream; + type NonconsensusRangeRawStream = S::NonconsensusRangeRawStream; + + fn get_raw(&self, key: &str) -> Self::GetRawFut { + (**self).get_raw(key) + } + + fn prefix_raw(&self, prefix: &str) -> S::PrefixRawStream { + (**self).prefix_raw(prefix) + } + + fn prefix_keys(&self, prefix: &str) -> S::PrefixKeysStream { + (**self).prefix_keys(prefix) + } + + fn nonverifiable_prefix_raw(&self, prefix: &[u8]) -> S::NonconsensusPrefixRawStream { + (**self).nonverifiable_prefix_raw(prefix) + } + + fn nonverifiable_range_raw( + &self, + prefix: Option<&[u8]>, + range: impl RangeBounds>, + ) -> Result { + (**self).nonverifiable_range_raw(prefix, range) + } + + fn nonverifiable_get_raw(&self, key: &[u8]) -> Self::GetRawFut { + (**self).nonverifiable_get_raw(key) + } + + fn object_get(&self, key: &'static str) -> Option { + (**self).object_get(key) + } + + fn object_type(&self, key: &'static str) -> Option { + (**self).object_type(key) + } +} + +impl StateRead for Arc { + type GetRawFut = S::GetRawFut; + type PrefixRawStream = S::PrefixRawStream; + type PrefixKeysStream = S::PrefixKeysStream; + type NonconsensusPrefixRawStream = S::NonconsensusPrefixRawStream; + type NonconsensusRangeRawStream = S::NonconsensusRangeRawStream; + + fn get_raw(&self, key: &str) -> Self::GetRawFut { + (**self).get_raw(key) + } + + fn prefix_raw(&self, prefix: &str) -> S::PrefixRawStream { + (**self).prefix_raw(prefix) + } + + fn prefix_keys(&self, prefix: &str) -> S::PrefixKeysStream { + (**self).prefix_keys(prefix) + } + + fn nonverifiable_prefix_raw(&self, prefix: &[u8]) -> S::NonconsensusPrefixRawStream { + (**self).nonverifiable_prefix_raw(prefix) + } + + fn nonverifiable_range_raw( + &self, + prefix: Option<&[u8]>, + range: impl RangeBounds>, + ) -> Result { + (**self).nonverifiable_range_raw(prefix, range) + } + + fn nonverifiable_get_raw(&self, key: &[u8]) -> Self::GetRawFut { + (**self).nonverifiable_get_raw(key) + } + + fn object_get(&self, key: &'static str) -> Option 
{ + (**self).object_get(key) + } + + fn object_type(&self, key: &'static str) -> Option { + (**self).object_type(key) + } +} + +impl StateRead for () { + type GetRawFut = futures::future::Ready>>>; + type PrefixRawStream = futures::stream::Iter)>>>; + type PrefixKeysStream = futures::stream::Iter>>; + type NonconsensusPrefixRawStream = + futures::stream::Iter, Vec)>>>; + type NonconsensusRangeRawStream = + futures::stream::Iter, Vec)>>>; + + fn get_raw(&self, _key: &str) -> Self::GetRawFut { + futures::future::ready(Ok(None)) + } + + fn nonverifiable_get_raw(&self, _key: &[u8]) -> Self::GetRawFut { + futures::future::ready(Ok(None)) + } + + fn object_get(&self, _key: &'static str) -> Option { + None + } + + fn object_type(&self, _key: &'static str) -> Option { + None + } + + fn prefix_raw(&self, _prefix: &str) -> Self::PrefixRawStream { + futures::stream::iter(std::iter::empty()) + } + + fn prefix_keys(&self, _prefix: &str) -> Self::PrefixKeysStream { + futures::stream::iter(std::iter::empty()) + } + + fn nonverifiable_prefix_raw(&self, _prefix: &[u8]) -> Self::NonconsensusPrefixRawStream { + futures::stream::iter(std::iter::empty()) + } + + fn nonverifiable_range_raw( + &self, + _prefix: Option<&[u8]>, + _range: impl RangeBounds>, + ) -> Result { + Ok(futures::stream::iter(std::iter::empty())) + } +} diff --git a/crates/cnidarium/src/rpc.rs b/crates/cnidarium/src/rpc.rs new file mode 100644 index 0000000000..525c42e2e0 --- /dev/null +++ b/crates/cnidarium/src/rpc.rs @@ -0,0 +1,256 @@ +// Autogen code isn't clippy clean: +#[allow(clippy::unwrap_used)] +pub mod proto { + pub mod v1 { + include!("gen/penumbra.cnidarium.v1.rs"); + include!("gen/penumbra.cnidarium.v1.serde.rs"); + } + + // https://github.com/penumbra-zone/penumbra/issues/3038#issuecomment-1722534133 + pub const FILE_DESCRIPTOR_SET: &[u8] = include_bytes!("gen/proto_descriptor.bin.no_lfs"); +} + +pub struct Server { + storage: Storage, +} + +impl Server { + pub fn new(storage: Storage) -> Self { + Self { storage } + } +} +use std::pin::Pin; + +use crate::read::StateRead; +use crate::rpc::proto::v1::{ + key_value_response::Value as JMTValue, non_verifiable_key_value_response::Value as NVValue, + query_service_server::QueryService, watch_response as wr, KeyValueRequest, KeyValueResponse, + NonVerifiableKeyValueRequest, NonVerifiableKeyValueResponse, PrefixValueRequest, + PrefixValueResponse, WatchRequest, WatchResponse, +}; +use futures::{StreamExt, TryStreamExt}; +use regex::Regex; +use tokio_stream::wrappers::ReceiverStream; +use tonic::Status; +use tracing::instrument; + +use crate::Storage; + +#[tonic::async_trait] +impl QueryService for Server { + #[instrument(skip(self, request))] + async fn non_verifiable_key_value( + &self, + request: tonic::Request, + ) -> Result, Status> { + let state = self.storage.latest_snapshot(); + let request = request.into_inner(); + + if request.key.is_none() || request.key.as_ref().expect("key is Some").inner.is_empty() { + return Err(Status::invalid_argument("key is empty")); + } + + let key = request.key.expect("key is Some").inner; + let some_value = state + .nonverifiable_get_raw(&key) + .await + .map_err(|e| tonic::Status::internal(e.to_string()))?; + + Ok(tonic::Response::new(NonVerifiableKeyValueResponse { + value: some_value.map(|value| NVValue { value }), + })) + } + + #[instrument(skip(self, request))] + async fn key_value( + &self, + request: tonic::Request, + ) -> Result, Status> { + let state = self.storage.latest_snapshot(); + // We map the error here to avoid including `tonic` as 
a dependency + // in the `chain` crate, to support its compilation to wasm. + let request = request.into_inner(); + tracing::debug!(?request, "processing key_value request"); + + if request.key.is_empty() { + return Err(Status::invalid_argument("key is empty")); + } + + let (some_value, proof) = { + // Don't generate the proof if the request doesn't ask for it. + let (v, p) = if request.proof { + let (v, p) = state + .get_with_proof(request.key.into_bytes()) + .await + .map_err(|e| tonic::Status::internal(e.to_string()))?; + (v, Some(p)) + } else { + ( + state + .get_raw(&request.key) + .await + .map_err(|e| tonic::Status::internal(e.to_string()))?, + None, + ) + }; + (v, p) + }; + + Ok(tonic::Response::new(KeyValueResponse { + value: some_value.map(|value| JMTValue { value }), + proof: if request.proof { + Some(ibc_proto::ibc::core::commitment::v1::MerkleProof { + proofs: proof + .expect("proof should be present") + .proofs + .into_iter() + .map(|p| { + let mut encoded = Vec::new(); + prost::Message::encode(&p, &mut encoded).expect("able to encode proof"); + prost::Message::decode(&*encoded).expect("able to decode proof") + }) + .collect(), + }) + } else { + None + }, + })) + } + + type PrefixValueStream = + Pin> + Send>>; + + #[instrument(skip(self, request))] + async fn prefix_value( + &self, + request: tonic::Request, + ) -> Result, Status> { + let state = self.storage.latest_snapshot(); + let request = request.into_inner(); + tracing::debug!(?request); + + if request.prefix.is_empty() { + return Err(Status::invalid_argument("prefix is empty")); + } + + Ok(tonic::Response::new( + state + .prefix_raw(&request.prefix) + .map_ok(|i: (String, Vec)| { + let (key, value) = i; + PrefixValueResponse { key, value } + }) + .map_err(|e: anyhow::Error| { + tonic::Status::unavailable(format!( + "error getting prefix value from storage: {e}" + )) + }) + .boxed(), + )) + } + + type WatchStream = ReceiverStream>; + + #[instrument(skip(self, request))] + async fn watch( + &self, + request: tonic::Request, + ) -> Result, Status> { + let request = request.into_inner(); + tracing::debug!(?request); + + const MAX_REGEX_LEN: usize = 1024; + + let key_regex = match request.key_regex.as_str() { + "" => None, + _ => Some( + regex::RegexBuilder::new(&request.key_regex) + .size_limit(MAX_REGEX_LEN) + .build() + .map_err(|e| Status::invalid_argument(format!("invalid key_regex: {}", e)))?, + ), + }; + + // Use the `bytes` regex to allow matching byte strings. + let nv_key_regex = match request.nv_key_regex.as_str() { + "" => None, + _ => Some( + regex::bytes::RegexBuilder::new(&request.nv_key_regex) + .size_limit(MAX_REGEX_LEN) + .unicode(false) + .build() + .map_err(|e| { + Status::invalid_argument(format!("invalid nv_key_regex: {}", e)) + })?, + ), + }; + + let (tx, rx) = tokio::sync::mpsc::channel::>(10); + + tokio::spawn(watch_changes( + self.storage.clone(), + key_regex, + nv_key_regex, + tx, + )); + + Ok(tonic::Response::new(ReceiverStream::new(rx))) + } +} + +async fn watch_changes( + storage: Storage, + key_regex: Option, + nv_key_regex: Option, + tx: tokio::sync::mpsc::Sender>, +) -> anyhow::Result<()> { + let mut changes_rx = storage.subscribe_changes(); + while !tx.is_closed() { + // Wait for a new set of changes, reporting an error if we don't get one. 
+ if let Err(e) = changes_rx.changed().await { + tx.send(Err(tonic::Status::internal(e.to_string()))).await?; + } + let (version, changes) = changes_rx.borrow_and_update().clone(); + + if key_regex.is_some() || nv_key_regex.is_none() { + for (key, value) in changes.unwritten_changes().iter() { + if key_regex + .as_ref() + .unwrap_or(&Regex::new(r"").expect("empty regex ok")) + .is_match(key) + { + tx.send(Ok(WatchResponse { + version, + entry: Some(wr::Entry::Kv(wr::KeyValue { + key: key.clone(), + value: value.as_ref().cloned().unwrap_or_default(), + deleted: value.is_none(), + })), + })) + .await?; + } + } + } + + if nv_key_regex.is_some() || key_regex.is_none() { + for (key, value) in changes.nonverifiable_changes().iter() { + if nv_key_regex + .as_ref() + .unwrap_or(®ex::bytes::Regex::new(r"").expect("empty regex ok")) + .is_match(key) + { + tx.send(Ok(WatchResponse { + version, + entry: Some(wr::Entry::NvKv(wr::NvKeyValue { + key: key.clone(), + value: value.as_ref().cloned().unwrap_or_default(), + deleted: value.is_none(), + })), + })) + .await?; + } + } + } + } + return Ok(()); +} diff --git a/crates/cnidarium/src/snapshot.rs b/crates/cnidarium/src/snapshot.rs new file mode 100644 index 0000000000..eb3518805a --- /dev/null +++ b/crates/cnidarium/src/snapshot.rs @@ -0,0 +1,558 @@ +use std::iter; +use std::{any::Any, sync::Arc}; + +use anyhow::Result; +use async_trait::async_trait; +use ibc_types::core::commitment::MerkleProof; +use tokio::sync::mpsc; +use tracing::Span; + +#[cfg(feature = "metrics")] +use crate::metrics; +use crate::store::multistore::{self, MultistoreCache}; +use crate::{store, StateRead}; + +mod rocks_wrapper; + +pub(crate) use rocks_wrapper::RocksDbSnapshot; + +/// A snapshot of the underlying storage at a specific state version, suitable +/// for read-only access by multiple threads, e.g., RPC calls. +/// +/// Snapshots are cheap to create and clone. Internally, they're implemented as +/// a wrapper around a [RocksDB snapshot](https://github.com/facebook/rocksdb/wiki/Snapshot) +/// with a pinned JMT version number for the snapshot. +#[derive(Clone)] +pub struct Snapshot(pub(crate) Arc); + +// We don't want to expose the `TreeReader` implementation outside of this crate. +#[derive(Debug)] +pub(crate) struct Inner { + /// Tracks the latest version of each substore, and routes keys to the correct substore. + pub(crate) multistore_cache: MultistoreCache, + /// A handle to the underlying RocksDB snapshot. + pub(crate) snapshot: Arc, + /// The version of the main JMT tree. + pub(crate) version: jmt::Version, + // Used to retrieve column family handles. + pub(crate) db: Arc, +} + +impl Snapshot { + /// Creates a new `Snapshot` with the given version and substore configs. + pub(crate) fn new( + db: Arc, + version: jmt::Version, + multistore_cache: multistore::MultistoreCache, + ) -> Self { + Self(Arc::new(Inner { + snapshot: Arc::new(RocksDbSnapshot::new(db.clone())), + version, + db, + multistore_cache, + })) + } + + pub fn version(&self) -> jmt::Version { + self.0.version + } + + /// Returns some value corresponding to the key, along with an ICS23 existence proof + /// up to the current JMT root hash. If the key is not present, returns `None` and a + /// non-existence proof. 
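+ ///
+ /// The returned [`MerkleProof`] carries the substore proof first; when the key routes to a
+ /// substore, a second proof from the main store root down to that substore's root is appended.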
+ pub async fn get_with_proof(&self, key: Vec) -> Result<(Option>, MerkleProof)> { + if key.is_empty() { + anyhow::bail!("empty keys are not allowed") + } + + let span = tracing::Span::current(); + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + let mut proofs = vec![]; + + let (substore_key, substore_config) = self.0.multistore_cache.config.route_key_bytes(&key); + let substore_key_bytes = substore_key.to_vec(); + let substore_version = self.substore_version(&substore_config).unwrap_or(u64::MAX); + let key_to_substore_root = substore_config.prefix.clone(); + + let substore = store::substore::SubstoreSnapshot { + config: substore_config, + rocksdb_snapshot: rocksdb_snapshot.clone(), + version: substore_version, + db: db.clone(), + }; + + let (substore_value, substore_commitment_proof) = tokio::task::spawn_blocking({ + let span = span.clone(); + move || span.in_scope(|| substore.get_with_proof(substore_key_bytes)) + }) + .await??; + + proofs.push(substore_commitment_proof); + + // in the case where we request a proof for a key that is in a substore, also get a proof from the root to the substore key. + if !key_to_substore_root.is_empty() { + let main_store_config = self.0.multistore_cache.config.main_store.clone(); + let main_version = self + .substore_version(&main_store_config) + .unwrap_or(u64::MAX); + let mainstore = store::substore::SubstoreSnapshot { + config: main_store_config, + rocksdb_snapshot, + version: main_version, + db, + }; + + let (_, main_commitment_proof) = tokio::task::spawn_blocking({ + let span = span.clone(); + move || span.in_scope(|| mainstore.get_with_proof(key_to_substore_root.into())) + }) + .await??; + + proofs.push(main_commitment_proof); + } + + Ok(( + substore_value, + MerkleProof { + proofs: proofs.clone(), + }, + )) + } + + pub fn prefix_version(&self, prefix: &str) -> Result> { + let Some(config) = self + .0 + .multistore_cache + .config + .find_substore(prefix.as_bytes()) + else { + anyhow::bail!("rquested a version for a prefix that does not exist (prefix={prefix})") + }; + + Ok(self.substore_version(&config)) + } + + /// Returns the root hash of the subtree corresponding to the given prefix. + /// If the prefix is empty, the root hash of the main tree is returned. + /// + /// # Errors + /// Returns an error if the supplied prefix does not correspond to a known substore. + pub async fn prefix_root_hash(&self, prefix: &str) -> Result { + let span = tracing::Span::current(); + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let Some(config) = self + .0 + .multistore_cache + .config + .find_substore(prefix.as_bytes()) + else { + anyhow::bail!("requested a root for a substore that does not exist (prefix={prefix})") + }; + + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + + tracing::debug!( + prefix = substore.config.prefix, + version = substore.version, + "fetching root hash for substore" + ); + + tokio::task::spawn_blocking(move || span.in_scope(|| substore.root_hash())).await? 
+ } + + pub async fn root_hash(&self) -> Result { + self.prefix_root_hash("").await + } + + pub(crate) fn substore_version( + &self, + prefix: &Arc, + ) -> Option { + self.0.multistore_cache.get_version(prefix) + } +} + +#[async_trait] +impl StateRead for Snapshot { + type GetRawFut = crate::future::SnapshotFuture; + type PrefixRawStream = + tokio_stream::wrappers::ReceiverStream)>>; + type PrefixKeysStream = tokio_stream::wrappers::ReceiverStream>; + type NonconsensusPrefixRawStream = + tokio_stream::wrappers::ReceiverStream, Vec)>>; + type NonconsensusRangeRawStream = + tokio_stream::wrappers::ReceiverStream, Vec)>>; + + /// Fetch a key from the JMT. + fn get_raw(&self, key: &str) -> Self::GetRawFut { + let span = Span::current(); + let (key, config) = self.0.multistore_cache.config.route_key_str(key); + + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + let key_hash = jmt::KeyHash::with::(key); + + crate::future::SnapshotFuture(tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let _start = std::time::Instant::now(); + let rsp = substore.get_jmt(key_hash); + #[cfg(feature = "metrics")] + metrics::histogram!(metrics::STORAGE_GET_RAW_DURATION).record(_start.elapsed()); + rsp + }) + })) + } + + /// Fetch a key from nonverifiable storage. + fn nonverifiable_get_raw(&self, key: &[u8]) -> Self::GetRawFut { + let span = Span::current(); + let (key, config) = self.0.multistore_cache.config.route_key_bytes(key); + + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + let key: Vec = key.to_vec(); + + crate::future::SnapshotFuture(tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let _start = std::time::Instant::now(); + + let cf_nonverifiable = substore.config.cf_nonverifiable(&substore.db); + let rsp = substore + .rocksdb_snapshot + .get_cf(cf_nonverifiable, key) + .map_err(Into::into); + #[cfg(feature = "metrics")] + metrics::histogram!(metrics::STORAGE_NONCONSENSUS_GET_RAW_DURATION) + .record(_start.elapsed()); + rsp + }) + })) + } + + /// Returns a stream of all key-value pairs with the given prefix. + fn prefix_raw(&self, prefix: &str) -> Self::PrefixRawStream { + let span = Span::current(); + + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let (prefix_truncated, config) = self.0.multistore_cache.config.match_prefix_str(prefix); + tracing::trace!(substore_key = prefix_truncated, substore_prefix = config.prefix, prefix_supplied = ?prefix, "matched prefix, fetching substore"); + let substore_prefix = config.prefix.clone(); + + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + + let mut options = rocksdb::ReadOptions::default(); + options.set_iterate_range(rocksdb::PrefixRange(prefix_truncated.as_bytes())); + let mode = rocksdb::IteratorMode::Start; + let (tx_prefix_item, rx_prefix_query) = mpsc::channel(10); + + // Since the JMT keys are hashed, we can't use a prefix iterator directly. 
+ // We need to first prefix range the key preimages column family, then use the hashed matches to fetch the values + // from the JMT column family. + tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let cf_jmt_keys = substore.config.cf_jmt_keys(&substore.db); + let jmt_keys_iterator = + substore + .rocksdb_snapshot + .iterator_cf_opt(cf_jmt_keys, options, mode); + + for tuple in jmt_keys_iterator { + // For each key that matches the prefix, fetch the value from the JMT column family. + let (key_preimage, _) = tuple?; + let substore_key = std::str::from_utf8(key_preimage.as_ref()) + .expect("saved jmt keys are utf-8 strings"); + let key_hash = jmt::KeyHash::with::(substore_key.as_bytes()); + + let full_key = if substore_prefix.is_empty() { + substore_key.to_string() + } else { + format!("{substore_prefix}/{substore_key}").to_string() + }; + + let v = substore + .get_jmt(key_hash)? + .expect("keys in jmt_keys should have a corresponding value in jmt"); + + tx_prefix_item.blocking_send(Ok((full_key, v)))?; + } + anyhow::Ok(()) + }) + }); + + tokio_stream::wrappers::ReceiverStream::new(rx_prefix_query) + } + + // NOTE: this implementation is almost the same as the above, but without + // fetching the values. not totally clear if this could be combined, or if that would + // be better overall. + fn prefix_keys(&self, prefix: &str) -> Self::PrefixKeysStream { + let span = Span::current(); + + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let (prefix_truncated, config) = self.0.multistore_cache.config.match_prefix_str(prefix); + + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + + let mut options = rocksdb::ReadOptions::default(); + options.set_iterate_range(rocksdb::PrefixRange(prefix_truncated.as_bytes())); + let mode = rocksdb::IteratorMode::Start; + let (tx_prefix_keys, rx_prefix_keys) = mpsc::channel(10); + + tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let cf_jmt_keys = substore.config.cf_jmt_keys(&substore.db); + let iter = substore + .rocksdb_snapshot + .iterator_cf_opt(cf_jmt_keys, options, mode); + + let substore_prefix = &substore.config.prefix; + + for key_and_keyhash in iter { + let (raw_preimage, _) = key_and_keyhash?; + let preimage = std::str::from_utf8(raw_preimage.as_ref()) + .expect("saved jmt keys are utf-8 strings"); + + let full_key = if substore_prefix.is_empty() { + preimage.to_string() + } else { + format!("{substore_prefix}/{preimage}").to_string() + }; + + tx_prefix_keys.blocking_send(Ok(full_key))?; + } + anyhow::Ok(()) + }) + }); + + tokio_stream::wrappers::ReceiverStream::new(rx_prefix_keys) + } + + /// Returns a stream of all key-value pairs with the given prefix, from nonverifiable storage. 
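+ /// Keys are returned fully qualified: when the prefix routes to a substore, the substore
+ /// prefix and the `/` delimiter are re-prepended to each key before it is sent on the stream.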
+ fn nonverifiable_prefix_raw(&self, prefix: &[u8]) -> Self::NonconsensusPrefixRawStream { + let span = Span::current(); + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let (truncated_prefix, config) = self.0.multistore_cache.config.match_prefix_bytes(prefix); + tracing::trace!(substore_key = ?truncated_prefix, substore_prefix = config.prefix, prefix_supplied = ?prefix, "matched prefix, fetching substore"); + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + + let mut options = rocksdb::ReadOptions::default(); + options.set_iterate_range(rocksdb::PrefixRange(truncated_prefix)); + let mode = rocksdb::IteratorMode::Start; + + let (tx_prefix_query, rx_prefix_query) = mpsc::channel(10); + + tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let cf_nonverifiable = substore.config.cf_nonverifiable(&substore.db); + let iter = + substore + .rocksdb_snapshot + .iterator_cf_opt(cf_nonverifiable, options, mode); + let substore_prefix = substore.config.prefix.as_bytes().to_vec(); + for i in iter { + let (boxed_key, boxed_value) = i?; + let key: Vec = boxed_key.into(); + let value: Vec = boxed_value.into(); + + // Costly to do on every iteration, but this should be dwarfed by the + // context switch to the tokio runtime. + let mut full_key: Vec = vec![]; + if substore_prefix.is_empty() { + full_key.extend(key); + } else { + full_key.extend(substore_prefix.clone()); + full_key.extend(iter::once(b'/')); + full_key.extend(key); + } + + tx_prefix_query.blocking_send(Ok((full_key, value)))?; + } + anyhow::Ok(()) + }) + }); + + tokio_stream::wrappers::ReceiverStream::new(rx_prefix_query) + } + + /// Returns a stream of all key-value pairs with the given prefix, and range + /// from nonverifiable storage. + /// **Important**: Only supports range queries over the main store. + fn nonverifiable_range_raw( + &self, + prefix: Option<&[u8]>, + range: impl std::ops::RangeBounds>, + ) -> anyhow::Result { + let span = Span::current(); + let rocksdb_snapshot = self.0.snapshot.clone(); + let db = self.0.db.clone(); + + let (prefix, config) = self + .0 + .multistore_cache + .config + .route_key_bytes(prefix.unwrap_or_default()); + + let version = self + .substore_version(&config) + .expect("the substore exists and has been initialized"); + + let substore = store::substore::SubstoreSnapshot { + config, + rocksdb_snapshot, + version, + db, + }; + + let (_range, (start, end)) = crate::utils::convert_bounds(range)?; + let mut options = rocksdb::ReadOptions::default(); + + let (start, end) = (start.unwrap_or_default(), end.unwrap_or_default()); + let end_is_empty = end.is_empty(); + + let mut prefix_start = Vec::with_capacity(prefix.len() + start.len()); + let mut prefix_end = Vec::with_capacity(prefix.len() + end.len()); + + prefix_start.extend(prefix); + prefix_start.extend(start); + prefix_end.extend(prefix); + prefix_end.extend(end); + + tracing::debug!( + ?prefix_start, + ?prefix_end, + ?prefix, + "nonverifiable_range_raw" + ); + + options.set_iterate_lower_bound(prefix_start); + + // Our range queries implementation relies on forward iteration, which + // means that if the upper key is unbounded and a prefix has been set + // we cannot set the upper bound to the prefix. This is because the + // prefix is used as a lower bound for the iterator, and the upper bound + // is used to stop the iteration. 
+ // If we set the upper bound to the prefix, we would get a range consisting of: + // ``` + // "compactblock/001" to "compactblock/" + // ``` + // which would not return anything. + if !end_is_empty { + options.set_iterate_upper_bound(prefix_end); + } + + let mode = rocksdb::IteratorMode::Start; + let prefix = prefix.to_vec(); + + let (tx, rx) = mpsc::channel::, Vec)>>(10); + tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let cf_nonverifiable = substore.config.cf_nonverifiable(&substore.db); + let iter = + substore + .rocksdb_snapshot + .iterator_cf_opt(cf_nonverifiable, options, mode); + + for i in iter { + let (key, value) = i?; + + // This is a bit of a hack, but RocksDB doesn't let us express the "prefixed range-queries", + // that we want to support. In particular, we want to be able to do a prefix query that starts + // at a particular key, and does not have an upper bound. Since we can't create an iterator that + // cover this range, we have to filter out the keys that don't match the prefix. + if !prefix.is_empty() && !key.starts_with(&prefix) { + break; + } + tx.blocking_send(Ok((key.into(), value.into())))?; + } + Ok::<(), anyhow::Error>(()) + }) + }); + + Ok(tokio_stream::wrappers::ReceiverStream::new(rx)) + } + + fn object_get(&self, _key: &str) -> Option { + // No-op -- this will never be called internally, and `Snapshot` is not exposed in public API + None + } + + fn object_type(&self, _key: &str) -> Option { + // No-op -- this will never be called internally, and `Snapshot` is not exposed in public API + None + } +} + +impl std::fmt::Debug for Snapshot { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Snapshot") + .field("version", &self.0.version) + .finish_non_exhaustive() + } +} diff --git a/crates/cnidarium/src/snapshot/rocks_wrapper.rs b/crates/cnidarium/src/snapshot/rocks_wrapper.rs new file mode 100644 index 0000000000..a7e47fe314 --- /dev/null +++ b/crates/cnidarium/src/snapshot/rocks_wrapper.rs @@ -0,0 +1,83 @@ +use std::fmt::{Debug, Formatter}; +use std::ops::Deref; +use std::sync::Arc; + +/// A wrapper type that acts as a `rocksdb::Snapshot` of an `Arc`'d database +/// handle. +/// +/// This works around a limitation of the `rocksdb` API: the `rocksdb::Snapshot` +/// can only take a borrowed database handle, not an `Arc`'d one, so the +/// lifetime of the `rocksdb::Snapshot` is bound to the lifetime of the borrowed +/// handle. Instead, this wrapper type bundles an `Arc`'d handle together with +/// the `rocksdb::Snapshot`, so that the database is guaranteed to live at least +/// as long as any snapshot of it. +pub struct RocksDbSnapshot { + /// The snapshot itself. It's not really `'static`, so it's on us to ensure + /// that the database stays live as long as the snapshot does. + inner: rocksdb::Snapshot<'static>, + /// The raw pointer form of the Arc we use to guarantee the database + /// lives at least as long as the snapshot. We create this from the Arc + /// in the constructor, pass it to the snapshot on creation, and then + /// convert it back into an Arc in the drop impl to decrement the refcount. + /// + /// Arc::into_raw consumes the Arc instance but does not decrement the + /// refcount. This means that we cannot accidentally drop the Arc while + /// using the raw pointer. Instead, we must explicitly convert the raw + /// pointer back into an Arc when we're finished using it, and only then + /// drop it. 
+ raw_db: *const rocksdb::DB, +} + +impl Debug for RocksDbSnapshot { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.debug_struct("RocksDbSnapshot").finish() + } +} + +// Safety requires that the inner snapshot instance must never live longer than +// the wrapper. We're assured that this is the case, because we only return a +// borrow of the inner snapshot, and because `rocksdb::Snapshot` is neither +// `Copy` nor `Clone`. +// +// We're also reasonably certain that the upstream crate will not add such an +// implementation in the future, because its drop impl is used to make the FFI +// call that discards the in-memory snapshot, so it would not be safe to add +// such an implementation. +impl Deref for RocksDbSnapshot { + type Target = rocksdb::Snapshot<'static>; + + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +impl RocksDbSnapshot { + /// Creates a new snapshot of the given `db`. + pub fn new(db: Arc) -> Self { + // First, convert the Arc into a raw pointer. + let raw_db = Arc::into_raw(db); + // Next, use the raw pointer to construct a &DB instance with a fake + // 'static lifetime, and use that instance to construct the inner + // Snapshot. + let static_db: &'static rocksdb::DB = unsafe { &*raw_db }; + let inner = rocksdb::Snapshot::new(static_db); + + Self { inner, raw_db } + } +} + +impl Drop for RocksDbSnapshot { + fn drop(&mut self) { + // Now that we know we're finished with the `Snapshot`, we can + // reconstruct the `Arc` and drop it, to decrement the DB refcount. + unsafe { + let db = Arc::from_raw(self.raw_db); + std::mem::drop(db); + } + } +} + +/// The `Send` implementation is safe because the `rocksdb::Snapshot` is `Send`. +unsafe impl Send for RocksDbSnapshot {} +/// The `Sync` implementation is safe because the `rocksdb::Snapshot` is `Sync`. +unsafe impl Sync for RocksDbSnapshot {} diff --git a/crates/cnidarium/src/snapshot_cache.rs b/crates/cnidarium/src/snapshot_cache.rs new file mode 100644 index 0000000000..236d40a91a --- /dev/null +++ b/crates/cnidarium/src/snapshot_cache.rs @@ -0,0 +1,228 @@ +use crate::Snapshot; +use std::{cmp, collections::VecDeque}; + +/// A circular cache for storing [`Snapshot`]s. +/// +/// # Usage +/// +/// [`Snapshot`]s are inserted in the cache using the [`push`] or [`try_push`] +/// methods. If the cache is full, the oldest entry will be evicted to make space +/// for the newer entry. +/// +/// # Constraints +/// +/// [`Snapshot`]s must be inserted sequentially relative to their [`jmt::Version`] +/// numbers, and have consecutive version numbers. +pub struct SnapshotCache { + /// A sequence of increasingly recent [`Snapshot`]s. + cache: VecDeque, + /// The max length and capacity of the cache. + max_size: usize, +} + +impl SnapshotCache { + /// Creates a [`SnapshotCache`] with `max_size` capacity, and inserts an initial `Snapshot` in + /// it. If the specified capacity is zero, the cache will default to having size 1. + pub fn new(initial: Snapshot, max_size: usize) -> Self { + let max_size = cmp::max(max_size, 1); + let mut cache = VecDeque::with_capacity(max_size); + cache.push_front(initial); + + Self { cache, max_size } + } + + /// Attempts to insert a [`Snapshot`] entry into the cache. If the cache is full, the oldest + /// entry will be evicted to make space. + /// + /// [`Snapshot`]s must be inserted sequentially relative to their `jmt::Version`s and have + /// consecutive version numbers. 
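+ /// For example, if the latest cached snapshot has version `6`, only a snapshot with
+ /// version `7` can be pushed next; re-inserting `6` or skipping ahead to `8` is rejected.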
+ /// + /// ## Errors + /// + /// The method will return an error if the supplied `snapshot` has a version number that is: + /// + /// - stale i.e. older than the latest snapshot + /// + /// - skipping a version i.e. the difference between their version numbers is greater than 1 + pub fn try_push(&mut self, snapshot: Snapshot) -> anyhow::Result<()> { + let latest_version = self.latest().version(); + if latest_version.wrapping_add(1) != snapshot.version() { + anyhow::bail!("snapshot_cache: trying to insert stale snapshots."); + } + + if self.cache.len() >= self.max_size { + self.cache.pop_back(); + } + + self.cache.push_front(snapshot); + Ok(()) + } + + /// Returns the latest inserted `Snapshot`. + pub fn latest(&self) -> Snapshot { + self.cache + .front() + .map(Clone::clone) + .expect("snapshot_cache cannot be empty") + } + + /// Attempts to fetch a [`Snapshot`] with a matching `jmt::Version`, and returns `None` if none + /// was found. + pub fn get(&self, version: jmt::Version) -> Option { + let latest_version = self.latest().version(); + // We compute the offset assuming that snapshot entries are cached + // such that the delta between entries is always 1. + let offset = latest_version.wrapping_sub(version) as usize; + self.cache + .get(offset) + .map(Clone::clone) + .filter(|s| s.version() == version) + } + + /// Empties the cache. + pub fn clear(&mut self) { + self.cache.clear(); + } +} + +#[cfg(test)] +mod test { + + use crate::snapshot::Snapshot; + use crate::snapshot_cache::SnapshotCache; + use crate::storage::Storage; + use crate::store::multistore::MultistoreCache; + + async fn create_storage_instance() -> Storage { + use tempfile::tempdir; + // create a storage backend for testing + let dir = tempdir().expect("unable to create tempdir"); + let file_path = dir.path().join("snapshot-cache-testing.db"); + + Storage::load(file_path, vec![]) + .await + .expect("unable to load storage") + } + + #[tokio::test] + /// `SnapshotCache` constructed with zero capacity instead defaults to one. + async fn fail_zero_capacity() { + let storage = create_storage_instance().await; + let db = storage.db(); + let snapshot = storage.latest_snapshot(); + let mut cache = SnapshotCache::new(snapshot, 0); + + // Check that the cache has a capacity at least 1 + assert!(cache.get(u64::MAX).is_some()); + let new_snapshot = Snapshot::new(db, 0, MultistoreCache::default()); + cache + .try_push(new_snapshot) + .expect("should not fail to insert a new entry"); + + // Check that the cache has a capacity of exactly 1 + assert!(cache.get(u64::MAX).is_none()); + assert!(cache.get(0).is_some()); + } + + #[tokio::test] + /// Fails to insert snapshot entries that are older than the latest' + async fn fail_insert_stale_snapshot() { + let storage = create_storage_instance().await; + let db_handle = storage.db(); + let snapshot = storage.latest_snapshot(); + let mut cache = SnapshotCache::new(snapshot, 1); + let stale_snapshot = Snapshot::new(db_handle, 1, MultistoreCache::default()); + cache + .try_push(stale_snapshot) + .expect_err("should fail to insert a stale entry in the snapshot cache"); + } + + #[tokio::test] + /// Fails to insert snapshot entries that have a version gap. 
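+ /// Here the cache starts at version 0, so pushing version 2 (skipping version 1) must be rejected.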
+ async fn fail_insert_gapped_snapshot() { + let storage = create_storage_instance().await; + let db_handle = storage.db(); + let snapshot = Snapshot::new(db_handle.clone(), 0, MultistoreCache::default()); + let mut cache = SnapshotCache::new(snapshot, 2); + let snapshot = Snapshot::new(db_handle, 2, MultistoreCache::default()); + cache + .try_push(snapshot) + .expect_err("should fail to insert snapshot with skipped version number"); + } + + #[tokio::test] + /// Checks that we handle pre-genesis `jmt::Version` correctly. + async fn cache_manage_pre_genesis() { + let storage = create_storage_instance().await; + let db_handle = storage.db(); + let snapshot = storage.latest_snapshot(); + + // Create a cache of size 10, populated with one entry with version: u64::MAX + let mut cache = SnapshotCache::new(snapshot, 10); + + // Fill the entire cache by inserting 9 more entries. + for i in 0..9 { + let snapshot = Snapshot::new(db_handle.clone(), i, MultistoreCache::default()); + cache + .try_push(snapshot) + .expect("should not fail to insert a new entry"); + } + + // The cache is full, check that the oldest entry is still in the cache. + assert!(cache.get(u64::MAX).is_some()); + + // Push another snapshot in the cache, this should cause eviction of the oldest entry + // alone. + let new_snapshot = Snapshot::new(db_handle, 9, MultistoreCache::default()); + cache + .try_push(new_snapshot) + .expect("should not fail to insert a new entry"); + + // Check that the pre-genesis entry has been evicted! + assert!(cache.get(u64::MAX).is_none()); + + // Check that all the other entries are still in the cache. + for i in 0..10 { + assert!(cache.get(i).is_some()); + } + } + + #[tokio::test] + /// Checks that inserting on a full cache exclusively evicts the oldest snapshots. + async fn drop_oldest_snapshot() { + let storage = create_storage_instance().await; + let db_handle = storage.db(); + let snapshot = Snapshot::new(db_handle.clone(), 0, MultistoreCache::default()); + + // Create a cache of size 10, populated with a snapshot at version 0. + let mut cache = SnapshotCache::new(snapshot, 10); + + // Saturate the cache by inserting 9 more entries. + for i in 1..10 { + let snapshot = Snapshot::new(db_handle.clone(), i, MultistoreCache::default()); + cache + .try_push(snapshot) + .expect("should be able to insert new entries") + } + + // Check that the oldest value is still present: + assert!(cache.get(0).is_some()); + + // Insert a new value that should overflow the cache. + let snapshot = Snapshot::new(db_handle, 10, MultistoreCache::default()); + cache + .try_push(snapshot) + .expect("should be able to insert a new entry"); + + // Check that the oldest value has been dropped. + assert!(cache.get(0).is_none()); + + // Check that the front of the cache is the latest inserted snapshot. + assert_eq!(cache.latest().version(), 10); + + // Check that all the other snapshots are still present in the cache. 
+ for i in 1..11 { + assert!(cache.get(i).is_some()); + } + } +} diff --git a/crates/cnidarium/src/storage.rs b/crates/cnidarium/src/storage.rs new file mode 100644 index 0000000000..30ccf67438 --- /dev/null +++ b/crates/cnidarium/src/storage.rs @@ -0,0 +1,611 @@ +use std::{path::PathBuf, sync::Arc}; + +use anyhow::{bail, ensure, Result}; +use parking_lot::RwLock; +use rocksdb::{Options, DB}; +use std::collections::HashMap; +use tokio::sync::watch; +use tracing::Span; + +use crate::{ + cache::Cache, + snapshot::Snapshot, + store::{ + multistore::{self, MultistoreConfig}, + substore::{SubstoreConfig, SubstoreSnapshot, SubstoreStorage}, + }, +}; +use crate::{snapshot_cache::SnapshotCache, StagedWriteBatch, StateDelta}; + +mod temp; +pub use temp::TempStorage; + +/// A handle for a storage instance, backed by RocksDB. +/// +/// The handle is cheaply clonable; all clones share the same backing data store. +#[derive(Clone)] +pub struct Storage(Arc); + +impl std::fmt::Debug for Storage { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Storage").finish_non_exhaustive() + } +} + +// A private inner element to prevent the `TreeWriter` implementation +// from leaking outside of this crate. +struct Inner { + dispatcher_tx: watch::Sender<(Snapshot, (jmt::Version, Arc))>, + snapshot_rx: watch::Receiver, + changes_rx: watch::Receiver<(jmt::Version, Arc)>, + snapshots: RwLock, + multistore_config: MultistoreConfig, + /// A handle to the dispatcher task. + /// This is used by `Storage::release` to wait for the task to terminate. + jh_dispatcher: Option>, + db: Arc, +} + +impl Storage { + /// Loads a storage instance from the given path, initializing it if necessary. + pub async fn load(path: PathBuf, default_prefixes: Vec) -> Result { + let span = Span::current(); + let db_path = path.clone(); + // initializing main storage instance. + let prefixes = tokio::task::spawn_blocking(move || { + span.in_scope(|| { + let mut opts = Options::default(); + opts.create_if_missing(true); + opts.create_missing_column_families(true); + tracing::info!(?path, "opening rocksdb config column"); + + // Hack(erwan): RocksDB requires us to specify all the column families + // that we want to use upfront. This is problematic when we are initializing + // a new database, because the call to `DBCommon::list_cf` will fail + // if the database manifest is not found. To work around this, we ignore + // the error and assume that the database is empty. 
+ // Tracked in: https://github.com/rust-rocksdb/rust-rocksdb/issues/608 + let mut columns = DB::list_cf(&opts, path.clone()).unwrap_or_default(); + if columns.is_empty() { + columns.push("config".to_string()); + } + + let db = DB::open_cf(&opts, path, columns).expect("can open database"); + let cf_config = db + .cf_handle("config") + .expect("config column family is created if missing"); + let config_iter = db.iterator_cf(cf_config, rocksdb::IteratorMode::Start); + let mut prefixes = Vec::new(); + tracing::info!("reading prefixes from config column family"); + for i in config_iter { + let (key, _) = i.expect("can read from iterator"); + prefixes.push(String::from_utf8(key.to_vec()).expect("prefix is utf8")); + } + + for prefix in default_prefixes { + if !prefixes.contains(&prefix) { + db.put_cf(cf_config, prefix.as_bytes(), b"") + .expect("can write to db"); + prefixes.push(prefix); + } + } + + std::mem::drop(db); + prefixes + }) + }) + .await?; + + Storage::init(db_path, prefixes).await + } + + /// Initializes a new storage instance at the given path. Takes a list of default prefixes + /// to initialize the storage configuration with. + /// Here is a high-level overview of the initialization process: + /// 1. Create a new RocksDB instance at the given path. + /// 2. Read the prefix list and create a [`SubstoreConfig`] for each prefix. + /// 3. Create a new [`MultistoreConfig`] from supplied prefixes. + /// 4. Initialize the substore cache with the latest version of each substore. + /// 5. Spawn a dispatcher task that forwards new snapshots to subscribers. + pub async fn init(path: PathBuf, prefixes: Vec) -> Result { + let span = Span::current(); + + tokio::task + ::spawn_blocking(move || { + span.in_scope(|| { + let mut substore_configs = Vec::new(); + tracing::info!("initializing global store config"); + let main_store = Arc::new(SubstoreConfig::new("")); + for substore_prefix in prefixes { + tracing::info!(prefix = ?substore_prefix, "creating substore config for prefix"); + if substore_prefix.is_empty() { + bail!("the empty prefix is reserved") + } + substore_configs.push(Arc::new(SubstoreConfig::new(substore_prefix))); + } + + let multistore_config = MultistoreConfig { + main_store: main_store.clone(), + substores: substore_configs.clone(), + }; + + let mut substore_columns: Vec<&String> = substore_configs + .iter() + .flat_map(|config| config.columns()) + .collect(); + let mut columns: Vec<&String> = main_store.columns().collect(); + columns.append(&mut substore_columns); + + tracing::info!(?path, "opening rocksdb"); + let cf_config_string = "config".to_string(); + // RocksDB setup: define options, collect all the columns, and open the database. + // Each substore defines a prefix and its own set of columns. + // See [`crate::store::SubstoreConfig`] for more details. + let mut opts = Options::default(); + opts.create_if_missing(true); + opts.create_missing_column_families(true); + columns.push(&cf_config_string); + + let db = DB::open_cf(&opts, path, columns)?; + let shared_db = Arc::new(db); + + // Initialize the substore cache with the latest version of each substore. + // Note: for compatibility reasons with Tendermint/CometBFT, we set the "pre-genesis" + // jmt version to be u64::MAX, corresponding to -1 mod 2^64. + let jmt_version = main_store + .latest_version_from_db(&shared_db)? 
+ .unwrap_or(u64::MAX); + + let mut multistore_cache = + multistore::MultistoreCache::from_config(multistore_config.clone()); + + for substore_config in substore_configs { + let substore_version = substore_config + .latest_version_from_db(&shared_db)? + .unwrap_or(u64::MAX); + + multistore_cache.set_version(substore_config.clone(), substore_version); + tracing::debug!( + substore_prefix = ?substore_config.prefix, + ?substore_version, + "initializing substore" + ); + } + + multistore_cache.set_version(main_store, jmt_version); + tracing::debug!(?jmt_version, "initializing main store"); + + let latest_snapshot = + Snapshot::new(shared_db.clone(), jmt_version, multistore_cache); + + // A concurrent-safe ring buffer of the latest 10 snapshots. + let snapshots = RwLock::new(SnapshotCache::new(latest_snapshot.clone(), 10)); + + // Setup a dispatcher task that acts as an intermediary between the storage + // and the rest of the system. Its purpose is to forward new snapshots to + // subscribers. + // + // If we were to send snapshots directly to subscribers, a slow subscriber could + // hold a lock on the watch channel for too long, and block the consensus-critical + // commit logic, which needs to acquire a write lock on the watch channel. + // + // Instead, we "proxy" through a dispatcher task that copies values from one + // channel to the other, ensuring that if an API consumer misuses the watch + // channels, it will only affect other subscribers, not the commit logic. + + let (snapshot_tx, snapshot_rx) = watch::channel(latest_snapshot.clone()); + // Note: this will never be seen by consumers, since we mark the current value as seen + // before returning the receiver. + let dummy_cache = (u64::MAX, Arc::new(Cache::default())); + let (changes_tx, changes_rx) = watch::channel(dummy_cache.clone()); + let (tx_dispatcher, mut rx_dispatcher) = watch::channel((latest_snapshot, dummy_cache)); + + let jh_dispatcher = tokio::spawn(async move { + tracing::info!("snapshot dispatcher task has started"); + // If the sender is dropped, the task will terminate. + while rx_dispatcher.changed().await.is_ok() { + tracing::debug!("dispatcher has received a new snapshot"); + let (snapshot, changes) = rx_dispatcher.borrow_and_update().clone(); + // [`watch::Sender::send`] only returns an error if there are no + // receivers, so we can safely ignore the result here. + let _ = snapshot_tx.send(snapshot); + let _ = changes_tx.send(changes); + } + tracing::info!("dispatcher task has terminated") + }); + + Ok(Self(Arc::new(Inner { + // We don't need to wrap the task in a `CancelOnDrop` because + // the task will stop when the sender is dropped. However, certain + // test scenarios require us to wait that all resources are released. + jh_dispatcher: Some(jh_dispatcher), + dispatcher_tx: tx_dispatcher, + snapshot_rx, + changes_rx, + multistore_config, + snapshots, + db: shared_db, + }))) + }) + }) + .await? + } + + /// Returns the latest version (block height) of the tree recorded by the + /// `Storage`. + /// + /// If the tree is empty and has not been initialized, returns `u64::MAX`. + pub fn latest_version(&self) -> jmt::Version { + self.latest_snapshot().version() + } + + /// Returns a [`watch::Receiver`] that can be used to subscribe to new state versions. + pub fn subscribe(&self) -> watch::Receiver { + let mut rx = self.0.snapshot_rx.clone(); + // Mark the current value as seen, so that the user of the receiver + // will only be notified of *subsequent* values. 
+ rx.borrow_and_update(); + rx + } + + /// Returns a [`watch::Receiver`] that can be used to subscribe to state changes. + pub fn subscribe_changes(&self) -> watch::Receiver<(jmt::Version, Arc)> { + let mut rx = self.0.changes_rx.clone(); + // Mark the current value as seen, so that the user of the receiver + // will only be notified of *subsequent* values. + rx.borrow_and_update(); + rx + } + + /// Returns a new [`Snapshot`] on top of the latest version of the tree. + pub fn latest_snapshot(&self) -> Snapshot { + self.0.snapshots.read().latest() + } + + /// Fetches the [`Snapshot`] corresponding to the supplied `jmt::Version` from + /// the [`SnapshotCache`]. Returns `None` if no match was found. + pub fn snapshot(&self, version: jmt::Version) -> Option { + self.0.snapshots.read().get(version) + } + + /// Prepares a commit for the provided [`StateDelta`], returning a [`StagedWriteBatch`]. + /// The batch can be committed to the database using the [`Storage::commit_batch`] method. + pub async fn prepare_commit(&self, delta: StateDelta) -> Result { + // Extract the snapshot and the changes from the state delta + let (snapshot, changes) = delta.flatten(); + let prev_snapshot_version = snapshot.version(); + + // We use wrapping_add here so that we can write `new_version = 0` by + // overflowing `PRE_GENESIS_VERSION`. + let prev_storage_version = self.latest_version(); + let next_storage_version = prev_storage_version.wrapping_add(1); + tracing::debug!(prev_storage_version, next_storage_version); + + ensure!( + prev_storage_version == prev_snapshot_version, + "trying to prepare a commit for a delta forked from version {}, but the latest version is {}", + prev_snapshot_version, + prev_storage_version + ); + + self.prepare_commit_inner(snapshot, changes, next_storage_version, false) + .await + } + + async fn prepare_commit_inner( + &self, + snapshot: Snapshot, + cache: Cache, + version: jmt::Version, + perform_migration: bool, + ) -> Result { + tracing::debug!(new_jmt_version = ?version, "preparing to commit state delta"); + // Save a copy of the changes to send to subscribers later. + let changes = Arc::new(cache.clone_changes()); + + let mut changes_by_substore = cache.shard_by_prefix(&self.0.multistore_config); + #[allow(clippy::disallowed_types)] + let mut substore_roots = HashMap::new(); + let mut multistore_versions = + multistore::MultistoreCache::from_config(self.0.multistore_config.clone()); + + let db = self.0.db.clone(); + let rocksdb_snapshot = snapshot.0.snapshot.clone(); + + let mut new_versions = vec![]; + + // We use a single write batch to commit all the substores at once. Each task will append + // its own changes to the batch, and we will commit it at the end. + let mut write_batch = rocksdb::WriteBatch::default(); + + // Note(erwan): Here, we spawn a commit task for each substore. + // The substore keyspaces are disjoint, so conceptually it is + // fine to rewrite it using a [`tokio::task::JoinSet`]. + // The reason this isn't done is because `rocksdb::WriteBatch` + // is _not_ thread-safe. + // + // This means that to spin-up N tasks, we would need to use a + // single batch wrapped in a mutex, or use N batches, and find + // a way to commit to them atomically. This isn't supported by + // RocksDB which leaves one option: to iterate over each entry + // in each batch, and merge them together. At this point, this + // is probably not worth it. 
+ // + // Another option is to trade atomicity for parallelism by producing + // N batches, and committing them in distinct atomic writes. This is + // potentially faster, but it is also more dangerous, because if one + // of the writes fails, we are left with a partially committed state. + // + // The current implementation leans on the fact that the number of + // substores is small, and that the synchronization overhead of a joinset + // would exceed its benefits. This works well for now. + for config in self.0.multistore_config.iter() { + tracing::debug!(substore_prefix = ?config.prefix, "processing substore"); + // If the substore is empty, we need to fetch its initialized version from the cache. + let old_substore_version = config + .latest_version_from_snapshot(&db, &rocksdb_snapshot)? + .unwrap_or_else(|| { + tracing::debug!("substore is empty, fetching initialized version from cache"); + snapshot + .substore_version(config) + .expect("prefix should be initialized") + }); + + let Some(changeset) = changes_by_substore.remove(config) else { + tracing::debug!(prefix = config.prefix, "no changes for substore, skipping"); + multistore_versions.set_version(config.clone(), old_substore_version); + continue; + }; + + let new_version = if perform_migration { + old_substore_version + } else { + old_substore_version.wrapping_add(1) + }; + new_versions.push(new_version); + let substore_snapshot = SubstoreSnapshot { + config: config.clone(), + rocksdb_snapshot: rocksdb_snapshot.clone(), + version: new_version, + db: db.clone(), + }; + + let substore_storage = SubstoreStorage { substore_snapshot }; + + // Commit the substore and collect its root hash + let (root_hash, substore_batch) = substore_storage + .commit(changeset, write_batch, new_version, perform_migration) + .await?; + write_batch = substore_batch; + + tracing::debug!( + ?root_hash, + prefix = config.prefix, + ?version, + "added substore to write batch" + ); + substore_roots.insert(config.clone(), (root_hash, new_version)); + + tracing::debug!( + ?root_hash, + prefix = ?config.prefix, + ?new_version, + "updating substore version" + ); + multistore_versions.set_version(config.clone(), new_version); + } + + // Add substore roots to the main store changeset + let main_store_config = self.0.multistore_config.main_store.clone(); + let mut main_store_changes = changes_by_substore + .remove(&main_store_config) + .unwrap_or_else(|| { + tracing::debug!("no changes for main store, creating empty changeset"); + Cache::default() + }); + + for (config, (root_hash, _)) in substore_roots.iter() { + main_store_changes + .unwritten_changes + .insert(config.prefix.to_string(), Some(root_hash.0.to_vec())); + } + + // Commit the main store and collect the global root hash + let main_store_snapshot = SubstoreSnapshot { + config: main_store_config.clone(), + rocksdb_snapshot: snapshot.0.snapshot.clone(), + version, + db: self.0.db.clone(), + }; + + let main_store_storage = SubstoreStorage { + substore_snapshot: main_store_snapshot, + }; + + let (global_root_hash, write_batch) = main_store_storage + .commit(main_store_changes, write_batch, version, perform_migration) + .await?; + tracing::debug!( + ?global_root_hash, + ?version, + "added main store to write batch" + ); + + tracing::debug!(?global_root_hash, version = ?version, "updating main store version"); + let main_store_config = self.0.multistore_config.main_store.clone(); + multistore_versions.set_version(main_store_config, version); + + Ok(StagedWriteBatch { + write_batch, + version, + 
multistore_versions, + root_hash: global_root_hash, + substore_roots, + perform_migration, + changes, + }) + } + + /// Commits the provided [`StateDelta`] to persistent storage as the latest + /// version of the chain state. + pub async fn commit(&self, delta: StateDelta) -> Result { + let batch = self.prepare_commit(delta).await?; + self.commit_batch(batch) + } + + /// Commits the supplied [`StagedWriteBatch`] to persistent storage. + /// + /// # Migrations + /// In the case of chain state migrations we need to commit the new state + /// without incrementing the version. If `perform_migration` is `true` the + /// snapshot will _not_ be written to the snapshot cache, and no subscribers + /// will be notified. Substore versions will not be updated. + pub fn commit_batch(&self, batch: StagedWriteBatch) -> Result { + let StagedWriteBatch { + write_batch, + version, + multistore_versions, + root_hash: global_root_hash, + substore_roots, + perform_migration, + changes, + } = batch; + + let db = self.0.db.clone(); + + // check that the version of the batch being committed is the correct next version + let old_version = self.latest_version(); + let expected_new_version = if perform_migration { + old_version + } else { + old_version.wrapping_add(1) + }; + + ensure!( + expected_new_version == version, + "new version mismatch: expected {} but got {}", + expected_new_version, + version + ); + + // also check that each of the substore versions are the correct next version + let snapshot = self.latest_snapshot(); + + // Warning: we MUST check version coherence for **every** substore. + // These checks are a second line of defense. They must consider + // the case when two deltas effect distinct substores. + // + // version: (m, ss_1, ss_2) + // D_0: (_, 1, 0) <- initial state + // D_1: (A, 1, 1) <- multiwrite to ss_1 AND ss_2 + // D_1*: (A, 1, 0) <- isolate write to ss_1 + // + // A comprehensive check lets us catch the stale write D_1* even if + // locally it does not directly effect the second substore at all. + // And even if the main version check passes (spuriously, or because of + // a migration). + for (substore_config, new_version) in &multistore_versions.substores { + if substore_config.prefix.is_empty() { + // this is the main store, ignore + continue; + } + + let old_substore_version = snapshot + .substore_version(substore_config) + .expect("substores must be initialized at startup"); + + // if the substore exists in `substore_roots`, there have been updates to the substore. + // if `perform_migration` is false and there are updates, the next version should be previous + 1. + // otherwise, the version should remain the same. 
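+ // For example, a substore at version 5 with pending changes expects version 6, while an
+ // untouched substore, or one being committed as part of a migration, stays at version 5.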
+ let expected_substore_version = + if substore_roots.get(substore_config).is_some() && !perform_migration { + old_substore_version.wrapping_add(1) + } else { + old_substore_version + }; + + ensure!( + expected_substore_version == *new_version, + "substore new version mismatch for substore with prefix {}: expected {} but got {}", + substore_config.prefix, + expected_substore_version, + new_version + ); + } + + tracing::debug!(new_jmt_version = ?batch.version, "committing batch to db"); + + db.write(write_batch).expect("can write to db"); + tracing::debug!( + ?global_root_hash, + ?version, + "committed main store and substores to db" + ); + + // If we're not performing a migration, we should update the snapshot cache + if !perform_migration { + tracing::debug!("updating snapshot cache"); + + let latest_snapshot = Snapshot::new(db.clone(), version, multistore_versions); + // Obtain a write lock to the snapshot cache, and push the latest snapshot + // available. The lock guard is implicitly dropped immediately. + self.0 + .snapshots + .write() + .try_push(latest_snapshot.clone()) + .expect("should process snapshots with consecutive jmt versions"); + + tracing::debug!(?version, "dispatching snapshot"); + + // Send fails if the channel is closed (i.e., if there are no receivers); + // in this case, we should ignore the error, we have no one to notify. + let _ = self + .0 + .dispatcher_tx + .send((latest_snapshot, (version, changes))); + } else { + tracing::debug!("skipping snapshot cache update"); + } + + Ok(global_root_hash) + } + + #[cfg(feature = "migration")] + /// Commit the provided [`StateDelta`] to persistent storage without increasing the version + /// of the chain state, and skips the snapshot cache update. + pub async fn commit_in_place(&self, delta: StateDelta) -> Result { + let (snapshot, changes) = delta.flatten(); + let old_version = self.latest_version(); + let batch = self + .prepare_commit_inner(snapshot, changes, old_version, true) + .await?; + self.commit_batch(batch) + } + + /// Returns the internal handle to RocksDB, this is useful to test adjacent storage crates. + #[cfg(test)] + pub(crate) fn db(&self) -> Arc { + self.0.db.clone() + } + + /// Shuts down the database and the dispatcher task, and waits for all resources to be reclaimed. + /// Panics if there are still outstanding references to the `Inner` storage. + pub async fn release(mut self) { + if let Some(inner) = Arc::get_mut(&mut self.0) { + inner.shutdown().await; + inner.snapshots.write().clear(); + // `Inner` is dropped once the call completes. + } else { + panic!("Unable to get mutable reference to Inner"); + } + } +} + +impl Inner { + pub(crate) async fn shutdown(&mut self) { + if let Some(jh) = self.jh_dispatcher.take() { + jh.abort(); + let _ = jh.await; + } + } +} diff --git a/crates/cnidarium/src/storage/temp.rs b/crates/cnidarium/src/storage/temp.rs new file mode 100644 index 0000000000..e82368c899 --- /dev/null +++ b/crates/cnidarium/src/storage/temp.rs @@ -0,0 +1,43 @@ +use crate::Storage; +use std::ops::Deref; +use tempfile::TempDir; + +/// A [`Storage`] instance backed by a [`tempfile::TempDir`] for testing. +/// +/// The `TempDir` handle is bundled into the `TempStorage`, so the temporary +/// directory is cleaned up when the `TempStorage` instance is dropped. 
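+///
+/// Example usage (an illustrative sketch; assumes a tokio async test context):
+///
+/// ```ignore
+/// let storage = TempStorage::new().await?;
+/// // A fresh store starts at the pre-genesis version, `u64::MAX`.
+/// assert_eq!(storage.latest_snapshot().version(), u64::MAX);
+/// ```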
+pub struct TempStorage { + inner: Storage, + _dir: TempDir, +} + +impl Deref for TempStorage { + type Target = Storage; + fn deref(&self) -> &Self::Target { + &self.inner + } +} + +impl AsRef for TempStorage { + fn as_ref(&self) -> &Storage { + &self.inner + } +} + +impl TempStorage { + pub async fn new() -> anyhow::Result { + let dir = tempfile::tempdir()?; + let db_filepath = dir.path().join("storage.db"); + let inner = Storage::load(db_filepath.clone(), vec![]).await?; + + Ok(TempStorage { inner, _dir: dir }) + } + + pub async fn new_with_prefixes(prefixes: Vec) -> anyhow::Result { + let dir = tempfile::tempdir()?; + let db_filepath = dir.path().join("storage.db"); + let inner = Storage::load(db_filepath.clone(), prefixes).await?; + + Ok(TempStorage { inner, _dir: dir }) + } +} diff --git a/crates/cnidarium/src/store.rs b/crates/cnidarium/src/store.rs new file mode 100644 index 0000000000..3e703f1f05 --- /dev/null +++ b/crates/cnidarium/src/store.rs @@ -0,0 +1,2 @@ +pub(crate) mod multistore; +pub(crate) mod substore; diff --git a/crates/cnidarium/src/store/multistore.rs b/crates/cnidarium/src/store/multistore.rs new file mode 100644 index 0000000000..b52ede0dc3 --- /dev/null +++ b/crates/cnidarium/src/store/multistore.rs @@ -0,0 +1,233 @@ +use std::{fmt::Display, sync::Arc}; + +use super::substore::SubstoreConfig; + +/// A collection of substore, each with a unique prefix. +#[derive(Debug, Clone)] +pub struct MultistoreConfig { + pub main_store: Arc, + pub substores: Vec>, +} + +impl MultistoreConfig { + pub fn iter(&self) -> impl Iterator> { + self.substores.iter() + } + + /// Returns the substore matching the key's prefix, return `None` otherwise. + pub fn find_substore(&self, key: &[u8]) -> Option> { + if key.is_empty() { + return Some(self.main_store.clone()); + } + + // Note: This is a linear search, but the number of substores is small. + self.substores + .iter() + .find(|s| key.starts_with(s.prefix.as_bytes())) + .cloned() + } + + /// Route a key to a substore, and return the truncated key and the corresponding `SubstoreConfig`. + /// + /// This method is used for ordinary key-value operations. + /// + /// Note: since this method implements the routing logic for the multistore, + /// callers might prefer [`MultistoreConfig::match_prefix_str`] if they don't + /// need to route the key. + /// + /// # Routing + /// + If the key is a total match for the prefix, the **main store** is returned. + /// + If the key is not a total match for the prefix, the prefix is removed from + /// the key and the key is routed to the substore matching the prefix. + /// + If the key does not match any prefix, the key is routed to the **main store**. + /// + If a delimiter is prefixing the key, it is removed. + /// + /// # Examples + /// `prefix_a/key` -> `key` in `substore_a` + /// `prefix_akey` -> `prefix_akey` in `main_store + /// `prefix_a` -> `prefix_a` in `main_store` + /// `prefix_a/` -> `prefix_a/` in `main_store + /// `nonexistent_prefix` -> `nonexistent_prefix` in `main_store` + pub fn route_key_str<'a>(&self, key: &'a str) -> (&'a str, Arc) { + let config = self + .find_substore(key.as_bytes()) + .unwrap_or_else(|| self.main_store.clone()); + + // If the key is a total match, we want to return the key bound to the + // main store. This is where the root hash of the prefix tree is located. 
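+ // For example, the key "prefix_a" on its own is a total match for the `prefix_a`
+ // substore, so it is returned unchanged and bound to the main store.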
+ if key == config.prefix { + return (key, self.main_store.clone()); + } + + let truncated_key = key + .strip_prefix(&config.prefix) + .expect("key has the prefix of the matched substore"); + + // If the key does not contain a delimiter, we return the original key + // routed to the main store. This is because we do not want to allow + // collisions e.g. `prefix_a/key` and `prefix_akey`. + let Some(matching_key) = truncated_key.strip_prefix('/') else { + return (key, self.main_store.clone()); + }; + + // If the matching key is empty, we return the original key routed to + // the main store. This is because we do not want to allow empty keys + // in the substore. + if matching_key.is_empty() { + (key, self.main_store.clone()) + } else { + (matching_key, config) + } + } + + /// Route a key to a substore, and return the truncated key and the corresponding `SubstoreConfig`. + /// + /// This method is used for ordinary key-value operations. + /// + /// Note: since this method implements the routing logic for the multistore, + /// callers might prefer [`MultistoreConfig::match_prefix_bytes`] if they don't + /// need to route the key. + /// + /// # Routing + /// + If the key is a total match for the prefix, the **main store** is returned. + /// + If the key is not a total match for the prefix, the prefix is removed from + /// the key and the key is routed to the substore matching the prefix. + /// + If the key does not match any prefix, the key is routed to the **main store**. + /// + If a delimiter is prefixing the key, it is removed. + /// + /// # Examples + /// `prefix_a/key` -> `key` in `substore_a` + /// `prefix_a` -> `prefix_a` in `main_store` + /// `prefix_a/` -> `prefix_a/` in `main_store` + /// `nonexistent_prefix` -> `nonexistent_prefix` in `main_store` + pub fn route_key_bytes<'a>(&self, key: &'a [u8]) -> (&'a [u8], Arc) { + let config = self + .find_substore(key) + .unwrap_or_else(|| self.main_store.clone()); + + // If the key is a total match for the prefix, we return the original key + // routed to the main store. This is where subtree root hashes are stored. + if key == config.prefix.as_bytes() { + return (key, self.main_store.clone()); + } + + let truncated_key = key + .strip_prefix(config.prefix.as_bytes()) + .expect("key has the prefix of the matched substore"); + + // If the key does not contain a delimiter, we return the original key + // routed to the main store. This is because we do not want to allow + // collisions e.g. `prefix_a/key` and `prefix_akey`. + let Some(matching_key) = truncated_key.strip_prefix(b"/") else { + return (key, self.main_store.clone()); + }; + + // If the matching key is empty, we return the original key routed to + // the main store. This is because we do not want to allow empty keys + // in the substore. + if matching_key.is_empty() { + (key, self.main_store.clone()) + } else { + (matching_key, config) + } + } + + /// Returns the truncated prefix and the corresponding `SubstoreConfig`. + /// + /// This method is used to implement prefix iteration. + /// + /// Unlike [`MultistoreConfig::route_key_str`], this method does not do any routing. + /// It simply finds the substore matching the prefix, strip the prefix and delimiter, + /// and returns the truncated prefix and the corresponding `SubstoreConfig`. 
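+    /// In other words, the returned prefix is always relative to the matched
+    /// substore, and may be empty when the prefix names the substore itself.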
+ /// + /// # Examples + /// `prefix_a/key` -> `key` in `substore_a` + /// `prefix_a` -> "" in `substore_a` + /// `prefix_a/` -> "" in `substore_a` + /// `nonexistent_prefix` -> "" in `main_store` + pub fn match_prefix_str<'a>(&self, prefix: &'a str) -> (&'a str, Arc) { + let config = self + .find_substore(prefix.as_bytes()) + .unwrap_or_else(|| self.main_store.clone()); + + let truncated_prefix = prefix + .strip_prefix(&config.prefix) + .expect("key has the prefix of the matched substore"); + + let truncated_prefix = truncated_prefix + .strip_prefix('/') + .unwrap_or(truncated_prefix); + (truncated_prefix, config) + } + + /// Returns the truncated prefix and the corresponding `SubstoreConfig`. + /// + /// Unlike [`MultistoreConfig::route_key_str`], this method does not do any routing. + /// It simply finds the substore matching the prefix, strip the prefix and delimiter, + /// and returns the truncated prefix and the corresponding `SubstoreConfig`. + /// + /// This method is used to implement prefix iteration. + /// + /// # Examples + /// `prefix_a/key` -> `key` in `substore_a` + /// `prefix_a` -> "" in `substore_a` + /// `prefix_a/` -> "" in `substore_a` + /// `nonexistent_prefix` -> "" in `main_store` + pub fn match_prefix_bytes<'a>(&self, prefix: &'a [u8]) -> (&'a [u8], Arc) { + let config = self + .find_substore(prefix) + .unwrap_or_else(|| self.main_store.clone()); + + let truncated_prefix = prefix + .strip_prefix(config.prefix.as_bytes()) + .expect("key has the prefix of the matched substore"); + + let truncated_prefix = truncated_prefix + .strip_prefix(b"/") + .unwrap_or(truncated_prefix); + (truncated_prefix, config) + } +} + +impl Default for MultistoreConfig { + fn default() -> Self { + Self { + main_store: Arc::new(SubstoreConfig::new("")), + substores: vec![], + } + } +} + +/// Tracks the latest version of each substore, and wraps a `MultistoreConfig`. +#[derive(Default, Debug)] +pub struct MultistoreCache { + pub config: MultistoreConfig, + pub substores: std::collections::BTreeMap, jmt::Version>, +} + +impl Display for MultistoreCache { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let mut s = String::new(); + for (substore, version) in &self.substores { + s.push_str(&format!("{}: {}\n", substore.prefix, version)); + } + write!(f, "{}", s) + } +} + +impl MultistoreCache { + pub fn from_config(config: MultistoreConfig) -> Self { + Self { + config, + substores: std::collections::BTreeMap::new(), + } + } + + pub fn set_version(&mut self, substore: Arc, version: jmt::Version) { + self.substores.insert(substore, version); + } + + pub fn get_version(&self, substore: &Arc) -> Option { + self.substores.get(substore).cloned() + } +} diff --git a/crates/cnidarium/src/store/substore.rs b/crates/cnidarium/src/store/substore.rs new file mode 100644 index 0000000000..876a94b744 --- /dev/null +++ b/crates/cnidarium/src/store/substore.rs @@ -0,0 +1,546 @@ +use std::{ + fmt::{Display, Formatter}, + sync::Arc, +}; + +use anyhow::Result; +use borsh::BorshDeserialize; +use jmt::{ + storage::{HasPreimage, LeafNode, Node, NodeKey, TreeReader}, + KeyHash, RootHash, +}; +use rocksdb::{ColumnFamily, IteratorMode, ReadOptions}; +use tracing::Span; + +use crate::{snapshot::RocksDbSnapshot, Cache}; + +use jmt::storage::TreeWriter; + +/// Specifies the configuration of a substore, which is a prefixed subset of +/// the main store with its own merkle tree, nonverifiable data, preimage index, etc. 
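+///
+/// A small construction sketch (the `"ibc"` prefix is only an example):
+///
+/// ```ignore
+/// let cfg = SubstoreConfig::new("ibc");
+/// assert_eq!(cfg.prefix, "ibc");
+/// assert_eq!(cfg.prefix_with_delimiter, "ibc/");
+/// ```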
+#[derive(Debug, Eq, PartialEq, PartialOrd, Ord, Hash)] +pub struct SubstoreConfig { + /// The prefix of the substore. If empty, it is the root-level store config. + pub prefix: String, + /// The prefix of the substore including the trailing slash. + pub prefix_with_delimiter: String, + /// name: "substore-{prefix}-jmt" + /// role: persists the logical structure of the JMT + /// maps: `storage::DbNodeKey` to `jmt::Node` + // note: `DbNodeKey` is a newtype around `NodeKey` that serialize the key + // so that it maps to a lexicographical ordering with ascending jmt::Version. + cf_jmt: String, + /// name: "susbstore-{prefix}-jmt-keys" + /// role: JMT key index. + /// maps: key preimages to their keyhash. + cf_jmt_keys: String, + /// name: "substore-{prefix}-jmt-values" + /// role: stores the actual values that JMT leaves point to. + /// maps: KeyHash || BE(version) to an `Option>` + cf_jmt_values: String, + /// name: "substore-{prefix}-jmt-keys-by-keyhash" + /// role: index JMT keys by their keyhash. + /// maps: keyhashes to their preimage. + cf_jmt_keys_by_keyhash: String, + /// name: "substore-{prefix}-nonverifiable" + /// role: auxiliary data that is not part of our merkle tree, and thus not strictly + /// part of consensus. + /// maps: arbitrary keys to arbitrary values. + cf_nonverifiable: String, +} + +impl SubstoreConfig { + pub fn new(prefix: impl ToString) -> Self { + let prefix = prefix.to_string(); + Self { + cf_jmt: format!("substore-{}-jmt", prefix), + cf_jmt_keys: format!("substore-{}-jmt-keys", prefix), + cf_jmt_values: format!("substore-{}-jmt-values", prefix), + cf_jmt_keys_by_keyhash: format!("substore-{}-jmt-keys-by-keyhash", prefix), + cf_nonverifiable: format!("substore-{}-nonverifiable", prefix), + prefix_with_delimiter: format!("{}/", prefix), + prefix, + } + } + + /// Returns an iterator over all column families in this substore. + /// Note(erwan): This is verbose, but very lightweight. 
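+    /// The iterator yields the five column family names declared above:
+    /// `jmt`, `jmt-keys`, `jmt-values`, `jmt-keys-by-keyhash`, and `nonverifiable`.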
+ pub fn columns(&self) -> impl Iterator { + std::iter::once(&self.cf_jmt) + .chain(std::iter::once(&self.cf_jmt_keys)) + .chain(std::iter::once(&self.cf_jmt_values)) + .chain(std::iter::once(&self.cf_jmt_keys_by_keyhash)) + .chain(std::iter::once(&self.cf_nonverifiable)) + } + + pub fn cf_jmt<'s>(&self, db_handle: &'s Arc) -> &'s ColumnFamily { + let column = self.cf_jmt.as_str(); + db_handle.cf_handle(column).unwrap_or_else(|| { + panic!( + "jmt column family not found for prefix: {}, substore: {}", + column, self.prefix + ) + }) + } + + pub fn cf_jmt_values<'s>(&self, db_handle: &'s Arc) -> &'s ColumnFamily { + let column = self.cf_jmt_values.as_str(); + db_handle.cf_handle(column).unwrap_or_else(|| { + panic!( + "jmt-values column family not found for prefix: {}, substore: {}", + column, self.prefix + ) + }) + } + + pub fn cf_jmt_keys_by_keyhash<'s>(&self, db_handle: &'s Arc) -> &'s ColumnFamily { + let column = self.cf_jmt_keys_by_keyhash.as_str(); + db_handle.cf_handle(column).unwrap_or_else(|| { + panic!( + "jmt-keys-by-keyhash column family not found for prefix: {}, substore: {}", + column, self.prefix + ) + }) + } + + pub fn cf_jmt_keys<'s>(&self, db_handle: &'s Arc) -> &'s ColumnFamily { + let column = self.cf_jmt_keys.as_str(); + db_handle.cf_handle(column).unwrap_or_else(|| { + panic!( + "jmt-keys column family not found for prefix: {}, substore: {}", + column, self.prefix + ) + }) + } + + pub fn cf_nonverifiable<'s>(&self, db_handle: &'s Arc) -> &'s ColumnFamily { + let column = self.cf_nonverifiable.as_str(); + db_handle.cf_handle(column).unwrap_or_else(|| { + panic!( + "nonverifiable column family not found for prefix: {}, substore: {}", + column, self.prefix + ) + }) + } + + pub fn latest_version_from_db( + &self, + db_handle: &Arc, + ) -> Result> { + Ok(self + .get_rightmost_leaf_from_db(db_handle)? + .map(|(node_key, _)| node_key.version())) + } + + pub fn latest_version_from_snapshot( + &self, + db_handle: &Arc, + snapshot: &RocksDbSnapshot, + ) -> Result> { + Ok(self + .get_rightmost_leaf_from_snapshot(db_handle, snapshot)? + .map(|(node_key, _)| node_key.version())) + } + + // TODO(erwan): having two different implementations of this is a bit weird and should + // be refactored, or remodeled. The DB version is only used during initialization, before + // a `Snapshot` is available. + fn get_rightmost_leaf_from_db( + &self, + db_handle: &Arc, + ) -> Result> { + let cf_jmt = self.cf_jmt(db_handle); + let mut iter = db_handle.raw_iterator_cf(cf_jmt); + iter.seek_to_last(); + + if iter.valid() { + let node_key = + DbNodeKey::decode(iter.key().expect("all DB entries should have a key"))? + .into_inner(); + let node = + Node::try_from_slice(iter.value().expect("all DB entries should have a value"))?; + + if let Node::Leaf(leaf_node) = node { + return Ok(Some((node_key, leaf_node))); + } + } else { + // There are no keys in the database + } + + Ok(None) + } + + fn get_rightmost_leaf_from_snapshot( + &self, + db_handle: &Arc, + snapshot: &RocksDbSnapshot, + ) -> Result> { + let cf_jmt = self.cf_jmt(db_handle); + let mut iter = snapshot.iterator_cf(cf_jmt, IteratorMode::End); + let Some((raw_key, raw_value)) = iter.next().transpose()? else { + return Ok(None); + }; + + let node_key = DbNodeKey::decode(&raw_key)?.into_inner(); + let Node::Leaf(leaf) = Node::try_from_slice(&raw_value)? 
else { + return Ok(None); + }; + Ok(Some((node_key, leaf))) + } +} + +impl Display for SubstoreConfig { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "SubstoreConfig(prefix={})", self.prefix) + } +} + +/// A read-only view into a substore at a specific state version. +/// +/// A [`SubstoreSnapshot`] is lightweight and cheap to create, it can be +/// instantiated on-demand when a read-only view of a substore's state is +/// needed. +pub struct SubstoreSnapshot { + pub(crate) config: Arc, + pub(crate) rocksdb_snapshot: Arc, + pub(crate) version: jmt::Version, + pub(crate) db: Arc, +} + +impl SubstoreSnapshot { + pub fn root_hash(&self) -> Result { + let version = self.version(); + let tree = jmt::Sha256Jmt::new(self); + Ok(tree + .get_root_hash_option(version)? + .unwrap_or(jmt::RootHash([0; 32]))) + } + + pub fn version(&self) -> jmt::Version { + self.version + } + + /// Returns some value corresponding to the key, along with an ICS23 existence proof + /// up to the current JMT root hash. If the key is not present, returns `None` and a + /// non-existence proof. + pub(crate) fn get_with_proof( + &self, + key: Vec, + ) -> Result<(Option>, ics23::CommitmentProof)> { + let version = self.version(); + let tree = jmt::Sha256Jmt::new(self); + tree.get_with_ics23_proof(key, version) + } + + /// Helper function used by `get_raw` and `prefix_raw`. + /// + /// Reads from the JMT will fail if the root is missing; this method + /// special-cases the empty tree case so that reads on an empty tree just + /// return None. + pub fn get_jmt(&self, key: jmt::KeyHash) -> Result>> { + let tree = jmt::Sha256Jmt::new(self); + match tree.get(key, self.version()) { + Ok(Some(value)) => { + tracing::trace!(substore = ?self.config.prefix, version = ?self.version(), ?key, value = ?hex::encode(&value), "read from tree"); + Ok(Some(value)) + } + Ok(None) => { + tracing::trace!(substore = ?self.config.prefix, version = ?self.version(), ?key, "key not found in tree"); + Ok(None) + } + // This allows for using the Overlay on an empty database without + // errors We only skip the `MissingRootError` if the `version` is + // `u64::MAX`, the pre-genesis version. Otherwise, a missing root + // actually does indicate a problem. + Err(e) + if e.downcast_ref::().is_some() + && self.version() == u64::MAX => + { + tracing::trace!(substore = ?self.config.prefix, version = ?self.version(), "no data available at this version"); + Ok(None) + } + Err(e) => Err(e), + } + } +} + +impl TreeReader for SubstoreSnapshot { + /// Gets a value by identifier, returning the newest value whose version is *less than or + /// equal to* the specified version. Returns `None` if the value does not exist. + fn get_value_option( + &self, + max_version: jmt::Version, + key_hash: KeyHash, + ) -> Result> { + let cf_jmt_values = self.config.cf_jmt_values(&self.db); + + // Prefix ranges exclude the upper bound in the iterator result. + // This means that when requesting the largest possible version, there + // is no way to specify a range that is inclusive of `u64::MAX`. + if max_version == u64::MAX { + let k = VersionedKeyHash { + version: u64::MAX, + key_hash, + }; + + if let Some(v) = self.rocksdb_snapshot.get_cf(cf_jmt_values, k.encode())? 
{ + let maybe_value: Option> = BorshDeserialize::try_from_slice(v.as_ref())?; + return Ok(maybe_value); + } + } + + let mut lower_bound = key_hash.0.to_vec(); + lower_bound.extend_from_slice(&0u64.to_be_bytes()); + + let mut upper_bound = key_hash.0.to_vec(); + // The upper bound is excluded from the iteration results. + upper_bound.extend_from_slice(&(max_version.saturating_add(1)).to_be_bytes()); + + let mut readopts = ReadOptions::default(); + readopts.set_iterate_lower_bound(lower_bound); + readopts.set_iterate_upper_bound(upper_bound); + let mut iterator = + self.rocksdb_snapshot + .iterator_cf_opt(cf_jmt_values, readopts, IteratorMode::End); + + let Some(tuple) = iterator.next() else { + return Ok(None); + }; + + let (_key, v) = tuple?; + let maybe_value = BorshDeserialize::try_from_slice(v.as_ref())?; + Ok(maybe_value) + } + + /// Gets node given a node key. Returns `None` if the node does not exist. + fn get_node_option(&self, node_key: &NodeKey) -> Result> { + let db_node_key = DbNodeKey::from(node_key.clone()); + tracing::trace!(?node_key); + + let cf_jmt = self.config.cf_jmt(&self.db); + let value = self + .rocksdb_snapshot + .get_cf(cf_jmt, db_node_key.encode()?)? + .map(|db_slice| Node::try_from_slice(&db_slice)) + .transpose()?; + + tracing::trace!(?node_key, ?value); + Ok(value) + } + + fn get_rightmost_leaf(&self) -> Result> { + let cf_jmt = self.config.cf_jmt(&self.db); + let mut iter = self.rocksdb_snapshot.raw_iterator_cf(cf_jmt); + iter.seek_to_last(); + + if iter.valid() { + let node_key = + DbNodeKey::decode(iter.key().expect("all DB entries should have a key"))? + .into_inner(); + let node = + Node::try_from_slice(iter.value().expect("all DB entries should have a value"))?; + + if let Node::Leaf(leaf_node) = node { + return Ok(Some((node_key, leaf_node))); + } + } else { + // There are no keys in the database + } + + Ok(None) + } +} + +impl HasPreimage for SubstoreSnapshot { + fn preimage(&self, key_hash: KeyHash) -> Result>> { + let cf_jmt_keys_by_keyhash = self.config.cf_jmt_keys_by_keyhash(&self.db); + + Ok(self + .rocksdb_snapshot + .get_cf(cf_jmt_keys_by_keyhash, key_hash.0)?) 
+ } +} + +pub struct SubstoreStorage { + pub(crate) substore_snapshot: SubstoreSnapshot, +} + +impl SubstoreStorage { + pub async fn commit( + self, + cache: Cache, + mut write_batch: rocksdb::WriteBatch, + write_version: jmt::Version, + perform_migration: bool, + ) -> Result<(RootHash, rocksdb::WriteBatch)> { + let span = Span::current(); + + tokio::task + ::spawn_blocking(move || { + span.in_scope(|| { + let jmt = jmt::Sha256Jmt::new(&self.substore_snapshot); + let unwritten_changes: Vec<_> = cache + .unwritten_changes + .into_iter() + .map(|(key, some_value)| (KeyHash::with::(&key), key, some_value)) + .collect(); + + let cf_jmt_keys = self.substore_snapshot.config.cf_jmt_keys(&self.substore_snapshot.db); + let cf_jmt_keys_by_keyhash = self.substore_snapshot.config.cf_jmt_keys_by_keyhash(&self.substore_snapshot.db); + let cf_jmt = self.substore_snapshot.config.cf_jmt(&self.substore_snapshot.db); + let cf_jmt_values = self.substore_snapshot.config.cf_jmt_values(&self.substore_snapshot.db); + + /* Keyhash and pre-image indices */ + for (keyhash, key_preimage, value) in unwritten_changes.iter() { + match value { + Some(_) => { /* Key inserted, or updated, so we add it to the keyhash index */ + write_batch.put_cf(cf_jmt_keys, key_preimage, keyhash.0); + write_batch + .put_cf(cf_jmt_keys_by_keyhash, keyhash.0, key_preimage) + } + None => { /* Key deleted, so we delete it from the preimage and keyhash index entries */ + write_batch.delete_cf(cf_jmt_keys, key_preimage); + write_batch.delete_cf(cf_jmt_keys_by_keyhash, keyhash.0); + } + }; + } + + // We only track the keyhash and possible values; at the time of writing, + // `rustfmt` panics on inlining the closure, so we use a helper function to skip the key. + let skip_key = |(keyhash, _key, some_value)| (keyhash, some_value); + + let (root_hash, batch) = if perform_migration { + jmt.append_value_set(unwritten_changes.into_iter().map(skip_key), write_version)? + } else { + jmt.put_value_set(unwritten_changes.into_iter().map(skip_key), write_version)? + }; + + /* JMT nodes and values */ + for (node_key, node) in batch.node_batch.nodes() { + let db_node_key_bytes= DbNodeKey::encode_from_node_key(node_key)?; + let value_bytes = borsh::to_vec(node)?; + tracing::trace!(?db_node_key_bytes, value_bytes = ?hex::encode(&value_bytes)); + write_batch.put_cf(cf_jmt, db_node_key_bytes, value_bytes); + } + + + for ((version, key_hash), some_value) in batch.node_batch.values() { + let key_bytes = VersionedKeyHash::encode_from_keyhash(key_hash, version); + let value_bytes = borsh::to_vec(some_value)?; + tracing::trace!(?key_bytes, value_bytes = ?hex::encode(&value_bytes)); + write_batch.put_cf(cf_jmt_values, key_bytes, value_bytes); + } + + tracing::trace!(?root_hash, "accumulated node changes in the write batch"); + + + for (k, v) in cache.nonverifiable_changes.into_iter() { + let cf_nonverifiable = self.substore_snapshot.config.cf_nonverifiable(&self.substore_snapshot.db); + match v { + Some(v) => { + tracing::trace!(key = ?crate::EscapedByteSlice(&k), value = ?crate::EscapedByteSlice(&v), "put nonverifiable key"); + write_batch.put_cf(cf_nonverifiable, k, &v); + } + None => { + write_batch.delete_cf(cf_nonverifiable, k); + } + }; + } + + Ok((root_hash, write_batch)) + }) + }) + .await? + } +} + +impl TreeWriter for SubstoreStorage { + fn write_node_batch(&self, _node_batch: &jmt::storage::NodeBatch) -> Result<()> { + // The "write"-part of the `TreeReader + TreeWriter` jmt architecture does not work + // well with a deferred write strategy. 
+ // What we would like to do is to accumulate the changes in a write batch, and then commit + // them all at once. This isn't possible to do easily because the `TreeWriter` trait + // rightfully does not expose RocksDB-specific types in its API. + // + // The alternative is to use interior mutability but the semantics become + // so implementation specific that we lose the benefits of the trait abstraction. + unimplemented!("We inline the tree writing logic in the `commit` method") + } +} + +/// An ordered node key is a node key that is encoded in a way that +/// preserves the order of the node keys in the database. +pub struct DbNodeKey(pub NodeKey); + +impl DbNodeKey { + pub fn from(node_key: NodeKey) -> Self { + DbNodeKey(node_key) + } + + pub fn into_inner(self) -> NodeKey { + self.0 + } + + pub fn encode(&self) -> Result> { + Self::encode_from_node_key(&self.0) + } + + pub fn encode_from_node_key(node_key: &NodeKey) -> Result> { + let mut bytes = Vec::new(); + bytes.extend_from_slice(&node_key.version().to_be_bytes()); // encode version as big-endian + let rest = borsh::to_vec(node_key)?; + bytes.extend_from_slice(&rest); + Ok(bytes) + } + + pub fn decode(bytes: impl AsRef<[u8]>) -> Result { + if bytes.as_ref().len() < 8 { + anyhow::bail!("byte slice is too short") + } + // Ignore the bytes that encode the version + let node_key_slice = bytes.as_ref()[8..].to_vec(); + let node_key = borsh::BorshDeserialize::try_from_slice(&node_key_slice)?; + Ok(DbNodeKey(node_key)) + } +} + +/// Represent a JMT key hash at a specific `jmt::Version` +/// This is used to index the JMT values in RocksDB. +#[derive(Clone, Debug)] +pub struct VersionedKeyHash { + pub key_hash: KeyHash, + pub version: jmt::Version, +} + +impl VersionedKeyHash { + pub fn encode(&self) -> Vec { + VersionedKeyHash::encode_from_keyhash(&self.key_hash, &self.version) + } + + pub fn encode_from_keyhash(key_hash: &KeyHash, version: &jmt::Version) -> Vec { + let mut buf: Vec = key_hash.0.to_vec(); + buf.extend_from_slice(&version.to_be_bytes()); + buf + } + + #[allow(dead_code)] + pub fn decode(buf: Vec) -> Result { + if buf.len() != 40 { + Err(anyhow::anyhow!( + "could not decode buffer into VersionedKey (invalid size)" + )) + } else { + let raw_key_hash: [u8; 32] = buf[0..32] + .try_into() + .expect("buffer is at least 40 bytes wide"); + let key_hash = KeyHash(raw_key_hash); + + let raw_version: [u8; 8] = buf[32..40] + .try_into() + .expect("buffer is at least 40 bytes wide"); + let version: u64 = u64::from_be_bytes(raw_version); + + Ok(VersionedKeyHash { version, key_hash }) + } + } +} diff --git a/crates/cnidarium/src/tests.rs b/crates/cnidarium/src/tests.rs new file mode 100644 index 0000000000..9fe5bb43a3 --- /dev/null +++ b/crates/cnidarium/src/tests.rs @@ -0,0 +1,1468 @@ +use crate::*; +use futures::StreamExt; + +/// Checks that deleting a nonexistent key behaves as expected (no errors, it's already gone) +#[tokio::test] +async fn delete_nonexistent_key() -> anyhow::Result<()> { + let tmpdir = tempfile::tempdir()?; + // Initialize an empty Storage in the new directory + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + let mut state_init = StateDelta::new(storage.latest_snapshot()); + state_init.delete("nonexist".to_string()); + storage.commit(state_init).await?; + + Ok(()) +} + +#[tokio::test] +/// In rare cases, the database lock has not been (yet) released by the time +/// the next Storage::load() call is made. 
This is fixed by `Storage::release()` +/// which mimicks the behavior of an async drop (releasing resources). +async fn db_lock_is_released() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + storage.release().await; + + Ok(()) +} + +#[tokio::test] +async fn simple_flow() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + // Initialize an empty Storage in the new directory + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + // Version -1 to Version 0 writes + // + // tx00: test => test + // tx00: c/aa => 0 [object store] + // tx00: c/ab => 1 [object store] + // tx00: c/ac => 2 [object store] + // tx00: c/ad => 3 [object store] + // tx00: iA => A [nonverifiable store] + // tx00: iC => C [nonverifiable store] + // tx00: iF => F [nonverifiable store] + // tx00: iD => D [nonverifiable store] + // tx01: a/aa => aa + // tx01: a/aaa => aaa + // tx01: a/ab => ab + // tx01: a/z => z + // tx01: c/ab => 10 [object store] + // tx01: c/ac => [deleted] [object store] + // + // Version 0 to Version 1 writes + // tx10: test => [deleted] + // tx10: a/aaa => [deleted] + // tx10: a/c => c + // tx10: iB => B [nonverifiable store] + // tx11: a/ab => ab2 + // tx11: iD => [deleted] nonverifiable store] + + let mut state_init = StateDelta::new(storage.latest_snapshot()); + // Check that reads on an empty state return Ok(None) + assert_eq!(state_init.get_raw("test").await?, None); + assert_eq!(state_init.get_raw("a/aa").await?, None); + + // Create tx00 + let mut tx00 = StateDelta::new(&mut state_init); + tx00.put_raw("test".to_owned(), b"test".to_vec()); + tx00.object_put("c/aa", 0u64); + tx00.object_put("c/ab", 1u64); + tx00.object_put("c/ac", 2u64); + tx00.object_put("c/ad", 3u64); + tx00.nonverifiable_put_raw(b"iA".to_vec(), b"A".to_vec()); + tx00.nonverifiable_put_raw(b"iC".to_vec(), b"C".to_vec()); + tx00.nonverifiable_put_raw(b"iF".to_vec(), b"F".to_vec()); + tx00.nonverifiable_put_raw(b"iD".to_vec(), b"D".to_vec()); + + // Check reads against tx00: + // This is present in tx00 + assert_eq!(tx00.get_raw("test").await?, Some(b"test".to_vec())); + // This is missing in tx00 and state_init and tree is empty + assert_eq!(tx00.get_raw("a/aa").await?, None); + // Present in tx00 object store + assert_eq!(tx00.object_get("c/aa"), Some(0u64)); + assert_eq!(tx00.object_get("c/ab"), Some(1u64)); + assert_eq!(tx00.object_get("c/ac"), Some(2u64)); + assert_eq!(tx00.object_get("c/ad"), Some(3u64)); + // Missing in tx00 object store + assert_eq!(tx00.object_get::("nonexist"), None); + // Nonconsensus range checks + let mut range = tx00.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + 
Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Now apply the transaction to state_init + tx00.apply(); + assert_eq!(state_init.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state_init.get_raw("a/aa").await?, None); + // Present in state_init object store + assert_eq!(state_init.object_get("c/aa"), Some(0u64)); + assert_eq!(state_init.object_get("c/ab"), Some(1u64)); + assert_eq!(state_init.object_get("c/ac"), Some(2u64)); + assert_eq!(state_init.object_get("c/ad"), Some(3u64)); + // Missing in state_init object store + assert_eq!(state_init.object_get::("nonexist"), None); + // Nonconsensus range checks + let mut range = state_init.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Create a transaction writing the other keys. + let mut tx01 = StateDelta::new(&mut state_init); + tx01.put_raw("a/aa".to_owned(), b"aa".to_vec()); + tx01.put_raw("a/aaa".to_owned(), b"aaa".to_vec()); + tx01.put_raw("a/ab".to_owned(), b"ab".to_vec()); + tx01.put_raw("a/z".to_owned(), b"z".to_vec()); + tx01.object_put("c/ab", 10u64); + tx01.object_delete("c/ac"); + + // Check reads against tx01: + // This is missing in tx01 and reads through to state_init + assert_eq!(tx01.get_raw("test").await?, Some(b"test".to_vec())); + // This is present in tx01 + assert_eq!(tx01.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(tx01.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(tx01.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(tx01.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in tx01 and in state_init + assert_eq!(tx01.get_raw("a/c").await?, None); + let mut range = tx01.prefix_raw("a/"); + let mut range_keys = tx01.prefix_keys("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aaa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/ab".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range_keys.next().await.transpose()?, Some("a/z".to_owned())); + assert_eq!(range.next().await.transpose()?, None); + assert_eq!(range_keys.next().await.transpose()?, None); + std::mem::drop(range); + std::mem::drop(range_keys); + + // Now apply the transaction to state_init + tx01.apply(); + + // Check reads against state_init: + // This is present in state_init + 
assert_eq!(state_init.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state_init.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state_init.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(state_init.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state_init.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in state_init + assert_eq!(state_init.get_raw("a/c").await?, None); + let mut range = state_init.prefix_raw("a/"); + let mut range_keys = state_init.prefix_keys("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aaa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/ab".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range_keys.next().await.transpose()?, Some("a/z".to_owned())); + assert_eq!(range.next().await.transpose()?, None); + assert_eq!(range_keys.next().await.transpose()?, None); + std::mem::drop(range); + std::mem::drop(range_keys); + + // Now commit state_init to storage + storage.commit(state_init).await?; + + // Now we have version 0. + let state0 = storage.latest_snapshot(); + assert_eq!(state0.version(), 0); + let mut state0 = StateDelta::new(state0); + + // Check reads against state0: + // This is missing in state0 and present in JMT + assert_eq!(state0.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state0.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(state0.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state0.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in state0 and missing in JMT + assert_eq!(state0.get_raw("a/c").await?, None); + let mut range = state0.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus prefix checks + let mut range = state0.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Start building a transaction + let mut tx10 = StateDelta::new(&mut state0); + tx10.delete("test".to_owned()); + tx10.delete("a/aaa".to_owned()); + tx10.put_raw("a/c".to_owned(), b"c".to_vec()); + tx10.nonverifiable_put_raw(b"iB".to_vec(), b"B".to_vec()); + + // Check reads against tx10: + // This is 
deleted in tx10, missing in state0, present in JMT + assert_eq!(tx10.get_raw("test").await?, None); + assert_eq!(tx10.get_raw("a/aaa").await?, None); + // This is missing in tx10, missing in state0, present in JMT + assert_eq!(tx10.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(tx10.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(tx10.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is present in tx10, missing in state0, missing in JMT + assert_eq!(tx10.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = tx10.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus prefix checks + let mut range = tx10.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Apply tx10 to state0 + tx10.apply(); + + // Check reads against state0 + // This is deleted in state0, present in JMT + assert_eq!(state0.get_raw("test").await?, None); + assert_eq!(state0.get_raw("a/aaa").await?, None); + // This is missing in state0, present in JMT + assert_eq!(state0.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state0.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is present in state0, missing in JMT + assert_eq!(state0.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = state0.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Start building another transaction + let mut tx11 = StateDelta::new(&mut state0); + tx11.put_raw("a/ab".to_owned(), b"ab2".to_vec()); + tx11.nonverifiable_delete(b"iD".to_vec()); + + // Check reads against tx11: + // This is present in tx11, missing in state0, present in JMT + assert_eq!(tx11.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + // This is missing in tx11, deleted in state0, present in JMT + assert_eq!(tx11.get_raw("test").await?, None); + assert_eq!(tx11.get_raw("a/aaa").await?, None); + // This is missing in tx11, missing in state0, present in JMT + assert_eq!(tx11.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(tx11.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in tx10, present in state0, missing in JMT + 
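+    // (tx11 reads through to state0 here, which holds the `a/c` value written by tx10.)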
assert_eq!(tx11.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = tx11.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = tx11.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Apply tx11 to state0 + tx11.apply(); + + // Check reads against state0 + // This is deleted in state0, present in JMT + assert_eq!(state0.get_raw("test").await?, None); + assert_eq!(state0.get_raw("a/aaa").await?, None); + // This is missing in state0, present in JMT + assert_eq!(state0.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is present in state0, missing in JMT + assert_eq!(state0.get_raw("a/c").await?, Some(b"c".to_vec())); + // This is present in state0, present in JMT + assert_eq!(state0.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + let mut range = state0.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let mut range = state0.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Create another fork of state 0 while we've edited the first one but before we commit. + let state0a = storage.latest_snapshot(); + assert_eq!(state0a.version(), 0); + + // Commit state0 as state1. 
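+    // (Committing consumes `state0`; the `state0a` snapshot taken above remains
+    // pinned at version 0 and is checked again further down.)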
+ storage.commit(state0).await?; + + let state1 = storage.latest_snapshot(); + assert_eq!(state1.version(), 1); + + // Check reads against state1 + assert_eq!(state1.get_raw("test").await?, None); + assert_eq!(state1.get_raw("a/aaa").await?, None); + assert_eq!(state1.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state1.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + assert_eq!(state1.get_raw("a/z").await?, Some(b"z".to_vec())); + assert_eq!(state1.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = state1.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let mut range = state1.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check reads against state0a + assert_eq!(state0a.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state0a.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0a.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(state0a.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state0a.get_raw("a/z").await?, Some(b"z".to_vec())); + assert_eq!(state0a.get_raw("a/c").await?, None); + let mut range = state0a.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = state0a.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Now, check that closing and reloading works. + + // First, be sure to explicitly drop anything keeping a reference to the + // RocksDB instance: + storage.release().await; + // std::mem::drop(state0); // consumed in commit() + std::mem::drop(state0a); + std::mem::drop(state1); + + // Now reload the storage from the same directory... + let storage_a = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + let state1a = storage_a.latest_snapshot(); + + // Check that we reload at the correct version ... 
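+    // (Version numbering starts at 0 for the first commit, so after the second
+    // commit the latest version is 1.)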
+ assert_eq!(state1a.version(), 1); + + // Check reads against state1a after reloading the DB + assert_eq!(state1a.get_raw("test").await?, None); + assert_eq!(state1a.get_raw("a/aaa").await?, None); + assert_eq!(state1a.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state1a.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + assert_eq!(state1a.get_raw("a/z").await?, Some(b"z".to_vec())); + assert_eq!(state1a.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = state1a.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = state1a.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let mut range = state1a.nonverifiable_range_raw(Some(b"i"), b"A".to_vec()..b"C".to_vec())?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let mut range = state1a.nonverifiable_range_raw(Some(b"i"), b"B".to_vec()..b"C".to_vec())?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state1a.nonverifiable_range_raw(Some(b"i"), b"A".to_vec()..b"F".to_vec())?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state1a.nonverifiable_range_raw(Some(b"i"), b"A".to_vec()..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + + let mut range = state1a.nonverifiable_range_raw(Some(b"i"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), 
b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state1a.nonverifiable_range_raw(None, b"i".to_vec()..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state1a.nonverifiable_range_raw(None, b"iA".to_vec()..b"iB".to_vec())?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + Ok(()) +} + +#[tokio::test] +/// Test that range queries over the nonverifiable store work as expected. +/// A range query can have a prefix, a start key, and an end key, so we want to test: +/// - queries with no prefix, no start key, and no end key +/// - queries with no prefix, a start key, and no end key +/// - queries with no prefix, no start key, and an end key +/// - queries with no prefix, a start key, and an end key +/// - queries with a prefix, no start key, and no end key +/// - queries with a prefix, a start key, and no end key +/// - queries with a prefix, no start key, and an end key +async fn range_queries_basic() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + let mut state_init = StateDelta::new(storage.latest_snapshot()); + + // Check that range queries over an empty store work does not return any results. + let mut range = state_init.nonverifiable_range_raw(None, ..)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut tx00 = StateDelta::new(&mut state_init); + tx00.nonverifiable_put_raw(b"iA".to_vec(), b"A".to_vec()); + tx00.nonverifiable_put_raw(b"iC".to_vec(), b"C".to_vec()); + tx00.nonverifiable_put_raw(b"iF".to_vec(), b"F".to_vec()); + tx00.nonverifiable_put_raw( + b"random_key_not_prefixed".to_vec(), + b"quetzalcoatl".to_vec(), + ); + + // Check that keys with the wrong prefix are not included in the results. + let mut range = tx00.nonverifiable_range_raw(Some(b"i"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Insert an entry that precedes the current last entry + tx00.nonverifiable_put_raw(b"iD".to_vec(), b"D".to_vec()); + + // Check that the new entry is included in the results. 
+ let mut range = tx00.nonverifiable_range_raw(Some(b"i"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + tx00.apply(); + // Check that the new entry is included in the results. + let mut range = state_init.nonverifiable_range_raw(Some(b"i"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut tx01 = StateDelta::new(&mut state_init); + tx01.nonverifiable_delete(b"iA".to_vec()); + tx01.nonverifiable_put_raw(b"iC".to_vec(), b"China".to_vec()); + tx01.nonverifiable_put_raw(b"iD".to_vec(), b"Denmark".to_vec()); + tx01.nonverifiable_put_raw(b"iF".to_vec(), b"Finland".to_vec()); + + // Check that the updated entries are included in the results. + let mut range = tx01.nonverifiable_range_raw(Some(b"i"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"China".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"Denmark".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"Finland".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check that setting the lower bound to the first key doesn't change the results. + let mut range = tx01.nonverifiable_range_raw(Some(b"i"), b"C".to_vec()..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"China".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"Denmark".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"Finland".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check that setting the upper bound to a key that doesn't exist doesn't change the results. + let mut range = tx01.nonverifiable_range_raw(Some(b"i"), ..b"Z".to_vec())?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"China".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"Denmark".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"Finland".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check that using a manually prefixed keys doesn't change the results. 
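+    // (Here the prefix is folded into an explicit byte range, `b"i"..b"iZ"`, rather
+    // than being passed as the `prefix` argument.)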
+ let mut range = tx01.nonverifiable_range_raw(None, b"i".to_vec()..b"iZ".to_vec())?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"China".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"Denmark".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"Finland".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + tx01.apply(); + + // Check that all entries are included in the results. + let mut range = state_init.nonverifiable_range_raw(None, ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"China".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"Denmark".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"Finland".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(( + b"random_key_not_prefixed".to_vec(), + b"quetzalcoatl".to_vec() + )) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut tx02 = StateDelta::new(&mut state_init); + + for i in 0..=100 { + tx02.nonverifiable_put_raw( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec(), + ); + } + + // Check that all compact blocks are included in the results. + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), ..)?; + for i in 0..=100 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec() + )) + ); + } + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let cb_10 = format!("{:020}", 10).as_bytes().to_vec(); + + // Check that setting a lower bound works. + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_10.clone()..)?; + for i in 10..=100 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec() + )), + "i={}", + i + ); + } + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let cb_20 = format!("{:020}", 20).as_bytes().to_vec(); + + // Check that specifying a full range works. + let mut range = + tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_10.clone()..cb_20.clone())?; + for i in 10..20 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec() + )) + ); + } + assert_eq!(range.next().await.transpose()?, None); + + // Check that leaving the lower bound unspecified works. + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), ..cb_20)?; + for i in 0..20 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec() + )) + ); + } + assert_eq!(range.next().await.transpose()?, None); + + // Delete compact blocks [9;21] + let deleted_keys = (9..=21) + .map(|i| format!("compact_block/{:020}", i).as_bytes().to_vec()) + .collect::>(); + for key in deleted_keys.iter() { + tx02.nonverifiable_delete(key.clone()); + } + + // Check that the deleted compact blocks are not included in the results. 
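+    // (Blocks 9 through 21 were deleted above, so the iteration should skip straight
+    // from block 8 to block 22.)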
+ let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), ..)?; + for i in 0..=100 { + if (9..=21).contains(&i) { + continue; + } + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec() + )), + "i={}", + i + ); + } + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let cb_9 = format!("{:020}", 9).as_bytes().to_vec(); + let cb_15 = format!("{:020}", 15).as_bytes().to_vec(); + let cb_21 = format!("{:020}", 21).as_bytes().to_vec(); + let cb_22 = format!("{:020}", 22).as_bytes().to_vec(); + let cb_23 = format!("{:020}", 23).as_bytes().to_vec(); + + // Check that the deleted compact blocks are not included in the results, even if they're in the bound argument. + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_9.clone()..)?; + for i in 22..=100 { + let found = range.next().await.transpose()?; + let foundstr = String::from_utf8(found.clone().unwrap().0).unwrap(); + println!("{i}: foundstr={}", foundstr); + assert_eq!( + found, + Some(( + format!("compact_block/{:020}", i).as_bytes().to_vec(), + format!("{}", i).as_bytes().to_vec() + )) + ); + } + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check a variety of deleted bounds. + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_9.clone()..cb_15)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_9.clone()..cb_21)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_9.clone()..cb_22)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = tx02.nonverifiable_range_raw(Some(b"compact_block/"), cb_9.clone()..cb_23)?; + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", 22).as_bytes().to_vec(), + format!("{}", 22).as_bytes().to_vec() + )) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + Ok(()) +} + +#[tokio::test] +/// Test that overwrites work correctly, and that we can read the latest value. +async fn range_query_overwrites() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + let state_init = StateDelta::new(storage.latest_snapshot()); + // Check that reads on an empty state return Ok(None) + let mut range = state_init.nonverifiable_range_raw(None, ..)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + Ok(()) +} + +#[tokio::test] +/// Test that inserting a value that precedes the peeked value works. 
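+/// Concretely (mirroring the body below): with `b` and `c` already committed, writing a
+/// new key `a` into a delta should make it show up first in an unbounded range query:
+///
+/// ```ignore
+/// state.nonverifiable_put_raw(b"a".to_vec(), b"aroma".to_vec());
+/// let mut range = state.nonverifiable_range_raw(None, ..)?;
+/// assert_eq!(
+///     range.next().await.transpose()?,
+///     Some((b"a".to_vec(), b"aroma".to_vec()))
+/// );
+/// ```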
+async fn range_query_prepend_peeked_value() -> anyhow::Result<()> { + use crate::StateWrite; + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + let mut state_init = StateDelta::new(storage.latest_snapshot()); + state_init.nonverifiable_put_raw(b"b".to_vec(), b"beluga".to_vec()); + state_init.nonverifiable_put_raw(b"c".to_vec(), b"charm".to_vec()); + storage.commit(state_init).await?; + + let mut state = StateDelta::new(storage.latest_snapshot()); + let mut range = state.nonverifiable_range_raw(None, ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"b".to_vec(), b"beluga".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"c".to_vec(), b"charm".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + state.nonverifiable_put_raw(b"a".to_vec(), b"aroma".to_vec()); + + // Check that the new value preceding the first peeked value is returned (no bound) + let mut range = state.nonverifiable_range_raw(None, ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"a".to_vec(), b"aroma".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"b".to_vec(), b"beluga".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"c".to_vec(), b"charm".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check that the new value preceding the first peeked value is returned (with bound) + let mut range = state.nonverifiable_range_raw(None, b"a".to_vec()..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"a".to_vec(), b"aroma".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"b".to_vec(), b"beluga".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"c".to_vec(), b"charm".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check that the new value preceding the first peeked value is NOT returned. + let mut range = state.nonverifiable_range_raw(None, b"b".to_vec()..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"b".to_vec(), b"beluga".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"c".to_vec(), b"charm".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + Ok(()) +} + +#[tokio::test] +/// Test that specifying an inverted range does not work. 
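+/// For instance (as exercised at the end of the body below), a range whose start bound
+/// sorts after its end bound is rejected up front rather than yielding an empty stream:
+///
+/// ```ignore
+/// let range = state.nonverifiable_range_raw(None, b"c/".to_vec()..b"b/".to_vec());
+/// assert!(range.is_err());
+/// ```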
+async fn range_query_ordering() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + let mut state_init = StateDelta::new(storage.latest_snapshot()); + let mut range = state_init.nonverifiable_range_raw(None, ..)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + state_init.nonverifiable_put_raw(b"c/charm".to_vec(), b"charm".to_vec()); + state_init.nonverifiable_put_raw(b"a/aroma".to_vec(), b"aroma".to_vec()); + state_init.nonverifiable_put_raw(b"a/apple".to_vec(), b"apple".to_vec()); + state_init.nonverifiable_put_raw(b"b/boat".to_vec(), b"boat".to_vec()); + + let mut range = state_init.nonverifiable_range_raw(None, ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"a/apple".to_vec(), b"apple".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aroma".to_vec(), b"aroma".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"b/boat".to_vec(), b"boat".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"c/charm".to_vec(), b"charm".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let lower = b"b/".to_vec(); + let upper = b"c/".to_vec(); + let mut range = state_init.nonverifiable_range_raw(None, lower..upper)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"b/boat".to_vec(), b"boat".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let upper1 = b"c/".to_vec(); + let upper2 = b"c/".to_vec(); + let mut range = state_init.nonverifiable_range_raw(None, upper1..upper2)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let lower = b"b/".to_vec(); + let upper = b"c/".to_vec(); + let range = state_init.nonverifiable_range_raw(None, upper..lower); + assert!(range.is_err()); + std::mem::drop(range); + + Ok(()) +} + +#[tokio::test] +/// Test that passing in an absurd range does not work. +async fn range_query_bad_range() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + + let state_init = StateDelta::new(storage.latest_snapshot()); + + let lower = format!("{:020}", 0).as_bytes().to_vec(); + let upper = format!("{:020}", 0).as_bytes().to_vec(); + + // Inclusive range are not supported. + let range = state_init.nonverifiable_range_raw(None, lower..=upper); + assert!(range.is_err()); + std::mem::drop(range); + + let lower = format!("{:020}", 0).as_bytes().to_vec(); + let upper = format!("{:020}", 1).as_bytes().to_vec(); + // Inclusive range are not supported. + let range = state_init.nonverifiable_range_raw(None, upper..lower); + assert!(range.is_err()); + + Ok(()) +} + +#[tokio::test] +/// Test that the semantics of the range query do not change when the state is +/// persisted. 
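+/// (Sketch of the intent: the entries and their ordering seen through a committed
+/// snapshot should match what the in-memory delta reported before `commit`.)
+///
+/// ```ignore
+/// storage.commit(delta).await?;
+/// let snapshot = storage.latest_snapshot();
+/// let mut range = snapshot.nonverifiable_range_raw(Some(b"compact_block/"), ..)?;
+/// ```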
+async fn range_query_storage_basic() -> anyhow::Result<()> { + use crate::read::StateRead; + use crate::write::StateWrite; + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + for height in 0..100 { + delta.nonverifiable_put_raw( + format!("compact_block/{:020}", height).as_bytes().to_vec(), + format!("compact_block/{:020}", height).as_bytes().to_vec(), + ); + } + + // We insert keys before the compact block keys. + for key in 0..10 { + delta.nonverifiable_put_raw( + format!("ante/{:020}", key).as_bytes().to_vec(), + format!("ante/{:020}", key).as_bytes().to_vec(), + ); + } + + // We insert keys after the compact block keys. + for key in 0..10 { + delta.nonverifiable_put_raw( + format!("post/{:020}", key).as_bytes().to_vec(), + format!("postÆ’/{:020}", key).as_bytes().to_vec(), + ); + } + + storage.commit(delta).await?; + + let state_init = storage.latest_snapshot(); + let mut range = state_init.nonverifiable_range_raw(Some(b"compact_block/"), ..)?; + for height in 0..100 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", height).as_bytes().to_vec(), + format!("compact_block/{:020}", height).as_bytes().to_vec() + )), + "height: {}", + height + ); + } + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let cb_50 = format!("{:020}", 50).as_bytes().to_vec(); + let cb_80 = format!("{:020}", 80).as_bytes().to_vec(); + let mut range = state_init.nonverifiable_range_raw(Some(b"compact_block/"), cb_50..cb_80)?; + for height in 50..80 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", height).as_bytes().to_vec(), + format!("compact_block/{:020}", height).as_bytes().to_vec() + )), + "height: {}", + height + ); + } + assert_eq!(range.next().await.transpose()?, None); + + let cb_80 = format!("{:020}", 80).as_bytes().to_vec(); + let mut range = state_init.nonverifiable_range_raw(Some(b"compact_block/"), ..cb_80)?; + for height in 0..80 { + assert_eq!( + range.next().await.transpose()?, + Some(( + format!("compact_block/{:020}", height).as_bytes().to_vec(), + format!("compact_block/{:020}", height).as_bytes().to_vec() + )), + "height: {}", + height + ); + } + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + Ok(()) +} + +#[tokio::test] +/// Test that prefixed range queries work over the persisted state. 
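+/// E.g. with `a/aaaaa`, `a/aaaab`, `a/aaaac`, and `b/boat` committed, a query
+/// restricted to the `a/` prefix should yield only the three `a/...` entries:
+///
+/// ```ignore
+/// let mut range = snapshot.nonverifiable_range_raw(Some(b"a/"), ..)?;
+/// ```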
+async fn range_query_storage() -> anyhow::Result<()> { + use crate::read::StateRead; + use crate::write::StateWrite; + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + + let storage = Storage::load(tmpdir.path().to_owned(), vec![]).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + delta.nonverifiable_put_raw(b"a/aaaaa".to_vec(), b"1".to_vec()); + delta.nonverifiable_put_raw(b"a/aaaab".to_vec(), b"2".to_vec()); + delta.nonverifiable_put_raw(b"a/aaaac".to_vec(), b"3".to_vec()); + delta.nonverifiable_put_raw(b"b/boat".to_vec(), b"4".to_vec()); + storage.commit(delta).await?; + + let state_init = storage.latest_snapshot(); + let mut range = state_init.nonverifiable_range_raw(Some(b"a/"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aaaaa".to_vec(), b"1".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aaaab".to_vec(), b"2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aaaac".to_vec(), b"3".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state_init.nonverifiable_range_raw(Some(b"a/"), b"aaaa".to_vec()..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aaaaa".to_vec(), b"1".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aaaab".to_vec(), b"2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"a/aaaac".to_vec(), b"3".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state_init.nonverifiable_range_raw(Some(b"b/"), b"chorizo".to_vec()..)?; + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + let mut range = state_init.nonverifiable_range_raw(Some(b"b/"), ..)?; + assert_eq!( + range.next().await.transpose()?, + Some((b"b/boat".to_vec(), b"4".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + Ok(()) +} diff --git a/crates/cnidarium/src/tests/delta.rs b/crates/cnidarium/src/tests/delta.rs new file mode 100644 index 0000000000..ba1fae3a3f --- /dev/null +++ b/crates/cnidarium/src/tests/delta.rs @@ -0,0 +1,721 @@ +/* +#[tokio::test] +async fn garden_of_forking_paths() -> anyhow::Result<()> { + tracing_subscriber::fmt::init(); + let storage = TempStorage::new().await?; + + let mut state_init = storage.latest_snapshot(); + + // TODO: do we still want to have StateTransaction ? + // what if we just made StateDelta be StateTransaction ? + // what are the downsides? forced allocation for range queries? + // where do we get the events out? + let mut tx = state_init.begin_transaction(); + tx.put_raw("base".to_owned(), b"base".to_vec()); + tx.apply(); + storage.commit(state_init).await?; + + let mut state = storage.latest_snapshot(); + let mut tx = state.begin_transaction(); + + // We can create a StateDelta from a borrow, it will take ownership of the borrow while the family is live + let mut delta = StateDelta::new(&mut tx); + delta.put_raw("delta".to_owned(), b"delta".to_vec()); + + // We can also nest StateDeltas -- unlike fork, this will only flatten down to the nesting point. 
+ let mut d2 = StateDelta::new(&mut delta); + + let mut delta_a = d2.fork(); + let mut delta_b = d2.fork(); + delta_a.put_raw("delta".to_owned(), b"delta_a".to_vec()); + delta_b.put_raw("delta".to_owned(), b"delta_b".to_vec()); + let mut delta_a_base = delta_a.fork(); + let mut delta_b_base = delta_b.fork(); + delta_a_base.delete("base".to_owned()); + delta_b_base.delete("base".to_owned()); + + assert_eq!(delta_a.get_raw("base").await?, Some(b"base".to_vec())); + assert_eq!(delta_a.get_raw("base").await?, Some(b"base".to_vec())); + assert_eq!(delta_a_base.get_raw("base").await?, None); + assert_eq!(delta_b_base.get_raw("base").await?, None); + + assert_eq!(delta_a.get_raw("delta").await?, Some(b"delta_a".to_vec())); + assert_eq!( + delta_a_base.get_raw("delta").await?, + Some(b"delta_a".to_vec()) + ); + assert_eq!(delta_b.get_raw("delta").await?, Some(b"delta_b".to_vec())); + assert_eq!( + delta_b_base.get_raw("delta").await?, + Some(b"delta_b".to_vec()) + ); + + // Pick one we like and apply it, releasing the &mut delta reference... + // Note: flattens delta_b_base -> delta_b -> delta and stops! + delta_b_base.apply(); + // ... so we can read from delta again. + assert_eq!(delta.get_raw("base").await?, None); + assert_eq!(delta.get_raw("delta").await?, Some(b"delta_b".to_vec())); + + delta.apply(); + tx.apply(); + storage.commit(state).await?; + + let state = storage.latest_snapshot(); + assert_eq!(state.get_raw("base").await?, None); + assert_eq!(state.get_raw("delta").await?, Some(b"delta_b".to_vec())); + + Ok(()) +} + +#[tokio::test] +async fn simple_flow() -> anyhow::Result<()> { + //tracing_subscriber::fmt::init(); + let tmpdir = tempfile::tempdir()?; + + // Initialize an empty Storage in the new directory + let storage = Storage::load(tmpdir.path().to_owned()).await?; + + // Version -1 to Version 0 writes + // + // tx00: test => test + // tx00: c/aa => 0 [object store] + // tx00: c/ab => 1 [object store] + // tx00: c/ac => 2 [object store] + // tx00: c/ad => 3 [object store] + // tx00: iA => A [nonverifiable store] + // tx00: iC => C [nonverifiable store] + // tx00: iF => F [nonverifiable store] + // tx00: iD => D [nonverifiable store] + // tx01: a/aa => aa + // tx01: a/aaa => aaa + // tx01: a/ab => ab + // tx01: a/z => z + // tx01: c/ab => 10 [object store] + // tx01: c/ac => [deleted] [object store] + // + // Version 0 to Version 1 writes + // tx10: test => [deleted] + // tx10: a/aaa => [deleted] + // tx10: a/c => c + // tx10: iB => B [nonverifiable store] + // tx11: a/ab => ab2 + // tx11: iD => [deleted] nonverifiable store] + + let mut state_init = StateDelta::new(storage.latest_snapshot()); + // Check that reads on an empty state return Ok(None) + assert_eq!(state_init.get_raw("test").await?, None); + assert_eq!(state_init.get_raw("a/aa").await?, None); + + // Create tx00 + let mut tx00 = StateDelta::new(&mut state_init); + tx00.put_raw("test".to_owned(), b"test".to_vec()); + tx00.object_put("c/aa", 0u64); + tx00.object_put("c/ab", 1u64); + tx00.object_put("c/ac", 2u64); + tx00.object_put("c/ad", 3u64); + tx00.nonverifiable_put_raw(b"iA".to_vec(), b"A".to_vec()); + tx00.nonverifiable_put_raw(b"iC".to_vec(), b"C".to_vec()); + tx00.nonverifiable_put_raw(b"iF".to_vec(), b"F".to_vec()); + tx00.nonverifiable_put_raw(b"iD".to_vec(), b"D".to_vec()); + + // Check reads against tx00: + // This is present in tx00 + assert_eq!(tx00.get_raw("test").await?, Some(b"test".to_vec())); + // This is missing in tx00 and state_init and tree is empty + assert_eq!(tx00.get_raw("a/aa").await?, None); 
+ // Present in tx00 object store + assert_eq!(tx00.object_get("c/aa"), Some(0u64)); + assert_eq!(tx00.object_get("c/ab"), Some(1u64)); + assert_eq!(tx00.object_get("c/ac"), Some(2u64)); + assert_eq!(tx00.object_get("c/ad"), Some(3u64)); + // Present in tx00 object store but requested with wrong type + assert_eq!(tx00.object_get::("c/aa"), None); + // Missing in tx00 object store + assert_eq!(tx00.object_get::("nonexist"), None); + // Nonconsensus range checks + let mut range = tx00.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Now apply the transaction to state_init + tx00.apply(); + assert_eq!(state_init.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state_init.get_raw("a/aa").await?, None); + // Present in state_init object store + assert_eq!(state_init.object_get("c/aa"), Some(0u64)); + assert_eq!(state_init.object_get("c/ab"), Some(1u64)); + assert_eq!(state_init.object_get("c/ac"), Some(2u64)); + assert_eq!(state_init.object_get("c/ad"), Some(3u64)); + // Present in state_init object store but requested with wrong type + assert_eq!(state_init.object_get::("c/aa"), None); + // Missing in state_init object store + assert_eq!(state_init.object_get::("nonexist"), None); + // Nonconsensus range checks + let mut range = state_init.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Create a transaction writing the other keys. 
+ let mut tx01 = StateDelta::new(&mut state_init); + tx01.put_raw("a/aa".to_owned(), b"aa".to_vec()); + tx01.put_raw("a/aaa".to_owned(), b"aaa".to_vec()); + tx01.put_raw("a/ab".to_owned(), b"ab".to_vec()); + tx01.put_raw("a/z".to_owned(), b"z".to_vec()); + tx01.object_put("c/ab", 10u64); + tx01.object_delete("c/ac"); + + // Check reads against tx01: + // This is missing in tx01 and reads through to state_init + assert_eq!(tx01.get_raw("test").await?, Some(b"test".to_vec())); + // This is present in tx01 + assert_eq!(tx01.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(tx01.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(tx01.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(tx01.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in tx01 and in state_init + assert_eq!(tx01.get_raw("a/c").await?, None); + let mut range = tx01.prefix_raw("a/"); + let mut range_keys = tx01.prefix_keys("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aaa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/ab".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range_keys.next().await.transpose()?, Some("a/z".to_owned())); + assert_eq!(range.next().await.transpose()?, None); + assert_eq!(range_keys.next().await.transpose()?, None); + std::mem::drop(range); + std::mem::drop(range_keys); + + // Now apply the transaction to state_init + tx01.apply(); + + // Check reads against state_init: + // This is present in state_init + assert_eq!(state_init.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state_init.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state_init.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(state_init.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state_init.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in state_init + assert_eq!(state_init.get_raw("a/c").await?, None); + let mut range = state_init.prefix_raw("a/"); + let mut range_keys = state_init.prefix_keys("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/aaa".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range_keys.next().await.transpose()?, + Some("a/ab".to_owned()) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range_keys.next().await.transpose()?, Some("a/z".to_owned())); + assert_eq!(range.next().await.transpose()?, None); + assert_eq!(range_keys.next().await.transpose()?, None); + std::mem::drop(range); + std::mem::drop(range_keys); + + // Now commit state_init to storage + storage.commit_delta(state_init).await?; + + // Now we have version 0. 
+ let mut state0 = StateDelta::new(storage.latest_snapshot()); + //assert_eq!(state0.version(), 0); + // Check reads against state0: + // This is missing in state0 and present in JMT + assert_eq!(state0.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state0.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(state0.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state0.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in state0 and missing in JMT + assert_eq!(state0.get_raw("a/c").await?, None); + let mut range = state0.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = state0.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Start building a transaction + let mut tx10 = StateDelta::new(&mut state0); + tx10.delete("test".to_owned()); + tx10.delete("a/aaa".to_owned()); + tx10.put_raw("a/c".to_owned(), b"c".to_vec()); + tx10.nonverifiable_put_raw(b"iB".to_vec(), b"B".to_vec()); + + // Check reads against tx10: + // This is deleted in tx10, missing in state0, present in JMT + assert_eq!(tx10.get_raw("test").await?, None); + assert_eq!(tx10.get_raw("a/aaa").await?, None); + // This is missing in tx10, missing in state0, present in JMT + assert_eq!(tx10.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(tx10.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(tx10.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is present in tx10, missing in state0, missing in JMT + assert_eq!(tx10.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = tx10.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = tx10.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + 
Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Apply tx10 to state0 + tx10.apply(); + + // Check reads against state0 + // This is deleted in state0, present in JMT + assert_eq!(state0.get_raw("test").await?, None); + assert_eq!(state0.get_raw("a/aaa").await?, None); + // This is missing in state0, present in JMT + assert_eq!(state0.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state0.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is present in state0, missing in JMT + assert_eq!(state0.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = state0.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Start building another transaction + let mut tx11 = StateDelta::new(&mut state0); + tx11.put_raw("a/ab".to_owned(), b"ab2".to_vec()); + tx11.nonverifiable_delete(b"iD".to_vec()); + + // Check reads against tx11: + // This is present in tx11, missing in state0, present in JMT + assert_eq!(tx11.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + // This is missing in tx11, deleted in state0, present in JMT + assert_eq!(tx11.get_raw("test").await?, None); + assert_eq!(tx11.get_raw("a/aaa").await?, None); + // This is missing in tx11, missing in state0, present in JMT + assert_eq!(tx11.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(tx11.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is missing in tx10, present in state0, missing in JMT + assert_eq!(tx11.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = tx11.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = tx11.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Apply tx11 to state0 + tx11.apply(); + + // Check reads against state0 + // This is deleted in state0, present in JMT + assert_eq!(state0.get_raw("test").await?, None); + assert_eq!(state0.get_raw("a/aaa").await?, None); + // This is missing in state0, present in JMT + assert_eq!(state0.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0.get_raw("a/z").await?, Some(b"z".to_vec())); + // This is present in state0, missing in JMT + 
assert_eq!(state0.get_raw("a/c").await?, Some(b"c".to_vec())); + // This is present in state0, present in JMT + assert_eq!(state0.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + let mut range = state0.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let mut range = state0.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Create another fork of state 0 while we've edited the first one but before we commit. + let state0a = storage.latest_snapshot(); + assert_eq!(state0a.version(), 0); + + // Commit state0 as state1. + storage.commit_delta(state0).await?; + + let state1 = storage.latest_snapshot(); + assert_eq!(state1.version(), 1); + + // Check reads against state1 + assert_eq!(state1.get_raw("test").await?, None); + assert_eq!(state1.get_raw("a/aaa").await?, None); + assert_eq!(state1.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state1.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + assert_eq!(state1.get_raw("a/z").await?, Some(b"z".to_vec())); + assert_eq!(state1.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = state1.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + let mut range = state1.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Check reads against state0a + assert_eq!(state0a.get_raw("test").await?, Some(b"test".to_vec())); + assert_eq!(state0a.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state0a.get_raw("a/aaa").await?, Some(b"aaa".to_vec())); + assert_eq!(state0a.get_raw("a/ab").await?, Some(b"ab".to_vec())); + assert_eq!(state0a.get_raw("a/z").await?, Some(b"z".to_vec())); + assert_eq!(state0a.get_raw("a/c").await?, None); + let mut range = state0a.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + 
range.next().await.transpose()?, + Some(("a/aaa".to_owned(), b"aaa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = state0a.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iD".to_vec(), b"D".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + // Now, check that closing and reloading works. + + // First, be sure to explicitly drop anything keeping a reference to the + // RocksDB instance: + std::mem::drop(storage); + // std::mem::drop(state0); // consumed in commit() + std::mem::drop(state0a); + std::mem::drop(state1); + + // Now reload the storage from the same directory... + let storage_a = Storage::load(tmpdir.path().to_owned()).await?; + let state1a = storage_a.latest_snapshot(); + + // Check that we reload at the correct version ... + assert_eq!(state1a.version(), 1); + + // Check reads against state1a after reloading the DB + assert_eq!(state1a.get_raw("test").await?, None); + assert_eq!(state1a.get_raw("a/aaa").await?, None); + assert_eq!(state1a.get_raw("a/aa").await?, Some(b"aa".to_vec())); + assert_eq!(state1a.get_raw("a/ab").await?, Some(b"ab2".to_vec())); + assert_eq!(state1a.get_raw("a/z").await?, Some(b"z".to_vec())); + assert_eq!(state1a.get_raw("a/c").await?, Some(b"c".to_vec())); + let mut range = state1a.prefix_raw("a/"); + assert_eq!( + range.next().await.transpose()?, + Some(("a/aa".to_owned(), b"aa".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/ab".to_owned(), b"ab2".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/c".to_owned(), b"c".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some(("a/z".to_owned(), b"z".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + // Nonconsensus range checks + let mut range = state1a.nonverifiable_prefix_raw(b"i"); + assert_eq!( + range.next().await.transpose()?, + Some((b"iA".to_vec(), b"A".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iB".to_vec(), b"B".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iC".to_vec(), b"C".to_vec())) + ); + assert_eq!( + range.next().await.transpose()?, + Some((b"iF".to_vec(), b"F".to_vec())) + ); + assert_eq!(range.next().await.transpose()?, None); + std::mem::drop(range); + + Ok(()) +} + + */ diff --git a/crates/cnidarium/src/utils.rs b/crates/cnidarium/src/utils.rs new file mode 100644 index 0000000000..0d04841c71 --- /dev/null +++ b/crates/cnidarium/src/utils.rs @@ -0,0 +1,34 @@ +use anyhow::bail; +/// Splits a range into a tuple of start and end bounds, ignoring the inclusive/exclusive +/// nature of the range bounds. And returns a tuple consisting of the range implementation, +/// and the start and end bounds. +/// # Errors +/// This method returns an error when the range is inclusive on the end bound, +/// and when the lower bound is greater than the upper bound. 
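+///
+/// For example (informally): `b"a".to_vec()..b"c".to_vec()` converts to the bounds
+/// `(Some(b"a"), Some(b"c"))`, and `b"a".to_vec()..` converts to `(Some(b"a"), None)`,
+/// while an inclusive end bound such as `..=b"c".to_vec()` or an inverted range is
+/// rejected with an error.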
+#[allow(clippy::type_complexity)]
+pub(crate) fn convert_bounds(
+    range: impl std::ops::RangeBounds<Vec<u8>>,
+) -> anyhow::Result<(
+    impl std::ops::RangeBounds<Vec<u8>>,
+    (Option<Vec<u8>>, Option<Vec<u8>>),
+)> {
+    let start = match range.start_bound() {
+        std::ops::Bound::Included(v) => Some(v.clone()),
+        std::ops::Bound::Excluded(v) => Some(v.clone()),
+        std::ops::Bound::Unbounded => None,
+    };
+
+    let end = match range.end_bound() {
+        std::ops::Bound::Included(_) => bail!("included end bound not supported"),
+        std::ops::Bound::Excluded(v) => Some(v.clone()),
+        std::ops::Bound::Unbounded => None,
+    };
+
+    if let (Some(k_start), Some(k_end)) = (&start, &end) {
+        if k_start > k_end {
+            bail!("lower bound is greater than upper bound")
+        }
+    }
+
+    Ok((range, (start, end)))
+}
diff --git a/crates/cnidarium/src/write.rs b/crates/cnidarium/src/write.rs
new file mode 100644
index 0000000000..ee461c3698
--- /dev/null
+++ b/crates/cnidarium/src/write.rs
@@ -0,0 +1,74 @@
+use crate::StateRead;
+use std::{any::Any, collections::BTreeMap};
+use tendermint::abci;
+
+/// Write access to chain state.
+pub trait StateWrite: StateRead + Send + Sync {
+    /// Puts raw bytes into the verifiable key-value store with the given key.
+    fn put_raw(&mut self, key: String, value: Vec<u8>);
+
+    /// Delete a key from the verifiable key-value store.
+    fn delete(&mut self, key: String);
+
+    /// Puts raw bytes into the non-verifiable key-value store with the given key.
+    fn nonverifiable_put_raw(&mut self, key: Vec<u8>, value: Vec<u8>);
+
+    /// Delete a key from non-verifiable key-value storage.
+    fn nonverifiable_delete(&mut self, key: Vec<u8>);
+
+    /// Puts an object into the ephemeral object store with the given key.
+    ///
+    /// # Panics
+    ///
+    /// If the object is already present in the store, but its type is not the same as the type of
+    /// `value`.
+    fn object_put<T: Any + Send + Sync>(&mut self, key: &'static str, value: T);
+
+    /// Deletes a key from the ephemeral object store.
+    fn object_delete(&mut self, key: &'static str);
+
+    /// Merge a set of object changes into this `StateWrite`.
+    ///
+    /// Unlike `object_put`, this avoids re-boxing values and messing up the downcasting.
+    fn object_merge(&mut self, objects: BTreeMap<&'static str, Option<Box<dyn Any + Send + Sync>>>);
+
+    /// Record that an ABCI event occurred while building up this set of state changes.
+    fn record(&mut self, event: abci::Event);
+}
+
+impl<'a, S: StateWrite + Send + Sync> StateWrite for &'a mut S {
+    fn put_raw(&mut self, key: String, value: jmt::OwnedValue) {
+        (**self).put_raw(key, value)
+    }
+
+    fn delete(&mut self, key: String) {
+        (**self).delete(key)
+    }
+
+    fn nonverifiable_delete(&mut self, key: Vec<u8>) {
+        (**self).nonverifiable_delete(key)
+    }
+
+    fn nonverifiable_put_raw(&mut self, key: Vec<u8>, value: Vec<u8>) {
+        (**self).nonverifiable_put_raw(key, value)
+    }
+
+    fn object_put<T: Any + Send + Sync>(&mut self, key: &'static str, value: T) {
+        (**self).object_put(key, value)
+    }
+
+    fn object_delete(&mut self, key: &'static str) {
+        (**self).object_delete(key)
+    }
+
+    fn object_merge(
+        &mut self,
+        objects: BTreeMap<&'static str, Option<Box<dyn Any + Send + Sync>>>,
+    ) {
+        (**self).object_merge(objects)
+    }
+
+    fn record(&mut self, event: abci::Event) {
+        (**self).record(event)
+    }
+}
diff --git a/crates/cnidarium/src/write_batch.rs b/crates/cnidarium/src/write_batch.rs
new file mode 100644
index 0000000000..db86077898
--- /dev/null
+++ b/crates/cnidarium/src/write_batch.rs
@@ -0,0 +1,59 @@
+use std::sync::Arc;
+
+// HashMap is okay here because we don't care about ordering of substore roots.
+use std::collections::HashMap;
+
+use crate::{
+    cache::Cache,
+    store::{multistore, substore::SubstoreConfig},
+    RootHash,
+};
+
+/// A staged write batch that can be committed to RocksDB.
+///
+/// This allows for write batches to be prepared and committed at a later time.
+pub struct StagedWriteBatch {
+    /// The write batch to commit to RocksDB.
+    pub(crate) write_batch: rocksdb::WriteBatch,
+    /// The new version of the chain state.
+    pub(crate) version: jmt::Version,
+    /// The new versions of each substore.
+    pub(crate) multistore_versions: multistore::MultistoreCache,
+    /// The root hash of the chain state corresponding to this set of changes.
+    pub(crate) root_hash: RootHash,
+    /// The configs, root hashes, and new versions of each substore
+    /// that was updated in this batch.
+    #[allow(clippy::disallowed_types)]
+    pub(crate) substore_roots: HashMap<Arc<SubstoreConfig>, (RootHash, u64)>,
+    /// Whether or not to perform a migration.
+    pub(crate) perform_migration: bool,
+    /// A lightweight copy of the changeset; this is useful to provide
+    /// a stream of changes to subscribers.
+    pub(crate) changes: Arc<Cache>,
+}
+
+impl StagedWriteBatch {
+    /// Returns the new version of the chain state corresponding to this set of changes.
+    pub fn version(&self) -> jmt::Version {
+        self.version
+    }
+
+    /// Returns the root hash of the jmt corresponding to this set of changes.
+    pub fn root_hash(&self) -> &RootHash {
+        &self.root_hash
+    }
+
+    /// Returns the version of a substore in this batch, if it exists
+    /// and `None` otherwise.
+    pub fn substore_version(&self, prefix: &str) -> Option<jmt::Version> {
+        let Some(substore_config) = self
+            .multistore_versions
+            .config
+            .find_substore(prefix.as_bytes())
+        else {
+            return None;
+        };
+
+        self.multistore_versions.get_version(&substore_config)
+    }
+}
diff --git a/crates/cnidarium/tests/migration.rs b/crates/cnidarium/tests/migration.rs
new file mode 100644
index 0000000000..231493357f
--- /dev/null
+++ b/crates/cnidarium/tests/migration.rs
@@ -0,0 +1,1190 @@
+#![cfg(feature = "migration")]
+use cnidarium::StateDelta;
+use cnidarium::StateWrite;
+use cnidarium::Storage;
+use ibc_types::core::commitment::MerklePath;
+use ibc_types::core::commitment::MerkleRoot;
+use jmt::RootHash;
+use once_cell::sync::Lazy;
+use tempfile;
+use tokio;
+
+/*
+ * Migration tests.
+ *
+ * Node operators perform network upgrades by running a migration of
+ * the chain state that preserves block height continuity. In order to
+ * enable this, we need to have a way to commit changes to our merkle
+ * tree, _without_ increasing its version number.
+ *
+ * With the addition of substores, we must cover the cases when migrations
+ * access data located in substores.
+ *
+ * These integration tests enforce that a migration operation is able to
+ * write to both the main store and any number of substores without incrementing
+ * their version number.
+ *
+ * Testing menu:
+ * - test_simple_migration: the most basic migration scenario where we write to the main store.
+ * - test_substore_migration: a migration scenario where we write to the main store and substores.
+ * - prop_test_substore_migration: property-based testing of the migration operation.
+ *
+ * Each test has the following pattern:
+ * Operation:
+ *     Write a collection of keys, incrementing the version number at each step.
+ * Checks:
+ *     - Check that the version number has incremented.
+ *     - Check that the keys are present in the latest snapshot.
+ *     - Check that the keys have valid proofs.
+ * Operation:
+ *     Perform a migration, writing/removing a key in the main store and/or substores.
+ *     - Check that the version number has not changed.
+ *     - Check that the root hash for the main store and/or substores has changed.
+ *     - Check that the migration key is present in the latest snapshot.
+ *     - Check that the migration key has a valid proof.
+ *     - Check that the migration key has the expected value.
+ * Operation:
+ *     Write a new collection of keys, incrementing the version number at each step.
+ * Checks:
+ *     - Check that the version number has incremented.
+ *     - Check that the new keys are present in the latest snapshot.
+ *     - Check that the new keys have valid proofs.
+ *     - Check that the new keys have the expected values.
+ * Operation:
+ *     Try to generate proofs for keys that are NOT present in the jmt.
+ * Checks:
+ *     - Check that no value is returned for the keys.
+ *     - Check that the nonexistence proofs are valid.
+ */
+
+/// The proof specs for the main store.
+pub static MAIN_STORE_PROOF_SPEC: Lazy<Vec<ics23::ProofSpec>> =
+    Lazy::new(|| vec![cnidarium::ics23_spec()]);
+
+/// The proof specs for keys located in substores (e.g. `ibc` keys)
+pub static FULL_PROOF_SPECS: Lazy<Vec<ics23::ProofSpec>> =
+    Lazy::new(|| vec![cnidarium::ics23_spec(), cnidarium::ics23_spec()]);
+
+#[tokio::test]
+/// Test that we can commit to the main store without incrementing its version.
+async fn test_simple_migration() -> anyhow::Result<()> {
+    let _ = tracing_subscriber::fmt::try_init();
+    let tmpdir = tempfile::tempdir()?;
+    let db_path = tmpdir.into_path();
+    let substore_prefixes = vec![];
+    let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?;
+
+    let mut counter = 0;
+    let num_ops = 10;
+
+    /* ************************ */
+    /*     write some keys      */
+    /* ************************ */
+    let mut kvs = vec![];
+    for i in 0..num_ops {
+        /* write some value at version `i` */
+        let mut delta = StateDelta::new(storage.latest_snapshot());
+        let key = format!("key_{i}");
+        let value = format!("value_{i}").as_bytes().to_vec();
+        delta.put_raw(key.clone(), value.clone());
+        let root_hash = storage.commit(delta).await?;
+
+        tracing::info!(%key, ?root_hash, version = %i, "committed key-value pair");
+
+        kvs.push((key, value));
+        counter += 1;
+    }
+
+    assert_eq!(counter, num_ops);
+    counter = 0;
+
+    // We don't _need_ to toss the storage instance, but let's be
+    // extra careful and make sure that we can load the storage.
+ storage.release().await; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + let premigration_root = storage + .latest_snapshot() + .root_hash() + .await + .expect("infallible"); + + for (i, (key, value)) in kvs.clone().into_iter().enumerate() { + let snapshot = storage.latest_snapshot(); + let (some_value, proof) = snapshot.get_with_proof(key.as_bytes().to_vec()).await?; + let retrieved_value = some_value.expect("key is found in the latest snapshot"); + assert_eq!(retrieved_value, value); + + let merkle_path = MerklePath { + key_path: vec![key], + }; + let merkle_root = MerkleRoot { + hash: premigration_root.0.to_vec(), + }; + + proof + .verify_membership( + &MAIN_STORE_PROOF_SPEC, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!(?e, key_index = ?i, "proof verification failed")) + .expect("membership proof verifies"); + + counter += 1; + } + + assert_eq!(counter, num_ops); + counter = 0; + + let old_version = storage.latest_version(); + assert_eq!(old_version, num_ops - 1); + + /* ********************* */ + /* perform the migration */ + /* ********************* */ + let mut delta = StateDelta::new(storage.latest_snapshot()); + let migration_key = "banana".to_string(); + let migration_value = "a good fruit".as_bytes().to_vec(); + delta.put_raw(migration_key.clone(), migration_value.clone()); + let postmigration_root = storage.commit_in_place(delta).await?; + + // We have to reload the storage instance to get the latest snapshot. + storage.release().await; + let storage = Storage::load(db_path, substore_prefixes).await?; + + let new_version = storage.latest_version(); + + assert_ne!( + premigration_root, postmigration_root, + "migration should change the root hash" + ); + assert_eq!( + old_version, new_version, + "the post-migration version number should not change" + ); + + /* ************************ */ + /* check the migration */ + /* ************************ */ + let (some_value, proof) = storage + .latest_snapshot() + .get_with_proof(migration_key.as_bytes().to_vec()) + .await?; + let retrieved_value = some_value.expect("migration key is found in the latest snapshot"); + assert_eq!(retrieved_value, migration_value); + + let merkle_path = MerklePath { + key_path: vec![migration_key], + }; + let merkle_root = MerkleRoot { + hash: postmigration_root.0.to_vec(), + }; + + proof + .verify_membership( + &MAIN_STORE_PROOF_SPEC, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!("proof verification failed: {:?}", e)) + .expect("membership proof verifies"); + + /* ************************ */ + /* write new keys */ + /* ************************ */ + for i in num_ops..num_ops * 2 { + /* write some value at version `i` */ + let mut delta = StateDelta::new(storage.latest_snapshot()); + let key = format!("key_{i}"); + let value = format!("value_{i}").as_bytes().to_vec(); + delta.put_raw(key.clone(), value.clone()); + let root_hash = storage.commit(delta).await?; + + tracing::info!(%key, ?root_hash, version = %i, "committed key-value pair"); + + kvs.push((key, value)); + counter += 1; + } + + assert_eq!(counter, num_ops); + counter = 0; + + let final_root = storage + .latest_snapshot() + .root_hash() + .await + .expect("infaillible"); + + for (i, (key, value)) in kvs.clone().into_iter().enumerate() { + let snapshot = storage.latest_snapshot(); + let (some_value, proof) = snapshot.get_with_proof(key.as_bytes().to_vec()).await?; + let retrieved_value = some_value.expect("key is found in the 
latest snapshot"); + assert_eq!(retrieved_value, value); + + let merkle_path = MerklePath { + key_path: vec![key], + }; + let merkle_root = MerkleRoot { + hash: final_root.0.to_vec(), + }; + + proof + .verify_membership( + &MAIN_STORE_PROOF_SPEC, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!(?e, key_index = ?i, "proof verification failed")) + .expect("membership proof verifies"); + + counter += 1; + } + + assert_eq!(counter, num_ops * 2); + + /* ****************************** */ + /* read nonexistent keys */ + /* ****************************** */ + let final_snapshot = storage.latest_snapshot(); + let final_root = final_snapshot.root_hash().await.expect("infaillible"); + + let key = format!("nonexistent_key"); + let (some_value, proof) = final_snapshot + .get_with_proof(key.as_bytes().to_vec()) + .await?; + assert!(some_value.is_none()); + let merkle_path = MerklePath { + key_path: vec![key], + }; + let merkle_root = MerkleRoot { + hash: final_root.0.to_vec(), + }; + + proof + .verify_non_membership(&MAIN_STORE_PROOF_SPEC, merkle_root, merkle_path) + .map_err(|e| tracing::error!("proof verification failed: {:?}", e)) + .expect("nonmembership proof verifies"); + + Ok(()) +} + +#[tokio::test] +/// Test that we can commit to substores without incrementing their version. +async fn test_substore_migration() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["ibc".to_string(), "dex".to_string(), "misc".to_string()]; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + + let mut counter = 0; + let num_ops_per_substore = 10; + + let mut kvs = vec![]; + + /* ************************ */ + /* write some keys */ + /* in every substore */ + /* ************************ */ + for i in 0..num_ops_per_substore { + let mut delta = StateDelta::new(storage.latest_snapshot()); + for substore in substore_prefixes.iter() { + let key = format!("{substore}/key_{i}"); + let value = format!("{substore}value_{i}").as_bytes().to_vec(); + kvs.push((key.clone(), value.clone())); + tracing::debug!(?key, "initializing substore {substore} with key-value pair"); + delta.put_raw(key.clone(), value.clone()); + } + + let root_hash = storage.commit(delta).await?; + tracing::info!(?root_hash, version = %i, "committed key-value pair"); + counter += 1; + } + let num_versions_pre_migration = counter; + assert_eq!(counter, num_ops_per_substore); + counter = 0; + + // We don't _need_ to toss the storage instance, but let's be + // extra careful and make sure that things work if we reload it. + storage.release().await; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + + let premigration_root = storage + .latest_snapshot() + .root_hash() + .await + .expect("infaillible"); + + for (i, (key, value)) in kvs.clone().into_iter().enumerate() { + tracing::debug!(?key, "checking key-value pair"); + let snapshot = storage.latest_snapshot(); + let (some_value, proof) = snapshot.get_with_proof(key.as_bytes().to_vec()).await?; + let retrieved_value = some_value.expect("key is found in the latest snapshot"); + assert_eq!(retrieved_value, value); + + // We split the key into its substore prefix and the key itself. 
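+            // (e.g. the key "ibc/key_0" becomes the two-element path ["ibc", "key_0"],
+            // matching the two proof specs in `FULL_PROOF_SPECS`.)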
+ let merkle_path = MerklePath { + key_path: key.split('/').map(|s| s.to_string()).collect(), + }; + let merkle_root = MerkleRoot { + hash: premigration_root.0.to_vec(), + }; + + proof + .verify_membership( + &FULL_PROOF_SPECS, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!(?e, key_index = ?i, "proof verification failed")) + .expect("membership proof verifies"); + + counter += 1; + } + + assert_eq!( + counter, + substore_prefixes.len() as u64 * num_ops_per_substore + ); + + let premigration_snapshot = storage.latest_snapshot(); + let mut old_root_hashes: Vec = vec![]; + for substore in substore_prefixes.iter() { + let root_hash = premigration_snapshot + .prefix_root_hash(substore.as_str()) + .await + .expect("prefix exists"); + old_root_hashes.push(root_hash); + } + + let old_substore_versions: Vec = substore_prefixes + .clone() + .into_iter() + .map(|prefix| { + let old_version = premigration_snapshot + .prefix_version(prefix.as_str()) + .expect("prefix exists"); + old_version.expect("substore is initialized") + }) + .collect(); + + let old_version = storage.latest_version(); + assert_eq!(old_version, num_versions_pre_migration - 1); // -1 because we start at u64::MAX + let premigration_root_hash = premigration_snapshot + .root_hash() + .await + .expect("infaillible"); + drop(premigration_snapshot); + + /* ******************************* */ + /* perform the migration */ + /* (write a key in every substore) */ + /* ******************************* */ + let mut delta = StateDelta::new(storage.latest_snapshot()); + let mut migration_kvs = vec![]; + + // Start by writing a key in every substore, including the main store. + for substore in substore_prefixes.iter() { + let key = format!("{substore}/banana", substore = substore); + let value = format!("{substore}", substore = substore) + .as_bytes() + .to_vec(); + tracing::debug!(?key, "migration: writing to substore {substore}"); + delta.put_raw(key.clone(), value.clone()); + migration_kvs.push((key, value)); + } + + // Commit the migration. + let _ = storage.commit_in_place(delta).await?; + + /* ************************ */ + /* check the migration */ + /* ************************ */ + // Overview: We just wrote a key in every substore. Now we want to perform increasingly + // complex checks to ensure that the migration was successful. + // 1. Check that every root hash has changed + // 2. Check that no version number has changed + // 3. Check that we can read the migration key from every substore + // 4. Check that the migration key has a valid proof + // 5. Check that we can read every other key from every substore + // 6. Check that every other key has a valid proof + + // We reload storage so that we can access the latest snapshot. + // The snapshot cache is not updated when we commit in place. + storage.release().await; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + + let postmigration_snapshot = storage.latest_snapshot(); + let new_version = storage.latest_version(); + + assert_eq!( + old_version, new_version, + "the global version should not change" + ); + + let postmigration_root_hash = postmigration_snapshot + .root_hash() + .await + .expect("infaillible"); + + assert_ne!(premigration_root_hash, postmigration_root_hash); + + // Check that the root hash for every substore has changed. 
+ let mut new_root_hashes: Vec = vec![]; + for substore in substore_prefixes.iter() { + let root_hash = postmigration_snapshot + .prefix_root_hash(substore.as_str()) + .await + .expect("prefix exists"); + new_root_hashes.push(root_hash); + } + + old_root_hashes + .iter() + .zip(new_root_hashes.iter()) + .zip(substore_prefixes.iter()) + .for_each(|((old, new), substore)| { + assert_ne!( + old, new, + "migration did not effect the root hash for substore {substore}", + ); + let substore_version = postmigration_snapshot + .prefix_version(substore.as_str()) + .expect("prefix exists") + .unwrap(); + assert_eq!( + substore_version, + num_ops_per_substore - 1, + "substore version should not change" + ); + }); + + // Check that the version number for every substore has NOT changed. + let new_substore_versions: Vec = substore_prefixes + .clone() + .into_iter() + .map(|prefix| { + let new_version = postmigration_snapshot + .prefix_version(prefix.as_str()) + .expect("prefix exists"); + new_version.expect("substore is initialized") + }) + .collect(); + + old_substore_versions + .iter() + .zip(new_substore_versions.iter()) + .zip(substore_prefixes.iter()) + .for_each(|((old, new), substore)| { + assert_eq!( + old, new, + "the version number for substore {substore} has changed!", + ); + }); + + // Check that the migration key is present in the latest snapshot. + for (migration_key, migration_value) in migration_kvs.clone().into_iter() { + let (some_value, proof) = postmigration_snapshot + .get_with_proof(migration_key.as_bytes().to_vec()) + .await?; + let retrieved_value = some_value.expect("migration key is found in the latest snapshot"); + assert_eq!(retrieved_value, migration_value); + + let merkle_path = MerklePath { + key_path: migration_key.split('/').map(|s| s.to_string()).collect(), + }; + let merkle_root = MerkleRoot { + hash: postmigration_root_hash.0.to_vec(), + }; + + proof + .verify_membership( + &FULL_PROOF_SPECS, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!("proof verification failed: {:?}", e)) + .expect("membership proof verifies"); + } + + // Check that every other key is still present in the latest snapshot. + for (key, value) in kvs.clone().into_iter() { + let (some_value, proof) = postmigration_snapshot + .get_with_proof(key.as_bytes().to_vec()) + .await?; + let retrieved_value = some_value.expect("key is found in the latest snapshot"); + assert_eq!(retrieved_value, value); + + let merkle_path = MerklePath { + key_path: key.split('/').map(|s| s.to_string()).collect(), + }; + let merkle_root = MerkleRoot { + hash: postmigration_root_hash.0.to_vec(), + }; + + proof + .verify_membership( + &FULL_PROOF_SPECS, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!("proof verification failed: {:?}", e)) + .expect("membership proof verifies"); + } + + /* ************************ */ + /* write some keys */ + /* in every substore */ + /* ... again ... 
*/ + /* ************************ */ + counter = 0; + for i in 0..num_ops_per_substore { + let mut delta = StateDelta::new(storage.latest_snapshot()); + for substore in substore_prefixes.iter() { + let key = format!("{substore}/key_{i}"); + let value = format!("{substore}value_{i}").as_bytes().to_vec(); + kvs.push((key.clone(), value.clone())); + tracing::debug!(?key, "initializing substore {substore} with key-value pair"); + delta.put_raw(key.clone(), value.clone()); + } + + let root_hash = storage.commit(delta).await?; + tracing::info!(?root_hash, version = %i, "committed key-value pair"); + counter += 1; + } + assert_eq!(counter, num_ops_per_substore); + counter = 0; + + let final_root = storage + .latest_snapshot() + .root_hash() + .await + .expect("infaillible"); + + for (i, (key, value)) in kvs.clone().into_iter().enumerate() { + tracing::debug!(?key, "checking key-value pair"); + let snapshot = storage.latest_snapshot(); + let (some_value, proof) = snapshot.get_with_proof(key.as_bytes().to_vec()).await?; + let retrieved_value = some_value.expect("key is found in the latest snapshot"); + assert_eq!(retrieved_value, value); + + // We split the key into its substore prefix and the key itself. + let merkle_path = MerklePath { + key_path: key.split('/').map(|s| s.to_string()).collect(), + }; + let merkle_root = MerkleRoot { + hash: final_root.0.to_vec(), + }; + + proof + .verify_membership( + &FULL_PROOF_SPECS, + merkle_root, + merkle_path, + retrieved_value, + 0, + ) + .map_err(|e| tracing::error!(?e, key_index = ?i, "proof verification failed")) + .expect("membership proof verifies"); + + counter += 1; + } + + assert_eq!( + counter, + // For each substore, we wrote `num_ops_per_substore` keys twice. + substore_prefixes.len() as u64 * num_ops_per_substore * 2 + ); + + /* ****************************** */ + /* read nonexistent keys */ + /* ****************************** */ + for (idx, substore) in substore_prefixes.iter().enumerate() { + let key = format!("{substore}/nonexistent_key_{idx}"); + let (some_value, proof) = postmigration_snapshot + .get_with_proof(key.as_bytes().to_vec()) + .await?; + assert!(some_value.is_none()); + let merkle_path = MerklePath { + key_path: key.split('/').map(|s| s.to_string()).collect(), + }; + let merkle_root = MerkleRoot { + hash: final_root.0.to_vec(), + }; + + proof + .verify_non_membership(&FULL_PROOF_SPECS, merkle_root, merkle_path) + .map_err(|e| tracing::error!("proof verification failed: {:?}", e)) + .expect("nonmembership proof verifies"); + } + + Ok(()) +} + +#[cfg(feature = "migration-proptests")] +mod proptests { + use proptest::{ + arbitrary::any, + prelude::prop, + prop_assert, prop_assert_eq, prop_assert_ne, prop_oneof, + strategy::{BoxedStrategy, Just, Strategy}, + test_runner::{FileFailurePersistence, TestCaseError}, + }; + use sha2::Sha256; + use std::{ + collections::BTreeMap, + fmt::{Debug, Display, Formatter}, + path::PathBuf, + }; + use test_strategy::proptest; + + use cnidarium::{StateDelta, StateRead, StateWrite as _, Storage}; + use ibc_types::core::commitment::{MerklePath, MerkleRoot}; + + use crate::{FULL_PROOF_SPECS, MAIN_STORE_PROOF_SPEC}; + + struct ReferenceStore { + final_kv: BTreeMap>>, + } + + impl ReferenceStore { + fn new() -> Self { + Self { + final_kv: BTreeMap::new(), + } + } + + fn execute(&mut self, op: Operation) { + match op { + Operation::Insert(key, value) => { + if key.path() == "" { + panic!("empty key"); + } + self.final_kv.insert(key, Some(value)); + } + Operation::Delete(key) => { + 
self.final_kv.insert(key, None); + } + } + } + } + + fn prefix_list() -> Vec { + vec![ + "".to_string(), + "dex".to_string(), + "ibc".to_string(), + "misc".to_string(), + "staking".to_string(), + ] + } + + fn substore_list() -> Vec { + vec![ + "dex".to_string(), + "ibc".to_string(), + "misc".to_string(), + "staking".to_string(), + ] + } + + fn char_except_slash() -> impl Strategy { + any::().prop_filter("Exclude '/'", |c| *c != '/') + } + + fn valid_key_strategy() -> impl Strategy { + ( + char_except_slash(), + proptest::collection::vec(any::(), 0..=999), + ) + .prop_map(|(first_char, mut vec)| { + vec.insert(0, first_char); + vec.into_iter().collect() + }) + } + + fn storage_key_strategy() -> BoxedStrategy { + let prefixes = prefix_list(); + let substore_strategies: Vec> = + prefixes.into_iter().map(|s| Just(s).boxed()).collect(); + + let substore_strategy = prop::strategy::Union::new_weighted( + substore_strategies.into_iter().map(|s| (1, s)).collect(), + ); + + let key_strategy = valid_key_strategy(); + + (substore_strategy, key_strategy) + .prop_map(|(substore, key)| StorageKey::new(substore, key)) + .boxed() + } + + fn value_strategy() -> impl Strategy> { + // Generate a random byte array of length 1..10 + // The values don't actually matter for the tests. + prop::collection::vec(any::(), 1..10) + } + + fn operation_strategy() -> impl Strategy { + let insert_strategy = (storage_key_strategy(), value_strategy()) + .prop_map(|(key, value)| Operation::Insert(key, value)); + + let delete_strategy = storage_key_strategy().prop_map(Operation::Delete); + + prop_oneof![insert_strategy, delete_strategy,] + } + + fn insert_strategy() -> impl Strategy { + let insert_strategy = (storage_key_strategy(), value_strategy()) + .prop_map(|(key, value)| Operation::Insert(key, value)); + + prop_oneof![insert_strategy] + } + + fn insert_ops_strategy() -> impl Strategy> { + prop::collection::vec(insert_strategy(), 0..10000) + } + + fn operations_strategy() -> impl Strategy> { + prop::collection::vec(operation_strategy(), 0..10000) + } + + fn execute_transcript( + phase: &str, + reference_store: &mut ReferenceStore, + delta: &mut StateDelta, + transcript: Vec, + ) { + for op in transcript { + reference_store.execute(op.clone()); + let storage_key = op.key(); + let key_hash = storage_key.hash(); + let key_path = storage_key.path(); + + tracing::debug!( + prefix = storage_key.prefix(), + key = storage_key.truncated_key(), + ?key_hash, + ?op, + ?phase, + ); + + match op { + Operation::Insert(_key, value) => { + delta.put_raw(key_path, value); + } + Operation::Delete(_key) => { + delta.delete(key_path); + } + } + } + } + + async fn check_proofs( + state: cnidarium::Snapshot, + reference_store: &ReferenceStore, + root_hash: jmt::RootHash, + phase: &str, + ) -> Result<(), TestCaseError> { + for (storage_key, reference_value) in reference_store.final_kv.iter() { + let key_hash = storage_key.hash(); + let key_path = storage_key.path(); + + tracing::debug!( + prefix = storage_key.prefix(), + key = storage_key.truncated_key(), + ?key_hash, + ?phase, + "checking proofs" + ); + let result_proof = state + .get_with_proof(storage_key.encode_path()) + .await + .map_err(|e| tracing::error!(?e, "get_with_proof failed")); + + prop_assert!(result_proof.is_ok(), "can get with proof"); + + let (retrieved_value, proof) = result_proof.expect("can get with proof"); + prop_assert_eq!(&retrieved_value, reference_value); + + let merkle_path = storage_key.merkle_path(); + let merkle_root = MerkleRoot { + hash: root_hash.0.to_vec(), + 
}; + let specs = storage_key.proof_spec(); + let key_hash = jmt::KeyHash::with::(&key_path); + + tracing::debug!( + prefix = storage_key.prefix(), + num_proofs = proof.proofs.len(), + spec_len = specs.len(), + ?key_hash, + truncated_key = storage_key.truncated_key(), + is_existence = reference_value.is_some(), + "proof verification" + ); + + let proof_verifies = if let Some(value) = retrieved_value.clone() { + proof + .verify_membership(&specs, merkle_root, merkle_path, value, 0) + .map_err(|e| tracing::error!(?e, "existence proof failed")) + } else { + proof + .verify_non_membership(&specs, merkle_root, merkle_path) + .map_err(|e| tracing::error!(?e, "nonexistence proof failed")) + }; + + prop_assert!(proof_verifies.is_ok()); + } + Ok(()) + } + + /// Implements a standard migration test which consists of three phases: + /// - premigration: write and delete keys, check ex/nex proofs + /// - migration: write and delete keys using a migration commit, check ex/nex proofs + /// - postmigration: write new keys, check ex/nex proofs + async fn standard_migration_test( + db_path: PathBuf, + premigration_transcript: Vec, + migration_transcript: Vec, + postmigration_transcript: Vec, + ) -> Result<(), TestCaseError> { + let substore_prefixes = substore_list(); + + let premigration_len = premigration_transcript.len(); + let migration_len = migration_transcript.len(); + let postmigration_len = postmigration_transcript.len(); + let total_ops = premigration_len + migration_len + postmigration_len; + + tracing::info!( + premigration_len, + migration_len, + postmigration_len, + total_ops, + "starting test" + ); + + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()) + .await + .expect("Failed to load storage"); + // `ReferenceStore` is an in-memory store that tracks the latest value for each key + // To do this, the store execute each operation in the transcript and tracks the final value. + // It serves as a source of truth to compare the storage against. 
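+        // The reference store is a "last write wins" model: a transcript like
+        //
+        //     Insert(k, v1); Insert(k, v2); Delete(k)
+        //
+        // leaves `final_kv[k] == None`, so `check_proofs` demands a non-membership
+        // proof for `k`, while a key whose final entry is `Some(v)` must carry a
+        // membership proof for exactly `v` (see `ReferenceStore::execute` above).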
+ let mut reference_store = ReferenceStore::new(); + + // Premigration: write and delete keys + let mut premigration_delta = StateDelta::new(storage.latest_snapshot()); + + execute_transcript( + "premigration", + &mut reference_store, + &mut premigration_delta, + premigration_transcript, + ); + + let premigration_root_hash = storage + .commit(premigration_delta) + .await + .expect("can commit premigration"); + let premigration_version = storage.latest_version(); + prop_assert_eq!(premigration_version, 0, "premigration version should be 0"); + + tracing::info!( + ?premigration_root_hash, + premigration_len, + "premigration operations have been committed" + ); + + let premigration_snapshot = storage.latest_snapshot(); + let _ = check_proofs( + premigration_snapshot, + &reference_store, + premigration_root_hash, + "premigration", + ) + .await?; + + // Migration: write and delete keys + let mut migration_delta = StateDelta::new(storage.latest_snapshot()); + execute_transcript( + "migration", + &mut reference_store, + &mut migration_delta, + migration_transcript, + ); + + let migration_root_hash = storage + .commit_in_place(migration_delta) + .await + .expect("can commit migration"); + let migration_version = storage.latest_version(); + prop_assert_eq!(migration_version, 0, "migration version should be 0"); + prop_assert_ne!( + migration_root_hash, + premigration_root_hash, + "migration root hash should be different than the premigration root hash" + ); + + storage.release().await; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()) + .await + .expect("can reload storage"); + + tracing::info!( + ?migration_root_hash, + migration_len, + "migration operations have been committed" + ); + + let migration_snapshot = storage.latest_snapshot(); + + let _ = check_proofs( + migration_snapshot, + &reference_store, + migration_root_hash, + "migration", + ) + .await?; + + // We toss the storage instance and reload it. + storage.release().await; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()) + .await + .expect("can reload storage"); + + // Post-migration: write new keys! 
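+        // Aside on the version numbers asserted in this test: a fresh store reports
+        // `u64::MAX` (see the write-batch tests later in this patch), the first
+        // `commit` wraps that to 0, `commit_in_place` keeps it at 0, and the
+        // post-migration `commit` below is therefore expected to land on 1.
+        debug_assert_eq!(u64::MAX.wrapping_add(1), 0);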
+ let mut postmigration_delta = StateDelta::new(storage.latest_snapshot()); + execute_transcript( + "postmigration", + &mut reference_store, + &mut postmigration_delta, + postmigration_transcript, + ); + + let postmigration_root_hash = storage + .commit(postmigration_delta) + .await + .expect("can commit postmigration"); + tracing::info!( + ?postmigration_root_hash, + num_ops = postmigration_len, + "postmigration operations have been committed" + ); + + let postmigration_version = storage.latest_version(); + prop_assert_eq!( + postmigration_version, + 1, + "postmigration version should be 1" + ); + prop_assert_ne!( + migration_root_hash, + postmigration_root_hash, + "postmigration root hash should be different than the migration root hash" + ); + + let post_migration_snapshot = storage.latest_snapshot(); + + let _ = check_proofs( + post_migration_snapshot, + &reference_store, + postmigration_root_hash, + "postmigration", + ) + .await?; + Ok(()) + } + + #[proptest(async = "tokio", cases = 100, failure_persistence = Some(Box::new(FileFailurePersistence::WithSource("regressions"))))] + async fn test_migration_substores( + #[strategy(operations_strategy())] premigration_transcript: Vec, + #[strategy(operations_strategy())] migration_transcript: Vec, + #[strategy(operations_strategy())] postmigration_transcript: Vec, + #[strategy(insert_ops_strategy())] nonexistence_keys: Vec, + ) { + let _ = tracing_subscriber::fmt::try_init(); + + let tmpdir = tempfile::tempdir().expect("Failed to create a temp dir"); + let db_path = tmpdir.into_path(); + let _ = standard_migration_test( + db_path.clone(), + premigration_transcript, + migration_transcript, + postmigration_transcript, + ) + .await; + + let storage = Storage::load(db_path.clone(), substore_list()) + .await + .expect("can reload storage"); + + let postmigration_root_hash = storage + .latest_snapshot() + .root_hash() + .await + .expect("infaillible"); + + // Check random keys that should not exist + for op in nonexistence_keys { + let storage_key = op.key(); + let key_hash = storage_key.hash(); + let key_path = storage_key.path(); + + tracing::debug!( + prefix = storage_key.prefix(), + key = &storage_key.truncated_key(), + ?key_hash, + ?op, + "nex: checking proofs" + ); + + let result_proof = storage + .latest_snapshot() + .get_with_proof(storage_key.encode_path()) + .await + .map_err(|e| tracing::error!(?e, "nex: get_with_proof failed")); + + prop_assert!(result_proof.is_ok(), "can get with proof"); + + let (retrieved_value, proof) = result_proof.expect("can get with proof"); + prop_assert!(retrieved_value.is_none(), "key should not exist"); + + let merkle_path = storage_key.merkle_path(); + let merkle_root = MerkleRoot { + hash: postmigration_root_hash.0.to_vec(), + }; + let specs = storage_key.proof_spec(); + let key_hash = jmt::KeyHash::with::(&key_path); + + tracing::debug!( + prefix = storage_key.prefix(), + num_proofs = proof.proofs.len(), + spec_len = specs.len(), + ?key_hash, + truncated_key = &storage_key.truncated_key(), + "nex: proof verification" + ); + + let proof_verifies = proof + .verify_non_membership(&specs, merkle_root, merkle_path) + .map_err(|e| tracing::error!(?e, "nonexistence: nonexistence proof failed")); + + prop_assert!(proof_verifies.is_ok()); + } + } + + #[derive(Clone)] + enum Operation { + Insert(StorageKey, Vec), + Delete(StorageKey), + } + + impl Operation { + fn key(&self) -> &StorageKey { + match self { + Operation::Insert(key, _) => key, + Operation::Delete(key) => key, + } + } + } + + impl Debug for Operation 
{ + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Operation::Insert(_, _) => write!(f, "Insert"), + Operation::Delete(_) => write!(f, "Delete"), + } + } + } + + #[derive(PartialEq, Eq, Clone, PartialOrd, Ord)] + struct StorageKey { + prefix: String, + key: String, + full_path: String, + } + + impl Display for StorageKey { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.path()) + } + } + + impl Debug for StorageKey { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.full_path) + } + } + + impl StorageKey { + fn new(prefix: String, key: String) -> Self { + let full_path = if prefix.is_empty() { + key.clone() + } else { + format!("{}/{}", prefix, key) + }; + + Self { + prefix, + key, + full_path, + } + } + + fn truncated_key(&self) -> String { + self.key.chars().take(5).collect() + } + + fn hash(&self) -> jmt::KeyHash { + jmt::KeyHash::with::(&self.full_path) + } + + fn encode_path(&self) -> Vec { + self.path().as_bytes().to_vec() + } + + fn prefix(&self) -> &String { + &self.prefix + } + + fn is_main_store(&self) -> bool { + self.prefix == "" + } + + fn path(&self) -> String { + self.full_path.clone() + } + + fn merkle_path(&self) -> MerklePath { + if self.is_main_store() { + return MerklePath { + key_path: vec![self.key.clone()], + }; + } else { + MerklePath { + key_path: vec![self.prefix.clone(), self.key.clone()], + } + } + } + + fn proof_spec(&self) -> Vec { + if self.is_main_store() { + MAIN_STORE_PROOF_SPEC.clone() + } else { + FULL_PROOF_SPECS.clone() + } + } + } +} diff --git a/crates/cnidarium/tests/substore_tests.rs b/crates/cnidarium/tests/substore_tests.rs new file mode 100644 index 0000000000..f3ee5e4f83 --- /dev/null +++ b/crates/cnidarium/tests/substore_tests.rs @@ -0,0 +1,751 @@ +use cnidarium::StateDelta; +use cnidarium::StateRead; +use cnidarium::StateWrite; +use cnidarium::Storage; +use ibc_types::core::commitment::MerklePath; +use ibc_types::core::commitment::MerkleRoot; +use jmt::RootHash; +use once_cell::sync::Lazy; +use tempfile; +use tokio; +use tokio_stream::StreamExt; + +#[tokio::test] +#[should_panic] +/// Test that we cannot create a storage with an empty substore prefix. +async fn test_disallow_empty_prefix() -> () { + let tmpdir = tempfile::tempdir().expect("creating a temporary directory works"); + let db_path = tmpdir.into_path(); + let substore_prefixes = vec![""].into_iter().map(|s| s.to_string()).collect(); + let _ = Storage::load(db_path, substore_prefixes).await.unwrap(); +} + +#[tokio::test] +/// Test that we route keys correctly, in particular that we do not allow collisions for keys +/// that lack a delimiter e.g. `prefix_a/key` and `prefix_akey`. +async fn test_route_key_cases() -> () { + let tmpdir = tempfile::tempdir().expect("creating a temporary directory works"); + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["prefix_a", "prefix_b"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await.unwrap(); + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let keys = vec![ + "prefix_a/key_1", + "prefix_akey_1", + "prefix_a/", + // TODO(erwan): Making sure that there are no collisions between + // `prefix_a/` and `prefix_a` is important. 
However, in practice + // `prefix_a` stores the root hash of the substore, so 1/ it will + // not containt the value we put in it since it got overwritten + // during the commit step, 2/ it will be disallowed shortly + // in a follow-up PR. When we do that, we can remove the commented + // out test case below. And instead replace it with a vector that + // checks that we are NOT able to write to `prefix_a` directly. + // "prefix_a", <- this should be disallowed. + "prefix_b/key_1", + ]; + let values = vec![ + "value_1a".as_bytes().to_vec(), + "value_1b".as_bytes().to_vec(), + "value_1c".as_bytes().to_vec(), + "value_1d".as_bytes().to_vec(), + "value_1e".as_bytes().to_vec(), + ]; + + for (key, value) in keys.iter().zip(values.iter()) { + delta.put_raw(key.to_string(), value.to_vec()); + } + let _ = storage.commit(delta).await.unwrap(); + let snapshot = storage.latest_snapshot(); + + for (key, value) in keys.iter().zip(values.iter()) { + let retrieved_value = snapshot.get_raw(key).await.unwrap().unwrap(); + assert_eq!(retrieved_value, *value, "key (key={}) should match", key); + } +} + +#[tokio::test] +async fn test_substore_proofs() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["ibc", "prefix_b", "prefix_c"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let key_a_1 = "ibc/key_1".to_string(); + let value_a_1 = "value_1a".as_bytes().to_vec(); + delta.put_raw(key_a_1.clone(), value_a_1.clone()); + + storage.commit(delta).await?; + + let snapshot = storage.latest_snapshot(); + + pub static PENUMBRA_PROOF_SPECS: Lazy> = + Lazy::new(|| vec![cnidarium::ics23_spec(), cnidarium::ics23_spec()]); + + // check that we can verify proofs back to the root for the new value. + let root = snapshot.root_hash().await?; + let (retreived_value, proof) = snapshot.get_with_proof(key_a_1.into()).await?; + assert_eq!( + Some(value_a_1), + retreived_value.clone(), + "key should exist and match value" + ); + let merkle_path = MerklePath { + key_path: vec!["ibc".to_string(), "key_1".to_string()], + }; + let merkle_root = MerkleRoot { + hash: root.0.to_vec(), + }; + proof.verify_membership( + &PENUMBRA_PROOF_SPECS, + merkle_root.clone(), + merkle_path, + retreived_value.unwrap(), + 0, + )?; + + // check that non-existence proofs work + let (retreived_value, nex_proof) = snapshot.get_with_proof("ibc/doesntexist".into()).await?; + assert_eq!(retreived_value, None, "key should not exist"); + + tracing::debug!(?retreived_value, ?nex_proof, "got non-existence proof"); + + let merkle_path = MerklePath { + key_path: vec!["ibc".to_string(), "doesntexist".to_string()], + }; + nex_proof + .verify_non_membership(&PENUMBRA_PROOF_SPECS, merkle_root, merkle_path) + .expect("non-existence proof should verify"); + + Ok(()) +} + +#[tokio::test] +/// Test that we can create a storage with multiple substores, that we can write to them, and that +/// we can read from them. 
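+///
+/// Routing, using the keys written below: `prefix_a/key_1` lands in the
+/// `prefix_a` substore, an unprefixed `key_1` stays in the main store, and the
+/// substore's own root hash is readable from the main store under the bare key
+/// `prefix_a`.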
+async fn test_substore_simple() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["prefix_a", "prefix_b", "prefix_c"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let key_a_1 = "prefix_a/key_1".to_string(); + let value_a_1 = "value_1a".as_bytes().to_vec(); + tracing::debug!(?key_a_1, ?value_a_1, "calling `put_raw`"); + delta.put_raw(key_a_1.clone(), value_a_1.clone()); + + let key_root_1 = "key_1".to_string(); + let value_root_1 = "value_1".as_bytes().to_vec(); + tracing::debug!(?key_root_1, ?value_root_1, "calling `put_raw`"); + delta.put_raw(key_root_1.clone(), value_root_1.clone()); + + tracing::debug!("committing first batch of writes"); + let global_root_hash_1 = storage.commit(delta).await?; + + tracing::debug!("checking that we can read the values back out"); + // Check that we can read the values back out. + let snapshot = storage.latest_snapshot(); + + let retrieved_value_a1 = snapshot.get_raw(key_a_1.as_str()).await?.unwrap(); + assert_eq!(retrieved_value_a1, value_a_1); + + let retrieved_value_root_1 = snapshot.get_raw(key_root_1.as_str()).await?.unwrap(); + assert_eq!(retrieved_value_root_1, value_root_1); + + // Check that the prefix root hash is stored at the correct key. + // One method looks up the key in the main store, the other creates a jmt over + // the substore and looks up the root hash in that tree. + let retrieved_prefix_a_root_hash = snapshot + .get_raw("prefix_a") + .await? + .expect("key `prefix_a` should contain the substore root hash"); + assert_eq!( + retrieved_prefix_a_root_hash.len(), + global_root_hash_1.0.len() + ); + + let retrieved_prefix_a_root_hash = RootHash(retrieved_prefix_a_root_hash.try_into().unwrap()); + let prefix_a_root_hash = snapshot.prefix_root_hash("prefix_a").await?; + assert_eq!(prefix_a_root_hash, retrieved_prefix_a_root_hash); + let old_prefix_a_root_hash = prefix_a_root_hash; + + drop(snapshot); + + // Check that we can read new values from a prefixed substore, then check that + // versioning works correctly, by making sure that we fetch the correct root hash. + let key_a_2 = "prefix_a/key_2".to_string(); + let value_a_2 = "value_2a".as_bytes().to_vec(); + let mut delta = StateDelta::new(storage.latest_snapshot()); + delta.put_raw(key_a_2.clone(), value_a_2.clone()); + let global_root_hash_2 = storage.commit(delta).await?; + + // CHeck that we can read the new value back out. + let snapshot = storage.latest_snapshot(); + let retrieved_value_a2 = snapshot.get_raw(key_a_2.as_str()).await?.unwrap(); + assert_eq!(retrieved_value_a2, value_a_2); + let retrieved_value_a1 = snapshot.get_raw(key_a_1.as_str()).await?.unwrap(); + assert_eq!(retrieved_value_a1, value_a_1); + + // Retrieve the substore root hash again, and check that it has changed. + assert_ne!(global_root_hash_1, global_root_hash_2); // sanity check. + let prefix_a_root_hash = snapshot.prefix_root_hash("prefix_a").await?; + let retrieved_prefix_a_root_hash = snapshot + .get_raw("prefix_a") + .await? 
+ .expect("prefix_a should contain the substore root hash"); + assert_eq!( + prefix_a_root_hash, + RootHash(retrieved_prefix_a_root_hash.try_into().unwrap()) + ); + assert_ne!(prefix_a_root_hash, old_prefix_a_root_hash); + + Ok(()) +} + +#[tokio::test] +/// Test that we can create a storage with multiple substores, that we can write to them, and that +/// we can read from them using prefix queries. +async fn test_substore_prefix_queries() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["prefix_a", "prefix_b", "prefix_c"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let mut all_kv = vec![]; + let mut kv_a = vec![]; + let mut kv_b = vec![]; + let mut kv_main = vec![]; + for i in 0..10 { + let key_a_i = format!("prefix_a/key_{}", i); + let value_a_i = format!("value_{}a", i).as_bytes().to_vec(); + delta.put_raw(key_a_i.clone(), value_a_i.clone()); + all_kv.push((key_a_i.clone(), value_a_i.clone())); + kv_a.push((key_a_i, value_a_i)); + } + + for i in 0..10 { + let key_b_i = format!("prefix_b/key_{}", i); + let value_b_i = format!("value_{}b", i).as_bytes().to_vec(); + delta.put_raw(key_b_i.clone(), value_b_i.clone()); + all_kv.push((key_b_i.clone(), value_b_i.clone())); + kv_b.push((key_b_i, value_b_i)); + } + + for i in 0..10 { + let key_i = format!("key_{}", i); + let value_i = format!("value_{}", i).as_bytes().to_vec(); + delta.put_raw(key_i.clone(), value_i.clone()); + all_kv.push((key_i.clone(), value_i.clone())); + kv_main.push((key_i, value_i)); + } + + let _ = storage.commit(delta).await?; + + let snapshot = storage.latest_snapshot(); + let mut counter = 0; + let query_prefix = "prefix_a"; + let mut range = snapshot.prefix_keys(query_prefix); + while let Some(res) = range.next().await { + let key = res?; + if counter >= kv_a.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_a[counter].0.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_a.len(), + "should have iterated over all keys (prefix_a)" + ); + + let mut counter = 0; + let query_prefix = "prefix_b"; + let mut range = snapshot.prefix_keys(query_prefix); + while let Some(res) = range.next().await { + let key = res?; + + if counter >= kv_b.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_b[counter].0.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_b.len(), + "should have iterated over all keys (prefix_b)" + ); + + let mut counter = 0; + let query_prefix = "key"; + let mut range = snapshot.prefix_keys(query_prefix); + while let Some(res) = range.next().await { + let key = res?; + tracing::debug!(?key, ?query_prefix, "iterating over keys"); + + if counter >= kv_main.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_main[counter].0.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_main.len(), + "should have iterated over all keys (main)" + ); + + 
Ok(()) +} + +#[tokio::test] +/// Test that `StateRead::prefix_keys` work as expected. +/// This test is similar to `test_substore_prefix_queries`, but uses `prefix_keys` instead of +/// `prefix_raw`. +async fn test_substore_prefix_keys() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["prefix_a", "prefix_b", "prefix_c"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let mut all_kv = vec![]; + let mut kv_a = vec![]; + let mut kv_b = vec![]; + let mut kv_main = vec![]; + for i in 0..10 { + let key_a_i = format!("prefix_a/key_{}", i); + let value_a_i = format!("value_{}a", i).as_bytes().to_vec(); + delta.put_raw(key_a_i.clone(), value_a_i.clone()); + all_kv.push((key_a_i.clone(), value_a_i.clone())); + kv_a.push((key_a_i, value_a_i)); + } + + for i in 0..10 { + let key_b_i = format!("prefix_b/key_{}", i); + let value_b_i = format!("value_{}b", i).as_bytes().to_vec(); + delta.put_raw(key_b_i.clone(), value_b_i.clone()); + all_kv.push((key_b_i.clone(), value_b_i.clone())); + kv_b.push((key_b_i, value_b_i)); + } + + for i in 0..10 { + let key_i = format!("key_{}", i); + let value_i = format!("value_{}", i).as_bytes().to_vec(); + delta.put_raw(key_i.clone(), value_i.clone()); + all_kv.push((key_i.clone(), value_i.clone())); + kv_main.push((key_i, value_i)); + } + + let _ = storage.commit(delta).await?; + + let snapshot = storage.latest_snapshot(); + let mut counter = 0; + let query_prefix = "prefix_a"; + let mut range = snapshot.prefix_keys(query_prefix); + while let Some(res) = range.next().await { + let key = res?; + if counter >= kv_a.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_a[counter].0.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_a.len(), + "should have iterated over all keys (prefix_a)" + ); + + let mut counter = 0; + let query_prefix = "prefix_b"; + let mut range = snapshot.prefix_keys(query_prefix); + while let Some(res) = range.next().await { + let key = res?; + + if counter >= kv_b.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_b[counter].0.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_b.len(), + "should have iterated over all keys (prefix_b)" + ); + + let mut counter = 0; + let query_prefix = "key"; + let mut range = snapshot.prefix_raw(query_prefix); + while let Some(res) = range.next().await { + let (key, value) = res?; + tracing::debug!(?key, ?query_prefix, "iterating over key/value pair"); + + if counter >= kv_main.len() { + tracing::debug!(?key, ?value, ?query_prefix, "unexpected key/value pair"); + panic!("prefix query returned too many entries") + } + + let expected_key = kv_main[counter].0.clone(); + let expected_value = kv_main[counter].1.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + assert_eq!(value, expected_value, "value {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_main.len(), + "should have iterated over all keys (main)" + ); + + Ok(()) +} + +#[tokio::test] +/// Test that 
`StateRead::nonverifiable_prefix_raw` works as expected. +/// This test is similar to `test_substore_prefix_queries`, but uses nonverifiable storage. +async fn test_substore_nv_prefix() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["prefix_a", "prefix_b", "prefix_c"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let mut all_kv = vec![]; + let mut kv_a = vec![]; + let mut kv_b = vec![]; + let mut kv_main = vec![]; + for i in 0..10 { + let key_a_i = format!("prefix_a/key_{}", i); + let value_a_i = format!("value_{}a", i); + delta.nonverifiable_put_raw(key_a_i.as_bytes().to_vec(), value_a_i.as_bytes().to_vec()); + all_kv.push((key_a_i.clone(), value_a_i.clone())); + kv_a.push((key_a_i, value_a_i)); + } + + for i in 0..10 { + let key_b_i = format!("prefix_b/key_{}", i); + let value_b_i = format!("value_{}b", i); + delta.nonverifiable_put_raw(key_b_i.as_bytes().to_vec(), value_b_i.as_bytes().to_vec()); + all_kv.push((key_b_i.clone(), value_b_i.clone())); + kv_b.push((key_b_i, value_b_i)); + } + + for i in 0..10 { + let key_i = format!("key_{}", i); + let value_i = format!("value_{}", i); + delta.nonverifiable_put_raw(key_i.as_bytes().to_vec(), value_i.as_bytes().to_vec()); + all_kv.push((key_i.clone(), value_i.clone())); + kv_main.push((key_i, value_i)); + } + + let _ = storage.commit(delta).await?; + + let snapshot = storage.latest_snapshot(); + let mut counter = 0; + let query_prefix = "prefix_a".as_bytes(); + let mut range = snapshot.nonverifiable_prefix_raw(query_prefix); + while let Some(res) = range.next().await { + let (raw_key, raw_value) = res?; + let key = String::from_utf8(raw_key)?; + let value = String::from_utf8(raw_value)?; + if counter >= kv_a.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_a[counter].0.clone(); + let expected_value = kv_a[counter].1.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + assert_eq!(value, expected_value, "value {} should match", counter); + + counter += 1; + } + assert_eq!( + counter, + kv_a.len(), + "should have iterated over all prefix (prefix_a)" + ); + + let mut counter = 0; + let query_prefix = "prefix_b".as_bytes(); + let mut range = snapshot.nonverifiable_prefix_raw(query_prefix); + while let Some(res) = range.next().await { + let (raw_key, raw_value) = res?; + let key = String::from_utf8(raw_key)?; + let value = String::from_utf8(raw_value)?; + + if counter >= kv_b.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_b[counter].0.clone(); + let expected_value = kv_b[counter].1.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + assert_eq!(value, expected_value, "value {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_b.len(), + "should have iterated over all prefix (prefix_b)" + ); + + let mut counter = 0; + let query_prefix = "key".as_bytes(); + let mut range = snapshot.nonverifiable_prefix_raw(query_prefix); + while let Some(res) = range.next().await { + let (raw_key, raw_value) = res?; + let key = String::from_utf8(raw_key)?; + let value = String::from_utf8(raw_value)?; + + tracing::debug!(?key, 
?query_prefix, "iterating over prefix"); + + if counter >= kv_main.len() { + tracing::debug!(?key, ?query_prefix, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_main[counter].0.clone(); + let expected_value = kv_main[counter].1.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + assert_eq!(value, expected_value, "value {} should match", counter); + counter += 1; + } + assert_eq!( + counter, + kv_main.len(), + "should have iterated over all keys (main)" + ); + + Ok(()) +} + +#[tokio::test] +/// Test that range queries over the main store work as expected. +/// TODO(erwan): the range query implementation is broken for substores. Ignore this test for now. +#[ignore] +async fn test_substore_nv_range_queries_main_store() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec!["a", "b", "c", "d"] + .into_iter() + .map(|s| s.to_string()) + .collect(); + let storage = Storage::load(db_path, substore_prefixes).await?; + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let mut all_kv = vec![]; + let mut kv_a = vec![]; + let mut kv_c = vec![]; + let mut kv_d = vec![]; + let mut kv_main = vec![]; + for i in 0..100 { + let key_a_i = format!("a/key_{}", i); + let value_a_i = format!("value_{}a", i).as_bytes().to_vec(); + delta.put_raw(key_a_i.clone(), value_a_i.clone()); + all_kv.push((key_a_i.clone(), value_a_i.clone())); + kv_a.push((key_a_i, value_a_i)); + } + + for i in 0..100 { + let key_c_i = format!("c/key_{}", i); + let value_c_i = format!("value_{}c", i).as_bytes().to_vec(); + delta.put_raw(key_c_i.clone(), value_c_i.clone()); + all_kv.push((key_c_i.clone(), value_c_i.clone())); + kv_c.push((key_c_i, value_c_i)); + } + + for i in 0..100 { + let key_i = format!("compactblock/{i:020}"); + let value_i = format!("value_{}", i).as_bytes().to_vec(); + delta.put_raw(key_i.clone(), value_i.clone()); + all_kv.push((key_i.clone(), value_i.clone())); + kv_main.push((key_i, value_i)); + } + + for i in 0..100 { + let key_d_i = format!("d/key_{}", i); + let value_d_i = format!("value_{}c", i).as_bytes().to_vec(); + delta.put_raw(key_d_i.clone(), value_d_i.clone()); + all_kv.push((key_d_i.clone(), value_d_i.clone())); + kv_d.push((key_d_i, value_d_i)); + } + + let _ = storage.commit(delta).await?; + + let snapshot = storage.latest_snapshot(); + + // First, check that we can iterate over a range of compact blocks in the main store. + // We define a range that spans all compact blocks between 12 and 34. 
+ let mut counter = 0; + let start_index = 12; + let end_index = 34; + let mut index = start_index; + + let start_key = format!("compactblock/{:020}", start_index) + .as_bytes() + .to_vec(); + let end_key = format!("compactblock/{:020}", end_index) + .as_bytes() + .to_vec(); + let mut range = snapshot.nonverifiable_range_raw(None, start_key.clone()..end_key.clone())?; + while let Some(res) = range.next().await { + let (raw_key, _) = res?; + let key = String::from_utf8(raw_key)?; + if counter > kv_a.len() { + tracing::debug!(?key, ?start_key, ?end_key, "unexpected key"); + panic!("prefix_keys query returned too many entries") + } + + let expected_key = kv_main[index].0.clone(); + assert_eq!(key, expected_key, "key {} should match", counter); + counter += 1; + index += 1; + } + assert_eq!( + counter, + end_index - start_index, + "should have iterated over all entries (compact block range)" + ); + + Ok(()) +} + +#[tokio::test] +/// Minimal reproduction of the prefix range cache bug. +/// +/// Context: +/// `cnidarium`, our storage layer, supports prefix storage. +/// This allows users to configure independent storage units, each with +/// their own merkle tree, nonverifiable sidecar, and separate namespace. +/// Routing is done transparently without the user having to worry about +/// the details. +/// +/// Overview: +/// Prefix queries return tuples of (key, value)s, but instead of +/// returning the full key, they return the substore key. This is a layering +/// violation, and indeed causes a bug in the cache interleaving logic. +/// +/// Terminology: +/// - a `full_key`: a key that contains a substore prefix, a delimiter, and a substore key. +/// - a `substore_key`: a key with a stripped prefix. +/// +/// Walkthrough: +/// `StateDelta` index changes using full keys, as it is not aware of the +/// particular substore configuration that it is working against, by design. +/// As part of the cache interleaving logic, the `StateDetla` will try look for +/// new writes or covering deletions. However, since the base prefix implementation +/// returns substore keys, the cache will build an incoherence range and panic (or miss data). +async fn reproduction_bad_substore_cache_range() -> anyhow::Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + // We pick a friendly prefix with high lexicographic order to help + // with reproducing a "bad range" where the lower boundn is greater than + // the upper bound. + let substore_prefix = "zest".to_string(); + let substore_prefixes = vec![substore_prefix.clone()]; + let storage = Storage::load(db_path, substore_prefixes).await?; + + // Write some keys in the substore so that we can prefix range over something + let mut delta = StateDelta::new(storage.latest_snapshot()); + + let mut substore_kvs = vec![]; + + for i in 0..100 { + let k = format!("{}/key_{i:020}", substore_prefix); + let v = format!("value_{i}").as_bytes().to_vec(); + delta.put_raw(k.clone(), v.clone()); + substore_kvs.push(k) + } + + let _ = storage.commit(delta).await?; + let snapshot = storage.latest_snapshot(); + + // We can prefix range fine on a static snapshot. + let mut naive_prefix = snapshot.prefix_raw("zest/"); + // Track the number of prefix entries returned as a basic check. 
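+    // Illustration of the terminology above (the exact cache behavior is an
+    // inference from the doc comment's walkthrough): the dirty delta further
+    // down caches its write under the full key "zest/normal_key", while the base
+    // prefix implementation yields stripped substore keys such as
+    // "key_00000000000000000000"; the two key spaces do not even sort
+    // consistently, which is how an inverted range can arise.
+    debug_assert!("zest/normal_key" > "key_00000000000000000000");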
+ let mut visited = vec![]; + while let Some(entry) = naive_prefix.next().await { + let (k, _) = entry?; + visited.push(k); + } + assert_eq!(visited, substore_kvs, "prefix query is missing keys"); + + // We established that we can do prefix range on a static snapshot. + // Now let's try on a no-op `StateDelta` + let mut delta = StateDelta::new(snapshot); + let mut clean_delta_prefix = delta.prefix_raw("zest/"); + let mut visited = vec![]; + while let Some(entry) = clean_delta_prefix.next().await { + let (k, _) = entry?; + visited.push(k); + } + assert_eq!(visited, substore_kvs, "prefix query is missing keys"); + + // It worked, finally let's try on a dirty delta. + delta.put_raw( + "zest/normal_key".to_string(), + "normal_value".as_bytes().to_vec(), + ); + let mut dirty_delta_prefix = delta.prefix_raw("zest/"); + let mut visited = vec![]; + // Cache interleaving logic will build a bad range and cause a panic. + // Check out `v0.77.3` or prior to see the panic. + while let Some(entry) = dirty_delta_prefix.next().await { + let (k, _) = entry?; + visited.push(k); + } + // Add the key we wrote to the substore. + substore_kvs.push("zest/normal_key".to_string()); + assert_eq!(visited, substore_kvs, "prefix query is missing keys"); + + Ok(()) +} diff --git a/crates/cnidarium/tests/write_batch.rs b/crates/cnidarium/tests/write_batch.rs new file mode 100644 index 0000000000..0717400fd0 --- /dev/null +++ b/crates/cnidarium/tests/write_batch.rs @@ -0,0 +1,302 @@ +use anyhow::Result; +use cnidarium::{StateDelta, StateWrite, Storage}; +use tempfile; +use tokio; + +#[tokio::test] +/// A simple test that checks that we cannot commit a stale batch to storage. +/// Strategy: +/// Create three state deltas, one that writes to every substore, and two others +/// that target specific substores or none at all. +pub async fn test_write_batch_stale_version_substores() -> Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec![ + "ibc".to_string(), + "dex".to_string(), + "misc".to_string(), + "cometbft-data".to_string(), + ]; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + let initial_snapshot = storage.latest_snapshot(); + let initial_version = initial_snapshot.version(); + let initial_root_hash = initial_snapshot.root_hash().await?; + assert_eq!( + initial_version, + u64::MAX, + "initial version should be u64::MAX" + ); + assert_eq!(initial_root_hash.0, [0u8; 32]); + + /* ************************ Prepare three deltas ************************** */ + // Our goal is to check that we can't commit a batch with a stale version. + // We create three deltas: + // 1. Empty delta + // 2. Delta that writes to one substore + // 3. Delta that writes to each substore and also writes to the main store + + /* We create an empty delta that writes no keys. */ + let delta_1 = StateDelta::new(initial_snapshot); + let write_batch_1 = storage.prepare_commit(delta_1).await?; + let version_1 = write_batch_1.version(); + let root_hash_1 = write_batch_1.root_hash().clone(); + assert_eq!(version_1, initial_version.wrapping_add(1)); + assert_ne!(root_hash_1.0, initial_root_hash.0); + + // We check that merely preparing a batch does not write anything. 
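+    // Sketch of the two-phase API exercised here (names are the ones used in
+    // this test; semantics are inferred from the assertions below):
+    //
+    //     let batch = storage.prepare_commit(delta).await?; // stages writes off to the side
+    //     storage.commit_batch(batch)?;                      // atomically persists them
+    //
+    // A batch prepared against a snapshot that is no longer the latest is
+    // rejected by `commit_batch`, which is the stale-batch property this test
+    // checks at the end.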
+ let state_snapshot = storage.latest_snapshot(); + assert_eq!(state_snapshot.version(), initial_version); + assert_eq!(state_snapshot.root_hash().await?.0, initial_root_hash.0); + for prefix in substore_prefixes.iter() { + // We didn't write to any substores, so their version should be unchanged. + assert_eq!( + write_batch_1 + .substore_version(prefix) + .expect("substore exists"), + u64::MAX + ) + } + + /* We create a new delta that writes to a single substore. */ + let mut delta_2 = StateDelta::new(state_snapshot.clone()); + delta_2.put_raw("ibc/key".to_string(), [1u8; 32].to_vec()); + let write_batch_2 = storage.prepare_commit(delta_2).await?; + let version_2 = write_batch_2.version(); + let root_hash_2 = write_batch_2.root_hash(); + assert_eq!(version_2, initial_version.wrapping_add(1)); + assert_ne!(root_hash_2.0, initial_root_hash.0); + + // Now, we check that the version for the main store is incremented, and + // only the version for the ibc substore is incremented. + assert_eq!(write_batch_2.version(), initial_version.wrapping_add(1)); + assert_eq!( + write_batch_2 + .substore_version("ibc") + .expect("substore_exists"), + initial_version.wrapping_add(1) + ); + for prefix in substore_prefixes.iter().filter(|p| *p != "ibc") { + assert_eq!( + write_batch_2 + .substore_version(prefix) + .expect("substore exists"), + u64::MAX + ) + } + + /* We create a new delta that writes to each substore. */ + let mut delta_3 = StateDelta::new(state_snapshot); + for substore_prefix in substore_prefixes.iter() { + let key = format!("{}/key", substore_prefix); + tracing::debug!(?key, "adding to delta_1"); + delta_3.put_raw(key, [1u8; 32].to_vec()); + } + let write_batch_3 = storage.prepare_commit(delta_3).await?; + let version_3 = write_batch_3.version(); + let root_hash_3 = write_batch_3.root_hash().clone(); + + // Once again, we check that we incremented the main store version. + assert_eq!(version_3, initial_version.wrapping_add(1)); + assert_ne!(root_hash_3.0, initial_root_hash.0); + // In addition to that, we check that we incremented the version of each substore. + for prefix in substore_prefixes.iter() { + assert_eq!( + write_batch_3 + .substore_version(prefix) + .expect("substore exists"), + initial_version.wrapping_add(1) + ) + } + + /* Persist `write_batch_1` and check that the two other (stale) deltas cannot be applied. */ + let final_root = storage + .commit_batch(write_batch_1) + .expect("committing batch 3 should work"); + let final_snapshot = storage.latest_snapshot(); + assert_eq!(root_hash_1.0, final_root.0); + assert_eq!(root_hash_1.0, final_snapshot.root_hash().await?.0); + assert_eq!(version_1, final_snapshot.version()); + assert!( + storage.commit_batch(write_batch_2).is_err(), + "committing batch 2 should fail" + ); + assert!( + storage.commit_batch(write_batch_3).is_err(), + "committing batch 3 should fail" + ); + + Ok(()) +} + +#[tokio::test] +/// Test that we can commit a batch without incrementing the substore versions if there are no +/// keys to write. 
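+/// (Inferred from the assertions below: an empty delta still advances the main
+/// store version when committed, but substores that receive no writes keep
+/// their previous version.)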
+pub async fn test_two_empty_writes() -> Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec![ + "ibc".to_string(), + "dex".to_string(), + "misc".to_string(), + "cometbft-data".to_string(), + ]; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + let initial_snapshot = storage.latest_snapshot(); + let initial_version = initial_snapshot.version(); + let initial_root_hash = initial_snapshot.root_hash().await?; + assert_eq!( + initial_version, + u64::MAX, + "initial version should be u64::MAX" + ); + assert_eq!(initial_root_hash.0, [0u8; 32]); + + let mut delta_1 = StateDelta::new(initial_snapshot); + for substore_prefix in substore_prefixes.iter() { + let key = format!("{}/key", substore_prefix); + tracing::debug!(?key, "adding to delta_1"); + delta_1.put_raw(key, [1u8; 12].to_vec()); + } + let write_batch_1 = storage.prepare_commit(delta_1).await?; + let version_1 = write_batch_1.version(); + let root_hash_1 = write_batch_1.root_hash().clone(); + + assert_eq!(version_1, initial_version.wrapping_add(1)); + assert_ne!(root_hash_1.0, initial_root_hash.0); + for prefix in substore_prefixes.iter() { + assert_eq!( + write_batch_1 + .substore_version(prefix) + .expect("substore exists"), + initial_version.wrapping_add(1) + ) + } + + // We check that merely preparing a batch does not write anything. + let state_snapshot = storage.latest_snapshot(); + assert_eq!(state_snapshot.version(), initial_version); + assert_eq!(state_snapshot.root_hash().await?.0, initial_root_hash.0); + + /* We create a new delta that writes no keys */ + let delta_2 = StateDelta::new(state_snapshot.clone()); + let write_batch_2 = storage.prepare_commit(delta_2).await?; + let version_2 = write_batch_2.version(); + let root_hash_2 = write_batch_2.root_hash(); + assert_eq!(version_2, initial_version.wrapping_add(1)); + assert_ne!(root_hash_2.0, initial_root_hash.0); + assert_eq!(write_batch_2.version(), initial_version.wrapping_add(1)); + for prefix in substore_prefixes.iter() { + assert_eq!( + write_batch_2 + .substore_version(prefix) + .expect("substore exists"), + initial_version + ) + } + + let block_1_root = storage + .commit_batch(write_batch_1) + .expect("committing batch 3 should work"); + let block_1_snapshot = storage.latest_snapshot(); + let block_1_version = block_1_snapshot.version(); + assert_eq!(root_hash_1.0, block_1_root.0); + assert_eq!(root_hash_1.0, block_1_snapshot.root_hash().await?.0); + assert_eq!(version_1, block_1_version); + assert!( + storage.commit_batch(write_batch_2).is_err(), + "committing batch 2 should fail" + ); + + /* We create an empty delta that writes no keys. */ + let delta_3 = StateDelta::new(block_1_snapshot); + let write_batch_3 = storage.prepare_commit(delta_3).await?; + let version_3 = write_batch_3.version(); + let root_hash_3 = write_batch_3.root_hash().clone(); + assert_eq!(version_3, block_1_version.wrapping_add(1)); + + /* Check that we can apply `write_batch_3` */ + let block_2_root = storage + .commit_batch(write_batch_3) + .expect("committing batch 3 should work"); + let block_2_snapshot = storage.latest_snapshot(); + let block_2_version = block_2_snapshot.version(); + assert_eq!(root_hash_3.0, block_2_root.0); + assert_eq!(root_hash_3.0, block_2_snapshot.root_hash().await?.0); + assert_eq!(version_3, block_2_version); + Ok(()) +} + +#[tokio::test] +/// Test that we can write prepare-commit batches that write to every +/// substore. 
+/// Intuition: we want to make sure that the version check that guards us from +/// writing stale batches, is working as expected. +pub async fn test_batch_substore() -> Result<()> { + let _ = tracing_subscriber::fmt::try_init(); + let tmpdir = tempfile::tempdir()?; + let db_path = tmpdir.into_path(); + let substore_prefixes = vec![ + "ibc".to_string(), + "dex".to_string(), + "misc".to_string(), + "cometbft-data".to_string(), + ]; + let storage = Storage::load(db_path.clone(), substore_prefixes.clone()).await?; + let initial_snapshot = storage.latest_snapshot(); + let initial_version = initial_snapshot.version(); + let initial_root_hash = initial_snapshot.root_hash().await?; + assert_eq!( + initial_version, + u64::MAX, + "initial version should be u64::MAX" + ); + assert_eq!(initial_root_hash.0, [0u8; 32]); + + for i in 0..100 { + let snapshot = storage.latest_snapshot(); + let prev_version = snapshot.version(); + let prev_root = snapshot + .root_hash() + .await + .expect("a root hash is available"); + + let mut delta = StateDelta::new(snapshot); + for substore_prefix in substore_prefixes.iter() { + let key = format!("{}/key_{i}", substore_prefix); + tracing::debug!(?key, index = i, "adding to delta"); + delta.put_raw(key, [1u8; 12].to_vec()); + } + let write_batch = storage.prepare_commit(delta).await?; + let next_version = write_batch.version(); + let next_root = write_batch.root_hash().clone(); + + assert_eq!(next_version, prev_version.wrapping_add(1)); + assert_ne!(next_root.0, prev_root.0); + for prefix in substore_prefixes.iter() { + assert_eq!( + write_batch + .substore_version(prefix) + .expect("substore exists"), + prev_version.wrapping_add(1) + ) + } + + // We check that merely preparing a batch does not write anything. + let state_snapshot = storage.latest_snapshot(); + assert_eq!(state_snapshot.version(), prev_version); + assert_eq!(state_snapshot.root_hash().await?.0, prev_root.0); + + let block_root = storage + .commit_batch(write_batch) + .expect("committing batch 3 should work"); + let block_snapshot = storage.latest_snapshot(); + let block_version = block_snapshot.version(); + assert_eq!(next_root.0, block_root.0); + assert_eq!(next_root.0, block_snapshot.root_hash().await?.0); + assert_eq!(next_version, block_version); + } + + Ok(()) +} diff --git a/crates/core/app/Cargo.toml b/crates/core/app/Cargo.toml index 002ec2f651..9013371d07 100644 --- a/crates/core/app/Cargo.toml +++ b/crates/core/app/Cargo.toml @@ -97,7 +97,7 @@ tonic = { workspace = true, optional = true } tonic-reflection = { workspace = true, optional = true } tonic-web = { workspace = true, optional = true } tower = { workspace = true, features = ["full"] } -tower-abci = "0.18" +tower-abci = "0.11" tower-actor = "0.1.0" tower-service = { workspace = true } tracing = { workspace = true } @@ -121,7 +121,7 @@ rand_core = { workspace = true } tap = { workspace = true } tempfile = { workspace = true } tendermint-config = { workspace = true } -tower-http = { workspace = true, features = ["cors"] } +tower-http = { workspace = true } tracing-subscriber = { workspace = true } url = { workspace = true } diff --git a/crates/core/app/src/rpc.rs b/crates/core/app/src/rpc.rs index c8a0d9ea32..90c1b26f1f 100644 --- a/crates/core/app/src/rpc.rs +++ b/crates/core/app/src/rpc.rs @@ -10,18 +10,16 @@ use { self::query::AppQueryServer, crate::PenumbraHost, anyhow::Context, - cnidarium::proto::v1::query_service_server::QueryServiceServer as StorageQueryServiceServer, - cnidarium::rpc::Server as StorageServer, - ibc_proto::{ - 
cosmos::bank::v1beta1::query_server::QueryServer as TransferQueryServer, - ibc::{ - applications::transfer::v1::query_server::QueryServer as BankQueryServer, - core::{ - channel::v1::query_server::QueryServer as ChannelQueryServer, - client::v1::query_server::QueryServer as ClientQueryServer, - connection::v1::query_server::QueryServer as ConnectionQueryServer, - }, - }, + cnidarium::rpc::{ + proto::v1::query_service_server::QueryServiceServer as StorageQueryServiceServer, + Server as StorageServer, + }, + ibc_proto::cosmos::bank::v1beta1::query_server::QueryServer as TransferQueryServer, + ibc_proto::ibc::applications::transfer::v1::query_server::QueryServer as BankQueryServer, + ibc_proto::ibc::core::{ + channel::v1::query_server::QueryServer as ChannelQueryServer, + client::v1::query_server::QueryServer as ClientQueryServer, + connection::v1::query_server::QueryServer as ConnectionQueryServer, }, penumbra_auction::component::rpc::Server as AuctionServer, penumbra_compact_block::component::rpc::Server as CompactBlockServer, @@ -52,19 +50,28 @@ use { penumbra_sct::component::rpc::Server as SctServer, penumbra_shielded_pool::component::rpc::Server as ShieldedPoolServer, penumbra_stake::component::rpc::Server as StakeServer, - tonic::service::Routes, + penumbra_tower_trace::remote_addr, tonic_web::enable as we, }; -pub fn routes( +pub fn router( storage: &cnidarium::Storage, tm_proxy: impl TendermintProxyService, _enable_expensive_rpc: bool, -) -> anyhow::Result { +) -> anyhow::Result { let ibc = penumbra_ibc::component::rpc::IbcQuery::::new(storage.clone()); - - let mut builder = Routes::builder(); - builder + let grpc_server = tonic::transport::server::Server::builder() + .trace_fn(|req| match remote_addr(req) { + Some(remote_addr) => { + tracing::error_span!("grpc", ?remote_addr) + } + None => tracing::error_span!("grpc"), + }) + // Allow HTTP/1, which will be used by grpc-web connections. + // This is particularly important when running locally, as gRPC + // typically uses HTTP/2, which requires HTTPS. Accepting HTTP/1 + // allows local applications such as web browsers to talk to pd. + .accept_http1(true) // As part of #2932, we are disabling all timeouts until we circle back to our // performance story.
// Sets a timeout for all gRPC requests, but note that in the case of streaming @@ -118,7 +125,8 @@ pub fn routes( )))) .add_service(we(tonic_reflection::server::Builder::configure() .register_encoded_file_descriptor_set(penumbra_proto::FILE_DESCRIPTOR_SET) - .build_v1() + .build() .with_context(|| "could not configure grpc reflection service")?)); - Ok(builder.routes().prepare()) + + Ok(grpc_server) } diff --git a/crates/core/app/src/server/consensus.rs b/crates/core/app/src/server/consensus.rs index 7f5c7983a3..5eae5010fc 100644 --- a/crates/core/app/src/server/consensus.rs +++ b/crates/core/app/src/server/consensus.rs @@ -25,10 +25,7 @@ fn trace_events(events: &[Event]) { let span = tracing::debug_span!("event", kind = ?event.kind); span.in_scope(|| { for attr in &event.attributes { - tracing::debug!( - k = %String::from_utf8_lossy(attr.key_bytes()), - v = %String::from_utf8_lossy(attr.value_bytes()), - ); + tracing::debug!(k = ?attr.key, v=?attr.value); } }) } diff --git a/crates/core/app/src/server/events.rs b/crates/core/app/src/server/events.rs index 7c8732ce9e..906f3d855c 100644 --- a/crates/core/app/src/server/events.rs +++ b/crates/core/app/src/server/events.rs @@ -38,27 +38,14 @@ impl EventIndexLayer { // Perform matching on a nested key in the same format used by // the cosmos SDK: https://docs.cosmos.network/main/core/config // e.g., "message.sender", "message.recipient" + let nested_key = format!("{}.{}", e.kind, attr.key); - // XXX: tendermint-rs now supports v034 events types, which are - // plain bytes and need not be utf8. This messes with the - // regex and is likely not desirable to have in downstream - // consumers of the indexed events. - match attr.key_str() { - Ok(key) => { - let nested_key = format!("{}.{}", e.kind, key); - - if self.no_index.is_match(&nested_key) { - attr.set_index(false) - } - // This comes second so that explicit index requests take priority over no-index requests. - if self.index.is_match(&nested_key) { - attr.set_index(false) - } - } - // TODO: what should be done with this error? Emit a warning? - Err(_err) => { - attr.set_index(false); - } + if self.no_index.is_match(&nested_key) { + attr.index = false; + } + // This comes second so that explicit index requests take priority over no-index requests. + if self.index.is_match(&nested_key) { + attr.index = true; } } } diff --git a/crates/core/app/src/server/info.rs b/crates/core/app/src/server/info.rs index 63c3c7ae88..90e090a7c8 100644 --- a/crates/core/app/src/server/info.rs +++ b/crates/core/app/src/server/info.rs @@ -259,7 +259,6 @@ impl Info { channel_id: chan_id, port_id: PortId::transfer(), channel_end: channel, - upgrade_sequence: 0, }; channels.push(id_chan.into()); } @@ -309,7 +308,6 @@ impl Info { channel_id: chan_id, port_id: PortId::transfer(), channel_end: channel, - upgrade_sequence: 0, }; channels.push(id_chan.into()); } diff --git a/crates/core/app/tests/app_can_sweep_a_collection_of_small_notes.rs b/crates/core/app/tests/app_can_sweep_a_collection_of_small_notes.rs index 1a2ae423cb..dfe096bdde 100644 --- a/crates/core/app/tests/app_can_sweep_a_collection_of_small_notes.rs +++ b/crates/core/app/tests/app_can_sweep_a_collection_of_small_notes.rs @@ -95,12 +95,12 @@ async fn app_can_sweep_a_collection_of_small_notes() -> anyhow::Result<()> { // Spawn the server-side view server. { - let make_svc = penumbra_app::rpc::routes( + let make_svc = penumbra_app::rpc::router( storage.as_ref(), proxy, false, /*enable_expensive_rpc*/ )? 
- .into_axum_router() + .into_router() .layer(tower_http::cors::CorsLayer::permissive()) .into_make_service() .tap(|_| tracing::debug!("initialized rpc service")); diff --git a/crates/core/app/tests/common/ibc_tests/node.rs b/crates/core/app/tests/common/ibc_tests/node.rs index 971b798bed..5178c7dd3f 100644 --- a/crates/core/app/tests/common/ibc_tests/node.rs +++ b/crates/core/app/tests/common/ibc_tests/node.rs @@ -112,12 +112,12 @@ impl TestNodeWithIBC { tracing::info!("spawning gRPC..."); // Spawn the node's RPC server. let _rpc_server = { - let make_svc = penumbra_app::rpc::routes( + let make_svc = penumbra_app::rpc::router( storage.as_ref(), proxy, false, /*enable_expensive_rpc*/ )? - .into_axum_router() + .into_router() .layer(tower_http::cors::CorsLayer::permissive()) .into_make_service() .tap(|_| tracing::info!("initialized rpc service")); @@ -125,7 +125,6 @@ impl TestNodeWithIBC { .socket_addrs(|| None)? .try_into() .expect("grpc url can be turned into a socket address"); - let server = axum_server::bind(addr).serve(make_svc); tokio::spawn(async { server.await.expect("grpc server returned an error") }) .tap(|_| tracing::info!("grpc server is running")) diff --git a/crates/core/app/tests/common/ibc_tests/relayer.rs b/crates/core/app/tests/common/ibc_tests/relayer.rs index 49e8307362..4b76f6d0d3 100644 --- a/crates/core/app/tests/common/ibc_tests/relayer.rs +++ b/crates/core/app/tests/common/ibc_tests/relayer.rs @@ -1562,18 +1562,16 @@ impl MockRelayer { let mut timeout_height_on_b = None; let mut timeout_timestamp_on_b = None; for attr in &event.attributes { - match attr.key_str()? { - "packet_data_hex" => packet_data_hex = Some(attr.value_str()?.to_string()), - "packet_sequence" => sequence = Some(attr.value_str()?.to_string()), - "packet_src_port" => port_on_a = Some(attr.value_str()?.to_string()), - "packet_src_channel" => chan_on_a = Some(attr.value_str()?.to_string()), - "packet_dst_port" => port_on_b = Some(attr.value_str()?.to_string()), - "packet_dst_channel" => chan_on_b = Some(attr.value_str()?.to_string()), - "packet_timeout_height" => { - timeout_height_on_b = Some(attr.value_str()?.to_string()) - } + match attr.key.as_str() { + "packet_data_hex" => packet_data_hex = Some(attr.value.clone()), + "packet_sequence" => sequence = Some(attr.value.clone()), + "packet_src_port" => port_on_a = Some(attr.value.clone()), + "packet_src_channel" => chan_on_a = Some(attr.value.clone()), + "packet_dst_port" => port_on_b = Some(attr.value.clone()), + "packet_dst_channel" => chan_on_b = Some(attr.value.clone()), + "packet_timeout_height" => timeout_height_on_b = Some(attr.value.clone()), "packet_timeout_timestamp" => { - timeout_timestamp_on_b = Some(attr.value_str()?.to_string()) + timeout_timestamp_on_b = Some(attr.value.clone()) } _ => (), } @@ -1675,20 +1673,18 @@ impl MockRelayer { let mut timeout_timestamp_on_b = None; let mut packet_ack_hex = None; for attr in &event.attributes { - match attr.key_str()? 
{ - "packet_data_hex" => packet_data_hex = Some(attr.value_str()?.to_string()), - "packet_sequence" => sequence = Some(attr.value_str()?.to_string()), - "packet_src_port" => port_on_a = Some(attr.value_str()?.to_string()), - "packet_src_channel" => chan_on_a = Some(attr.value_str()?.to_string()), - "packet_dst_port" => port_on_b = Some(attr.value_str()?.to_string()), - "packet_dst_channel" => chan_on_b = Some(attr.value_str()?.to_string()), - "packet_timeout_height" => { - timeout_height_on_b = Some(attr.value_str()?.to_string()) - } + match attr.key.as_str() { + "packet_data_hex" => packet_data_hex = Some(attr.value.clone()), + "packet_sequence" => sequence = Some(attr.value.clone()), + "packet_src_port" => port_on_a = Some(attr.value.clone()), + "packet_src_channel" => chan_on_a = Some(attr.value.clone()), + "packet_dst_port" => port_on_b = Some(attr.value.clone()), + "packet_dst_channel" => chan_on_b = Some(attr.value.clone()), + "packet_timeout_height" => timeout_height_on_b = Some(attr.value.clone()), "packet_timeout_timestamp" => { - timeout_timestamp_on_b = Some(attr.value_str()?.to_string()) + timeout_timestamp_on_b = Some(attr.value.clone()) } - "packet_ack_hex" => packet_ack_hex = Some(attr.value_str()?.to_string()), + "packet_ack_hex" => packet_ack_hex = Some(attr.value.clone()), _ => (), } } diff --git a/crates/core/app/tests/mock_consensus_block_proving.rs b/crates/core/app/tests/mock_consensus_block_proving.rs index 6966ba96f3..f522020769 100644 --- a/crates/core/app/tests/mock_consensus_block_proving.rs +++ b/crates/core/app/tests/mock_consensus_block_proving.rs @@ -1,8 +1,5 @@ use { anyhow::Context as _, - cnidarium::proto::v1::{ - query_service_client::QueryServiceClient as CnidariumQueryServiceClient, KeyValueRequest, - }, cnidarium::{StateRead as _, TempStorage}, common::{BuilderExt as _, TempStorageExt as _}, ibc_proto::ibc::core::client::v1::{ @@ -26,6 +23,10 @@ use { penumbra_mock_client::MockClient, penumbra_mock_consensus::TestNode, penumbra_proto::{ + cnidarium::v1::{ + query_service_client::QueryServiceClient as CnidariumQueryServiceClient, + KeyValueRequest, + }, util::tendermint_proxy::v1::{ tendermint_proxy_service_client::TendermintProxyServiceClient, GetBlockByHeightRequest, }, @@ -190,8 +191,8 @@ async fn verify_storage_proof_simple() -> anyhow::Result<()> { // Spawn the node's RPC server. let _rpc_server = { let make_svc = - penumbra_app::rpc::routes(&storage, proxy, false /*enable_expensive_rpc*/)? - .into_axum_router() + penumbra_app::rpc::router(&storage, proxy, false /*enable_expensive_rpc*/)? + .into_router() .layer(tower_http::cors::CorsLayer::permissive()) .into_make_service() .tap(|_| println!("initialized rpc service")); diff --git a/crates/core/app/tests/view_server_can_be_served_on_localhost.rs b/crates/core/app/tests/view_server_can_be_served_on_localhost.rs index 4603aadf1c..6ef2571ac3 100644 --- a/crates/core/app/tests/view_server_can_be_served_on_localhost.rs +++ b/crates/core/app/tests/view_server_can_be_served_on_localhost.rs @@ -71,12 +71,12 @@ async fn view_server_can_be_served_on_localhost() -> anyhow::Result<()> { // Spawn the server-side view server. { - let make_svc = penumbra_app::rpc::routes( + let make_svc = penumbra_app::rpc::router( storage.as_ref(), proxy, false, /*enable_expensive_rpc*/ )? 
- .into_axum_router() + .into_router() .layer(tower_http::cors::CorsLayer::permissive()) .into_make_service() .tap(|_| tracing::debug!("initialized rpc service")); diff --git a/crates/core/component/auction/Cargo.toml b/crates/core/component/auction/Cargo.toml index 65cc933d2f..df57c248d2 100644 --- a/crates/core/component/auction/Cargo.toml +++ b/crates/core/component/auction/Cargo.toml @@ -55,7 +55,6 @@ hex = {workspace = true} im = {workspace = true, optional = true} metrics = {workspace = true} once_cell = {workspace = true} -pbjson-types = { workspace = true } penumbra-asset = {workspace = true, default-features = false} penumbra-keys = {workspace = true, default-features = false} penumbra-num = {workspace = true, default-features = false} @@ -77,6 +76,7 @@ tendermint = {workspace = true, default-features = true} tokio = {workspace = true, features = ["full", "tracing"], optional = true} tonic = {workspace = true, optional = true} tracing = {workspace = true} +pbjson-types = "0.6.0" [dev-dependencies] ed25519-consensus = {workspace = true} diff --git a/crates/core/component/ibc/src/component/msg_handler/channel_close_confirm.rs b/crates/core/component/ibc/src/component/msg_handler/channel_close_confirm.rs index 1cd8eff2d9..8d8e53fd0e 100644 --- a/crates/core/component/ibc/src/component/msg_handler/channel_close_confirm.rs +++ b/crates/core/component/ibc/src/component/msg_handler/channel_close_confirm.rs @@ -70,7 +70,6 @@ impl MsgHandler for MsgChannelCloseConfirm { remote: expected_counterparty, connection_hops: expected_connection_hops, version: channel.version.clone(), - ..ChannelEnd::default() }; state diff --git a/crates/core/component/ibc/src/component/msg_handler/channel_open_ack.rs b/crates/core/component/ibc/src/component/msg_handler/channel_open_ack.rs index 4f9c96b8f4..3e7accd673 100644 --- a/crates/core/component/ibc/src/component/msg_handler/channel_open_ack.rs +++ b/crates/core/component/ibc/src/component/msg_handler/channel_open_ack.rs @@ -58,7 +58,6 @@ impl MsgHandler for MsgChannelOpenAck { remote: expected_counterparty, connection_hops: expected_connection_hops, version: self.version_on_b.clone(), - ..ChannelEnd::default() }; state diff --git a/crates/core/component/ibc/src/component/msg_handler/channel_open_confirm.rs b/crates/core/component/ibc/src/component/msg_handler/channel_open_confirm.rs index b941ebd9ac..3d5f47bc6e 100644 --- a/crates/core/component/ibc/src/component/msg_handler/channel_open_confirm.rs +++ b/crates/core/component/ibc/src/component/msg_handler/channel_open_confirm.rs @@ -65,7 +65,6 @@ impl MsgHandler for MsgChannelOpenConfirm { remote: expected_counterparty, connection_hops: expected_connection_hops, version: channel.version.clone(), - ..ChannelEnd::default() }; state diff --git a/crates/core/component/ibc/src/component/msg_handler/channel_open_init.rs b/crates/core/component/ibc/src/component/msg_handler/channel_open_init.rs index e97c1697b9..6956aabf7a 100644 --- a/crates/core/component/ibc/src/component/msg_handler/channel_open_init.rs +++ b/crates/core/component/ibc/src/component/msg_handler/channel_open_init.rs @@ -57,7 +57,6 @@ impl MsgHandler for MsgChannelOpenInit { remote: Counterparty::new(self.port_id_on_b.clone(), None), connection_hops: self.connection_hops_on_a.clone(), version: self.version_proposal.clone(), - ..ChannelEnd::default() }; state.put_channel(&channel_id, &self.port_id_on_a, new_channel.clone()); diff --git a/crates/core/component/ibc/src/component/msg_handler/channel_open_try.rs 
b/crates/core/component/ibc/src/component/msg_handler/channel_open_try.rs index 8bffd067fd..3432c98760 100644 --- a/crates/core/component/ibc/src/component/msg_handler/channel_open_try.rs +++ b/crates/core/component/ibc/src/component/msg_handler/channel_open_try.rs @@ -49,7 +49,6 @@ impl MsgHandler for MsgChannelOpenTry { .clone() .ok_or_else(|| anyhow::anyhow!("no counterparty connection id provided"))?], version: self.version_supported_on_a.clone(), - ..ChannelEnd::default() }; tracing::debug!(?self, ?expected_channel_on_a); @@ -82,7 +81,6 @@ impl MsgHandler for MsgChannelOpenTry { remote: Counterparty::new(self.port_id_on_a.clone(), Some(self.chan_id_on_a.clone())), connection_hops: self.connection_hops_on_b.clone(), version: self.version_supported_on_a.clone(), - ..ChannelEnd::default() }; state.put_channel(&channel_id, &self.port_id_on_b, new_channel.clone()); diff --git a/crates/core/component/ibc/src/component/rpc.rs b/crates/core/component/ibc/src/component/rpc.rs index acb38b1bdb..4b54f52105 100644 --- a/crates/core/component/ibc/src/component/rpc.rs +++ b/crates/core/component/ibc/src/component/rpc.rs @@ -1,5 +1,5 @@ use cnidarium::Storage; -use tonic::service::Routes; +use tonic::transport::server::Routes; use super::HostInterface; diff --git a/crates/core/component/ibc/src/component/rpc/consensus_query.rs b/crates/core/component/ibc/src/component/rpc/consensus_query.rs index 5336d14204..4b176acfb4 100644 --- a/crates/core/component/ibc/src/component/rpc/consensus_query.rs +++ b/crates/core/component/ibc/src/component/rpc/consensus_query.rs @@ -4,8 +4,7 @@ use async_trait::async_trait; use ibc_proto::ibc::core::channel::v1::query_server::Query as ConsensusQuery; use ibc_proto::ibc::core::channel::v1::{ Channel, PacketState, QueryChannelClientStateRequest, QueryChannelClientStateResponse, - QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponse, - QueryChannelParamsRequest, QueryChannelParamsResponse, QueryChannelRequest, + QueryChannelConsensusStateRequest, QueryChannelConsensusStateResponse, QueryChannelRequest, QueryChannelResponse, QueryChannelsRequest, QueryChannelsResponse, QueryConnectionChannelsRequest, QueryConnectionChannelsResponse, QueryNextSequenceReceiveRequest, QueryNextSequenceReceiveResponse, @@ -15,7 +14,6 @@ use ibc_proto::ibc::core::channel::v1::{ QueryPacketCommitmentResponse, QueryPacketCommitmentsRequest, QueryPacketCommitmentsResponse, QueryPacketReceiptRequest, QueryPacketReceiptResponse, QueryUnreceivedAcksRequest, QueryUnreceivedAcksResponse, QueryUnreceivedPacketsRequest, QueryUnreceivedPacketsResponse, - QueryUpgradeErrorRequest, QueryUpgradeErrorResponse, QueryUpgradeRequest, QueryUpgradeResponse, }; use ibc_proto::ibc::core::client::v1::{Height, IdentifiedClientState}; use ibc_types::path::{ @@ -132,7 +130,6 @@ impl ConsensusQuery for IbcQuery channel_id: chan_id, port_id: PortId::transfer(), channel_end: channel, - upgrade_sequence: 0, }; channels.push(id_chan.into()); } @@ -190,7 +187,6 @@ impl ConsensusQuery for IbcQuery channel_id: chan_id, port_id: PortId::transfer(), channel_end: channel, - upgrade_sequence: 0, }; channels.push(id_chan.into()); } @@ -860,28 +856,4 @@ impl ConsensusQuery for IbcQuery }), })) } - - #[tracing::instrument(skip(self), err, level = "debug")] - async fn channel_params( - &self, - _request: tonic::Request, - ) -> std::result::Result, tonic::Status> { - Err(tonic::Status::unimplemented("not implemented")) - } - - #[tracing::instrument(skip(self), err, level = "debug")] - async fn upgrade( - &self, - 
_request: tonic::Request, - ) -> std::result::Result, tonic::Status> { - Err(tonic::Status::unimplemented("not implemented")) - } - - #[tracing::instrument(skip(self), err, level = "debug")] - async fn upgrade_error( - &self, - _request: tonic::Request, - ) -> std::result::Result, tonic::Status> { - Err(tonic::Status::unimplemented("not implemented")) - } } diff --git a/crates/core/component/shielded-pool/Cargo.toml b/crates/core/component/shielded-pool/Cargo.toml index 58deecfe28..d1a9b72f81 100644 --- a/crates/core/component/shielded-pool/Cargo.toml +++ b/crates/core/component/shielded-pool/Cargo.toml @@ -12,7 +12,6 @@ component = [ "penumbra-sct/component", "tonic", "ibc-proto/server", - "ibc-proto/transport", ] # proving-keys = ["penumbra-proof-params/proving-keys"] default = ["std", "component"] diff --git a/crates/core/component/shielded-pool/src/component/rpc/bank_query.rs b/crates/core/component/shielded-pool/src/component/rpc/bank_query.rs index a675402ecd..2af2edae7c 100644 --- a/crates/core/component/shielded-pool/src/component/rpc/bank_query.rs +++ b/crates/core/component/shielded-pool/src/component/rpc/bank_query.rs @@ -9,11 +9,9 @@ use ibc_proto::cosmos::bank::v1beta1::{ QueryTotalSupplyRequest, QueryTotalSupplyResponse, }; use ibc_proto::cosmos::bank::v1beta1::{ - QueryDenomMetadataByQueryStringRequest, QueryDenomMetadataByQueryStringResponse, - QueryDenomMetadataRequest, QueryDenomMetadataResponse, QueryDenomOwnersByQueryRequest, - QueryDenomOwnersByQueryResponse, QueryDenomOwnersRequest, QueryDenomOwnersResponse, - QueryDenomsMetadataRequest, QueryDenomsMetadataResponse, QuerySendEnabledRequest, - QuerySendEnabledResponse, QuerySpendableBalanceByDenomRequest, + QueryDenomMetadataRequest, QueryDenomMetadataResponse, QueryDenomOwnersRequest, + QueryDenomOwnersResponse, QueryDenomsMetadataRequest, QueryDenomsMetadataResponse, + QuerySendEnabledRequest, QuerySendEnabledResponse, QuerySpendableBalanceByDenomRequest, QuerySpendableBalanceByDenomResponse, QuerySpendableBalancesRequest, QuerySpendableBalancesResponse, QuerySupplyOfRequest, QuerySupplyOfResponse, }; @@ -209,18 +207,4 @@ impl BankQuery for Server { ) -> std::result::Result, tonic::Status> { Err(tonic::Status::unimplemented("not implemented")) } - - async fn denom_metadata_by_query_string( - &self, - _: tonic::Request, - ) -> Result, tonic::Status> { - Err(tonic::Status::unimplemented("not implemented")) - } - - async fn denom_owners_by_query( - &self, - _: tonic::Request, - ) -> Result, tonic::Status> { - Err(tonic::Status::unimplemented("not implemented")) - } } diff --git a/crates/core/keys/src/symmetric.rs b/crates/core/keys/src/symmetric.rs index e16e726f74..cc04585ed4 100644 --- a/crates/core/keys/src/symmetric.rs +++ b/crates/core/keys/src/symmetric.rs @@ -135,6 +135,7 @@ impl TryFrom<&[u8]> for PayloadKey { fn try_from(slice: &[u8]) -> Result { let bytes: [u8; PAYLOAD_KEY_LEN_BYTES] = slice + .as_ref() .try_into() .map_err(|_| anyhow::anyhow!("PayloadKey incorrect len"))?; Ok(Self(*Key::from_slice(&bytes))) diff --git a/crates/crypto/proof-params/Cargo.toml b/crates/crypto/proof-params/Cargo.toml index 8199e5caef..af3bdaf815 100644 --- a/crates/crypto/proof-params/Cargo.toml +++ b/crates/crypto/proof-params/Cargo.toml @@ -5,7 +5,7 @@ edition = {workspace = true} [build-dependencies] regex = { version = "1", optional = true } -reqwest = { version = "0.12.9", optional = true, features = [ +reqwest = { version = "0.11.14", optional = true, features = [ "blocking", "json", ] } diff --git 
a/crates/misc/measure/Cargo.toml b/crates/misc/measure/Cargo.toml index f60b627eb3..ef48253a4b 100644 --- a/crates/misc/measure/Cargo.toml +++ b/crates/misc/measure/Cargo.toml @@ -21,7 +21,6 @@ indicatif = {workspace = true} penumbra-compact-block = {workspace = true, default-features = false} penumbra-proto = {workspace = true, features = ["rpc"], default-features = true} penumbra-view = {workspace = true} -rustls = {workspace = true} serde_json = {workspace = true} tokio = {workspace = true, features = ["full"]} tonic = {workspace = true, features = ["tls"]} diff --git a/crates/misc/measure/src/main.rs b/crates/misc/measure/src/main.rs index 9cd9e7ec5d..38211fe3d2 100644 --- a/crates/misc/measure/src/main.rs +++ b/crates/misc/measure/src/main.rs @@ -4,7 +4,6 @@ extern crate tracing; use std::path::PathBuf; use clap::Parser; -use rustls::crypto::aws_lc_rs; use tracing::Instrument; use tracing_subscriber::EnvFilter; @@ -22,9 +21,8 @@ use penumbra_proto::{ }, DomainType, Message, }; -use penumbra_view::ViewServer; -use tonic::transport::Channel; +use tonic::transport::{Channel, ClientTlsConfig}; use url::Url; // The expected maximum size of a compact block message. @@ -217,7 +215,9 @@ impl Opt { } } Command::StreamBlocks { skip_genesis } => { - let channel = ViewServer::get_pd_channel(self.node.clone()).await?; + let channel = Channel::from_shared(self.node.to_string())? + .connect() + .await?; let mut cb_client = CompactBlockQueryServiceClient::new(channel.clone()) .max_decoding_message_size(MAX_CB_SIZE_BYTES); @@ -306,21 +306,25 @@ impl Opt { } } -// Wrapper for the `get_pd_channel` method from the view crate. +// This code is ripped from the pcli code, and could be split out into something common. async fn get_tendermint_proxy_client( pd_url: Url, ) -> anyhow::Result> { - let pd_channel = ViewServer::get_pd_channel(pd_url).await?; + let pd_channel: Channel = match pd_url.scheme() { + "http" => Channel::from_shared(pd_url.to_string())?.connect().await?, + "https" => { + Channel::from_shared(pd_url.to_string())? + .tls_config(ClientTlsConfig::new())? + .connect() + .await? 
+ } + other => anyhow::bail!(format!("unknown url scheme {other}")), + }; Ok(TendermintProxyServiceClient::new(pd_channel)) } #[tokio::main] async fn main() -> anyhow::Result<()> { - // Initialize HTTPS support - aws_lc_rs::default_provider() - .install_default() - .expect("failed to initialize rustls support, via aws-lc-rs"); - let mut opt = Opt::parse(); opt.init_tracing(); opt.run().await?; diff --git a/crates/misc/tct-visualize/Cargo.toml b/crates/misc/tct-visualize/Cargo.toml index adb96c663f..cbd2f4ffac 100644 --- a/crates/misc/tct-visualize/Cargo.toml +++ b/crates/misc/tct-visualize/Cargo.toml @@ -21,9 +21,7 @@ tokio-util = {workspace = true, features = ["full"]} tonic = {workspace = true} prost = {workspace = true} tokio-stream = {workspace = true} -axum = {workspace = true } -# TODO: consider using the Query extractor provided by axum-extra -axum-extra = {workspace = true, features = ["typed-header"] } +axum = {workspace = true, features = ["headers", "query"]} axum-server = {workspace = true, features = ["tls-rustls"]} serde_json = {workspace = true, features = ["preserve_order"]} include-flate = { version = "0.1", features = ["stable"] } diff --git a/crates/misc/tct-visualize/src/bin/tct-live-edit.rs b/crates/misc/tct-visualize/src/bin/tct-live-edit.rs index d529c91e3c..e09b75cb9a 100644 --- a/crates/misc/tct-visualize/src/bin/tct-live-edit.rs +++ b/crates/misc/tct-visualize/src/bin/tct-live-edit.rs @@ -1,7 +1,6 @@ use std::{path::PathBuf, sync::Arc}; -use axum::{routing::get, Router}; -use axum_extra::{headers::ContentType, TypedHeader}; +use axum::{headers::ContentType, routing::get, Router, TypedHeader}; use axum_server::tls_rustls::RustlsConfig; use clap::Parser; use include_flate::flate; @@ -80,8 +79,8 @@ async fn main() -> anyhow::Result<()> { .unwrap() } (None, None) => { - let listener = tokio::net::TcpListener::bind(&address).await.unwrap(); - axum::serve(listener, app.into_make_service()) + axum::Server::bind(&address) + .serve(app.into_make_service()) .await .unwrap(); } diff --git a/crates/misc/tct-visualize/src/live/view.rs b/crates/misc/tct-visualize/src/live/view.rs index 40606a0b7f..dc657b8b76 100644 --- a/crates/misc/tct-visualize/src/live/view.rs +++ b/crates/misc/tct-visualize/src/live/view.rs @@ -1,20 +1,20 @@ use std::{convert::Infallible, sync::Arc, time::Duration}; use axum::{ - body::Body, + body::StreamBody, extract::{OriginalUri, Path, Query}, + headers::ContentType, http::StatusCode, response::{ sse::{self, KeepAlive}, Sse, }, routing::{get, MethodRouter}, - Router, + Router, TypedHeader, }; -use axum_extra::{headers::ContentType, TypedHeader}; use bytes::Bytes; -use futures::{future, stream}; +use futures::stream; use serde_json::json; use tokio::sync::{mpsc, watch}; use tokio_stream::{wrappers::ReceiverStream, StreamExt}; @@ -262,14 +262,15 @@ fn render_dot(mut tree: watch::Receiver) -> MethodRouter { if !query.graph { return Ok::<_, (StatusCode, String)>(( TypedHeader(ContentType::json()), - Body::from_stream(stream::once(future::ok::<_, Infallible>( - json!({ + StreamBody::new( + stream::iter(vec![json!({ "position": tree.borrow().position(), "forgotten": tree.borrow().forgotten(), }) .to_string() - .into_bytes(), - ))), + .into()]) + .map(Ok), + ), )); } @@ -333,7 +334,7 @@ fn render_dot(mut tree: watch::Receiver) -> MethodRouter { // Manually construct a streaming response to avoid allocating a copy of the large // rendered bytes: the graph is already rendered as a JSON-escaped string, so we include // it literally in this output - 
Body::from_stream( + StreamBody::new( stream::iter(vec![ "{".into(), "\"position\":".into(), diff --git a/crates/proto/Cargo.toml b/crates/proto/Cargo.toml index ca137d5709..64956328d2 100644 --- a/crates/proto/Cargo.toml +++ b/crates/proto/Cargo.toml @@ -8,7 +8,7 @@ anyhow = "1" [features] rpc = ["dep:tonic", "ibc-proto/client"] -box-grpc = ["dep:http-body", "dep:http-body-util", "dep:tonic", "dep:tower"] +box-grpc = ["dep:http-body", "dep:tonic", "dep:tower"] cnidarium = ["dep:cnidarium"] tendermint = [ "dep:chrono", @@ -30,7 +30,6 @@ decaf377-rdsa = {workspace = true} futures = {workspace = true} hex = {workspace = true} http-body = {workspace = true, optional = true} -http-body-util = {workspace = true, optional = true} ibc-types = {workspace = true, features = ["std"], default-features = true} ics23 = {workspace = true} pbjson = {workspace = true} diff --git a/crates/proto/src/box_grpc_svc.rs b/crates/proto/src/box_grpc_svc.rs index 7c02a015dc..1ce117449f 100644 --- a/crates/proto/src/box_grpc_svc.rs +++ b/crates/proto/src/box_grpc_svc.rs @@ -1,7 +1,10 @@ use bytes::Bytes; -use http_body::Body; -use http_body_util::{combinators::UnsyncBoxBody, BodyExt as _}; -use tonic::{body::BoxBody as ReqBody, codegen::http as grpc, transport::Endpoint}; +use http_body::{combinators::UnsyncBoxBody, Body}; +use tonic::{ + body::BoxBody as ReqBody, + codegen::http as grpc, + transport::{self, Endpoint}, +}; use tower::{util::BoxCloneService, Service, ServiceBuilder}; /// A type-erased gRPC service. @@ -17,7 +20,7 @@ pub type RspBody = UnsyncBoxBody; pub async fn connect(ep: Endpoint) -> anyhow::Result { let conn = ep.connect().await?; let svc = ServiceBuilder::new() - .map_response(|rsp: grpc::Response| rsp.map(box_rsp_body)) + .map_response(|rsp: grpc::Response| rsp.map(box_rsp_body)) .map_err(BoxError::from) .service(conn); Ok(BoxCloneService::new(svc)) diff --git a/crates/proto/src/event.rs b/crates/proto/src/event.rs index ce74895ce4..e7e69fefa4 100644 --- a/crates/proto/src/event.rs +++ b/crates/proto/src/event.rs @@ -16,19 +16,16 @@ pub trait ProtoEvent: Message + Name + Serialize + DeserializeOwned + Sized { .as_object() .expect("serde_json Serialized ProtoEvent should not be empty.") .into_iter() - .map(|(key, v)| { - abci::EventAttribute::V037(abci::v0_37::EventAttribute { - value: serde_json::to_string(v) - .expect("must be able to serialize value as JSON"), - key: key.to_string(), - index: true, - }) + .map(|(key, v)| abci::EventAttribute { + value: serde_json::to_string(v).expect("must be able to serialize value as JSON"), + key: key.to_string(), + index: true, }) .collect(); // NOTE: cosmo-sdk sorts the attribute list so that it's deterministic every time.[0] I don't know if that is actually conformant but continuing that pattern here for now. // [0]: https://github.com/cosmos/cosmos-sdk/blob/8fb62054c59e580c0ae0c898751f8dc46044499a/types/events.go#L102-L104 - attributes.sort_by(|a, b| (&a.key_bytes()).cmp(&b.key_bytes())); + attributes.sort_by(|a, b| (&a.key).cmp(&b.key)); return abci::Event::new(kind, attributes); } @@ -46,9 +43,9 @@ pub trait ProtoEvent: Message + Name + Serialize + DeserializeOwned + Sized { // NOTE: Is there any condition where there would be duplicate EventAttributes and problems that fall out of that? 
let mut attributes = HashMap::::new(); for attr in &event.attributes { - let value = serde_json::from_slice(attr.value_bytes()) + let value = serde_json::from_str(&attr.value) .with_context(|| format!("could not parse JSON for attribute {:?}", attr))?; - attributes.insert(String::from_utf8_lossy(attr.key_bytes()).into(), value); + attributes.insert(attr.key.clone(), value); } let json = serde_json::to_value(attributes) @@ -84,11 +81,11 @@ mod tests { let expected_abci_spend = abci::Event::new( "penumbra.core.component.shielded_pool.v1.EventSpend", - [abci::EventAttribute::V037(abci::v0_37::EventAttribute { + vec![abci::EventAttribute { key: "nullifier".to_string(), value: "{\"inner\":\"lL6VF1ZxmJFo8o6i6e+JjYyktGKaN6j/o+SzsBoZ29M=\"}".to_string(), index: true, - })], + }], ); assert_eq!(abci_spend, expected_abci_spend); @@ -110,13 +107,13 @@ mod tests { let expected_abci_output = abci::Event::new( "penumbra.core.component.shielded_pool.v1.EventOutput", - [abci::EventAttribute::V037(abci::v0_37::EventAttribute { + vec![abci::EventAttribute { // note: attribute keys become camelCase because ProtoJSON... key: "noteCommitment".to_string(), // note: attribute values are JSON objects, potentially nested as here value: "{\"inner\":\"lL6VF1ZxmJFo8o6i6e+JjYyktGKaN6j/o+SzsBoZ29M=\"}".to_string(), index: true, - })], + }], ); assert_eq!(abci_output, expected_abci_output); diff --git a/crates/proto/src/gen/cosmos.app.v1alpha1.rs b/crates/proto/src/gen/cosmos.app.v1alpha1.rs index a2b1826bb6..de74556a7a 100644 --- a/crates/proto/src/gen/cosmos.app.v1alpha1.rs +++ b/crates/proto/src/gen/cosmos.app.v1alpha1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// ModuleDescriptor describes an app module. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ModuleDescriptor { /// go_import names the package that should be imported by an app to load the @@ -27,13 +27,11 @@ impl ::prost::Name for ModuleDescriptor { const NAME: &'static str = "ModuleDescriptor"; const PACKAGE: &'static str = "cosmos.app.v1alpha1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.app.v1alpha1.ModuleDescriptor".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.app.v1alpha1.ModuleDescriptor".into() + ::prost::alloc::format!("cosmos.app.v1alpha1.{}", Self::NAME) } } /// PackageReference is a reference to a protobuf package used by a module. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PackageReference { /// name is the fully-qualified name of the package. @@ -82,14 +80,12 @@ impl ::prost::Name for PackageReference { const NAME: &'static str = "PackageReference"; const PACKAGE: &'static str = "cosmos.app.v1alpha1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.app.v1alpha1.PackageReference".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.app.v1alpha1.PackageReference".into() + ::prost::alloc::format!("cosmos.app.v1alpha1.{}", Self::NAME) } } /// MigrateFromInfo is information on a module version that a newer module /// can migrate from. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MigrateFromInfo { /// module is the fully-qualified protobuf name of the module config object @@ -101,9 +97,6 @@ impl ::prost::Name for MigrateFromInfo { const NAME: &'static str = "MigrateFromInfo"; const PACKAGE: &'static str = "cosmos.app.v1alpha1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.app.v1alpha1.MigrateFromInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.app.v1alpha1.MigrateFromInfo".into() + ::prost::alloc::format!("cosmos.app.v1alpha1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.auth.v1beta1.rs b/crates/proto/src/gen/cosmos.auth.v1beta1.rs index eb18f17fee..494ee21f5f 100644 --- a/crates/proto/src/gen/cosmos.auth.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.auth.v1beta1.rs @@ -1,7 +1,7 @@ -// This file is @generated by prost-build. /// BaseAccount defines a base account type. It contains all the necessary fields /// for basic account functionality. Any custom account type should extend this /// type for additional functionality (e.g. vesting). +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BaseAccount { #[prost(string, tag = "1")] @@ -17,13 +17,11 @@ impl ::prost::Name for BaseAccount { const NAME: &'static str = "BaseAccount"; const PACKAGE: &'static str = "cosmos.auth.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.auth.v1beta1.BaseAccount".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.auth.v1beta1.BaseAccount".into() + ::prost::alloc::format!("cosmos.auth.v1beta1.{}", Self::NAME) } } /// ModuleAccount defines an account for modules that holds coins on a pool. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ModuleAccount { #[prost(message, optional, tag = "1")] @@ -37,15 +35,13 @@ impl ::prost::Name for ModuleAccount { const NAME: &'static str = "ModuleAccount"; const PACKAGE: &'static str = "cosmos.auth.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.auth.v1beta1.ModuleAccount".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.auth.v1beta1.ModuleAccount".into() + ::prost::alloc::format!("cosmos.auth.v1beta1.{}", Self::NAME) } } /// ModuleCredential represents a unclaimable pubkey for base accounts controlled by modules. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ModuleCredential { /// module_name is the name of the module used for address derivation (passed into address.Module). @@ -60,14 +56,12 @@ impl ::prost::Name for ModuleCredential { const NAME: &'static str = "ModuleCredential"; const PACKAGE: &'static str = "cosmos.auth.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.auth.v1beta1.ModuleCredential".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.auth.v1beta1.ModuleCredential".into() + ::prost::alloc::format!("cosmos.auth.v1beta1.{}", Self::NAME) } } /// Params defines the parameters for the auth module. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Params { #[prost(uint64, tag = "1")] pub max_memo_characters: u64, @@ -84,9 +78,6 @@ impl ::prost::Name for Params { const NAME: &'static str = "Params"; const PACKAGE: &'static str = "cosmos.auth.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.auth.v1beta1.Params".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.auth.v1beta1.Params".into() + ::prost::alloc::format!("cosmos.auth.v1beta1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.bank.v1beta1.rs b/crates/proto/src/gen/cosmos.bank.v1beta1.rs index 113e4a0aa6..1fdd67aa3c 100644 --- a/crates/proto/src/gen/cosmos.bank.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.bank.v1beta1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// Params defines the parameters for the bank module. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Params { /// Deprecated: Use of SendEnabled in params is deprecated. @@ -17,14 +17,12 @@ impl ::prost::Name for Params { const NAME: &'static str = "Params"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.Params".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.Params".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// SendEnabled maps coin denom to a send_enabled status (whether a denom is /// sendable). +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SendEnabled { #[prost(string, tag = "1")] @@ -36,13 +34,11 @@ impl ::prost::Name for SendEnabled { const NAME: &'static str = "SendEnabled"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.SendEnabled".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.SendEnabled".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// Input models transaction input. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Input { #[prost(string, tag = "1")] @@ -54,13 +50,11 @@ impl ::prost::Name for Input { const NAME: &'static str = "Input"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.Input".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.Input".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// Output models transaction outputs. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Output { #[prost(string, tag = "1")] @@ -72,15 +66,13 @@ impl ::prost::Name for Output { const NAME: &'static str = "Output"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.Output".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.Output".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// Supply represents a struct that passively keeps track of the total supply /// amounts in the network. /// This message is deprecated now that supply is indexed by denom. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Supply { #[prost(message, repeated, tag = "1")] @@ -90,14 +82,12 @@ impl ::prost::Name for Supply { const NAME: &'static str = "Supply"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.Supply".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.Supply".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// DenomUnit represents a struct that describes a given /// denomination unit of the basic token. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DenomUnit { /// denom represents the string name of the given denom unit (e.g uatom). @@ -118,14 +108,12 @@ impl ::prost::Name for DenomUnit { const NAME: &'static str = "DenomUnit"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.DenomUnit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.DenomUnit".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// Metadata represents a struct that describes /// a basic token. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Metadata { #[prost(string, tag = "1")] @@ -167,13 +155,11 @@ impl ::prost::Name for Metadata { const NAME: &'static str = "Metadata"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.Metadata".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.Metadata".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryBalanceRequest is the request type for the Query/Balance RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryBalanceRequest { /// address is the address to query balances for. @@ -187,13 +173,11 @@ impl ::prost::Name for QueryBalanceRequest { const NAME: &'static str = "QueryBalanceRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryBalanceRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryBalanceRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryBalanceResponse is the response type for the Query/Balance RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryBalanceResponse { /// balance is the balance of the coin. @@ -204,13 +188,11 @@ impl ::prost::Name for QueryBalanceResponse { const NAME: &'static str = "QueryBalanceResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryBalanceResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryBalanceResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryBalanceRequest is the request type for the Query/AllBalances RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryAllBalancesRequest { /// address is the address to query balances for. 
@@ -231,14 +213,12 @@ impl ::prost::Name for QueryAllBalancesRequest { const NAME: &'static str = "QueryAllBalancesRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryAllBalancesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryAllBalancesRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryAllBalancesResponse is the response type for the Query/AllBalances RPC /// method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryAllBalancesResponse { /// balances is the balances of all the coins. @@ -254,16 +234,14 @@ impl ::prost::Name for QueryAllBalancesResponse { const NAME: &'static str = "QueryAllBalancesResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryAllBalancesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryAllBalancesResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySpendableBalancesRequest defines the gRPC request structure for querying /// an account's spendable balances. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySpendableBalancesRequest { /// address is the address to query spendable balances for. @@ -279,16 +257,14 @@ impl ::prost::Name for QuerySpendableBalancesRequest { const NAME: &'static str = "QuerySpendableBalancesRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySpendableBalancesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySpendableBalancesRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySpendableBalancesResponse defines the gRPC response structure for querying /// an account's spendable balances. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySpendableBalancesResponse { /// balances is the spendable balances of all the coins. @@ -304,16 +280,14 @@ impl ::prost::Name for QuerySpendableBalancesResponse { const NAME: &'static str = "QuerySpendableBalancesResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySpendableBalancesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySpendableBalancesResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySpendableBalanceByDenomRequest defines the gRPC request structure for /// querying an account's spendable balance for a specific denom. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySpendableBalanceByDenomRequest { /// address is the address to query balances for. 
@@ -327,16 +301,14 @@ impl ::prost::Name for QuerySpendableBalanceByDenomRequest { const NAME: &'static str = "QuerySpendableBalanceByDenomRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySpendableBalanceByDenomRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySpendableBalanceByDenomRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySpendableBalanceByDenomResponse defines the gRPC response structure for /// querying an account's spendable balance for a specific denom. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySpendableBalanceByDenomResponse { /// balance is the balance of the coin. @@ -347,14 +319,12 @@ impl ::prost::Name for QuerySpendableBalanceByDenomResponse { const NAME: &'static str = "QuerySpendableBalanceByDenomResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySpendableBalanceByDenomResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySpendableBalanceByDenomResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryTotalSupplyRequest is the request type for the Query/TotalSupply RPC /// method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryTotalSupplyRequest { /// pagination defines an optional pagination for the request. @@ -369,14 +339,12 @@ impl ::prost::Name for QueryTotalSupplyRequest { const NAME: &'static str = "QueryTotalSupplyRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryTotalSupplyRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryTotalSupplyRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryTotalSupplyResponse is the response type for the Query/TotalSupply RPC /// method +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryTotalSupplyResponse { /// supply is the supply of the coins @@ -394,13 +362,11 @@ impl ::prost::Name for QueryTotalSupplyResponse { const NAME: &'static str = "QueryTotalSupplyResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryTotalSupplyResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryTotalSupplyResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySupplyOfRequest is the request type for the Query/SupplyOf RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySupplyOfRequest { /// denom is the coin denom to query balances for. 
@@ -411,13 +377,11 @@ impl ::prost::Name for QuerySupplyOfRequest { const NAME: &'static str = "QuerySupplyOfRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySupplyOfRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySupplyOfRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySupplyOfResponse is the response type for the Query/SupplyOf RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySupplyOfResponse { /// amount is the supply of the coin. @@ -428,26 +392,22 @@ impl ::prost::Name for QuerySupplyOfResponse { const NAME: &'static str = "QuerySupplyOfResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySupplyOfResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySupplyOfResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryParamsRequest defines the request type for querying x/bank parameters. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryParamsRequest {} impl ::prost::Name for QueryParamsRequest { const NAME: &'static str = "QueryParamsRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryParamsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryParamsRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryParamsResponse defines the response type for querying x/bank parameters. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryParamsResponse { #[prost(message, optional, tag = "1")] @@ -457,13 +417,11 @@ impl ::prost::Name for QueryParamsResponse { const NAME: &'static str = "QueryParamsResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryParamsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryParamsResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryDenomsMetadataRequest is the request type for the Query/DenomsMetadata RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryDenomsMetadataRequest { /// pagination defines an optional pagination for the request. @@ -476,14 +434,12 @@ impl ::prost::Name for QueryDenomsMetadataRequest { const NAME: &'static str = "QueryDenomsMetadataRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryDenomsMetadataRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryDenomsMetadataRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryDenomsMetadataResponse is the response type for the Query/DenomsMetadata RPC /// method. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryDenomsMetadataResponse { /// metadata provides the client information for all the registered tokens. @@ -499,13 +455,11 @@ impl ::prost::Name for QueryDenomsMetadataResponse { const NAME: &'static str = "QueryDenomsMetadataResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryDenomsMetadataResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryDenomsMetadataResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryDenomMetadataRequest is the request type for the Query/DenomMetadata RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryDenomMetadataRequest { /// denom is the coin denom to query the metadata for. @@ -516,14 +470,12 @@ impl ::prost::Name for QueryDenomMetadataRequest { const NAME: &'static str = "QueryDenomMetadataRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryDenomMetadataRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryDenomMetadataRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryDenomMetadataResponse is the response type for the Query/DenomMetadata RPC /// method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryDenomMetadataResponse { /// metadata describes and provides all the client information for the requested token. @@ -534,15 +486,13 @@ impl ::prost::Name for QueryDenomMetadataResponse { const NAME: &'static str = "QueryDenomMetadataResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryDenomMetadataResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryDenomMetadataResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryDenomOwnersRequest defines the request type for the DenomOwners RPC query, /// which queries for a paginated set of all account holders of a particular /// denomination. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryDenomOwnersRequest { /// denom defines the coin denomination to query all account holders for. @@ -558,10 +508,7 @@ impl ::prost::Name for QueryDenomOwnersRequest { const NAME: &'static str = "QueryDenomOwnersRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryDenomOwnersRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryDenomOwnersRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// DenomOwner defines structure representing an account that owns or holds a @@ -569,6 +516,7 @@ impl ::prost::Name for QueryDenomOwnersRequest { /// balance of the denominated token. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DenomOwner { /// address defines the address that owns a particular denomination. 
@@ -582,15 +530,13 @@ impl ::prost::Name for DenomOwner { const NAME: &'static str = "DenomOwner"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.DenomOwner".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.DenomOwner".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QueryDenomOwnersResponse defines the RPC response of a DenomOwners RPC query. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryDenomOwnersResponse { #[prost(message, repeated, tag = "1")] @@ -605,15 +551,13 @@ impl ::prost::Name for QueryDenomOwnersResponse { const NAME: &'static str = "QueryDenomOwnersResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QueryDenomOwnersResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QueryDenomOwnersResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySendEnabledRequest defines the RPC request for looking up SendEnabled entries. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySendEnabledRequest { /// denoms is the specific denoms you want look up. Leave empty to get all entries. @@ -630,15 +574,13 @@ impl ::prost::Name for QuerySendEnabledRequest { const NAME: &'static str = "QuerySendEnabledRequest"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySendEnabledRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySendEnabledRequest".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// QuerySendEnabledResponse defines the RPC response of a SendEnable query. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QuerySendEnabledResponse { #[prost(message, repeated, tag = "1")] @@ -654,22 +596,13 @@ impl ::prost::Name for QuerySendEnabledResponse { const NAME: &'static str = "QuerySendEnabledResponse"; const PACKAGE: &'static str = "cosmos.bank.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.bank.v1beta1.QuerySendEnabledResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.bank.v1beta1.QuerySendEnabledResponse".into() + ::prost::alloc::format!("cosmos.bank.v1beta1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query defines the gRPC querier service. 
@@ -692,8 +625,8 @@ pub mod query_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -718,7 +651,7 @@ pub mod query_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryClient::new(InterceptedService::new(inner, interceptor)) } @@ -765,7 +698,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -793,7 +727,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -824,7 +759,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -857,7 +793,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -890,7 +827,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -918,7 +856,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -943,7 +882,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -968,7 +908,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -994,7 +935,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1025,7 +967,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1056,7 +999,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1074,17 +1018,11 @@ pub mod query_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServer. #[async_trait] - pub trait Query: std::marker::Send + std::marker::Sync + 'static { + pub trait Query: Send + Sync + 'static { /// Balance queries the balance of a single coin for a single account. async fn balance( &self, @@ -1210,18 +1148,20 @@ pub mod query_server { } /// Query defines the gRPC querier service. 
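The client hunks above repeatedly swap `tonic::Status::unknown(...)` back to the explicit `tonic::Status::new(tonic::Code::Unknown, ...)` form used by the older tonic codegen; the two constructors produce the same status. A small sketch (the `not_ready` helper is hypothetical, not part of the generated code):

```rust
use tonic::{Code, Status};

// Explicit constructor form restored by this revert.
fn not_ready(err: impl std::fmt::Display) -> Status {
    Status::new(Code::Unknown, format!("Service was not ready: {}", err))
}

fn main() {
    let status = not_ready("connection refused");
    assert_eq!(status.code(), Code::Unknown);
    assert_eq!(status.message(), "Service was not ready: connection refused");

    // The newer codegen writes the equivalent via the convenience constructor.
    let same = Status::unknown("Service was not ready: connection refused");
    assert_eq!(same.code(), Code::Unknown);
}
```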
#[derive(Debug)] - pub struct QueryServer { - inner: Arc, + pub struct QueryServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServer { + struct _Inner(Arc); + impl QueryServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -1271,8 +1211,8 @@ pub mod query_server { impl tonic::codegen::Service> for QueryServer where T: Query, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -1284,6 +1224,7 @@ pub mod query_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/cosmos.bank.v1beta1.Query/Balance" => { #[allow(non_camel_case_types)] @@ -1314,6 +1255,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BalanceSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1359,6 +1301,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AllBalancesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1404,6 +1347,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SpendableBalancesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1453,6 +1397,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SpendableBalanceByDenomSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1498,6 +1443,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TotalSupplySvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1543,6 +1489,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SupplyOfSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1586,6 +1533,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ParamsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1631,6 +1579,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = 
inner.0; let method = DenomMetadataSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1676,6 +1625,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DenomsMetadataSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1721,6 +1671,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DenomOwnersSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1766,6 +1717,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SendEnabledSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1784,25 +1736,20 @@ pub mod query_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServer { + impl Clone for QueryServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1814,9 +1761,17 @@ pub mod query_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "cosmos.bank.v1beta1.Query"; - impl tonic::server::NamedService for QueryServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServer { + const NAME: &'static str = "cosmos.bank.v1beta1.Query"; } } diff --git a/crates/proto/src/gen/cosmos.base.abci.v1beta1.rs b/crates/proto/src/gen/cosmos.base.abci.v1beta1.rs index e1c9e389dc..574a2519a4 100644 --- a/crates/proto/src/gen/cosmos.base.abci.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.base.abci.v1beta1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// TxResponse defines a structure containing relevant tx data and metadata. The /// tags are stringified and the log is JSON decoded. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxResponse { /// The block height @@ -57,13 +57,11 @@ impl ::prost::Name for TxResponse { const NAME: &'static str = "TxResponse"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.TxResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.TxResponse".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// ABCIMessageLog defines a structure containing an indexed tx ABCI message log. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AbciMessageLog { #[prost(uint32, tag = "1")] @@ -79,14 +77,12 @@ impl ::prost::Name for AbciMessageLog { const NAME: &'static str = "ABCIMessageLog"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.ABCIMessageLog".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.ABCIMessageLog".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// StringEvent defines en Event object wrapper where all the attributes /// contain key/value pairs that are strings instead of raw bytes. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct StringEvent { #[prost(string, tag = "1")] @@ -98,14 +94,12 @@ impl ::prost::Name for StringEvent { const NAME: &'static str = "StringEvent"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.StringEvent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.StringEvent".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// Attribute defines an attribute wrapper where the key and value are /// strings instead of raw bytes. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Attribute { #[prost(string, tag = "1")] @@ -117,14 +111,12 @@ impl ::prost::Name for Attribute { const NAME: &'static str = "Attribute"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.Attribute".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.Attribute".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// GasInfo defines tx execution gas context. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GasInfo { /// GasWanted is the maximum units of work we allow this tx to perform. #[prost(uint64, tag = "1")] @@ -137,13 +129,11 @@ impl ::prost::Name for GasInfo { const NAME: &'static str = "GasInfo"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.GasInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.GasInfo".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// Result is the union of ResponseFormat and ResponseCheckTx. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Result { /// Data is any data returned from message or handler execution. It MUST be @@ -172,14 +162,12 @@ impl ::prost::Name for Result { const NAME: &'static str = "Result"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.Result".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.Result".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// SimulationResponse defines the response generated when a transaction is /// successfully simulated. 
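Two derive-level changes repeat throughout these files: every message regains the `#[allow(clippy::derive_partial_eq_without_eq)]` attribute the older generator emits, and all-scalar messages such as `GasInfo` lose the `Copy` derive the newer generator had added. A sketch of why the allow is there, using a hypothetical `GasInfoLike` struct:

```rust
// Illustrative only: generated messages derive PartialEq but never Eq (some
// field types, e.g. floats or nested messages, are not Eq), so the older
// generator silences clippy's derive_partial_eq_without_eq lint uniformly,
// even on structs like this one whose fields could support Eq.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq, Debug, Default)]
pub struct GasInfoLike {
    pub gas_wanted: u64,
    pub gas_used: u64,
}

fn main() {
    // Without a Copy derive, keeping a second handle requires an explicit clone.
    let info = GasInfoLike { gas_wanted: 100_000, gas_used: 72_500 };
    let for_logging = info.clone();
    assert_eq!(info, for_logging);
}
```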
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SimulationResponse { #[prost(message, optional, tag = "1")] @@ -191,14 +179,12 @@ impl ::prost::Name for SimulationResponse { const NAME: &'static str = "SimulationResponse"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.SimulationResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.SimulationResponse".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// MsgData defines the data returned in a Result object during message /// execution. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MsgData { #[prost(string, tag = "1")] @@ -210,14 +196,12 @@ impl ::prost::Name for MsgData { const NAME: &'static str = "MsgData"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.MsgData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.MsgData".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// TxMsgData defines a list of MsgData. A transaction will have a MsgData object /// for each message. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxMsgData { /// data field is deprecated and not populated. @@ -234,13 +218,11 @@ impl ::prost::Name for TxMsgData { const NAME: &'static str = "TxMsgData"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.TxMsgData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.TxMsgData".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// SearchTxsResult defines a structure for querying txs pageable +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SearchTxsResult { /// Count of all txs @@ -266,13 +248,11 @@ impl ::prost::Name for SearchTxsResult { const NAME: &'static str = "SearchTxsResult"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.SearchTxsResult".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.SearchTxsResult".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } /// SearchBlocksResult defines a structure for querying blocks pageable +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SearchBlocksResult { /// Count of all blocks @@ -300,9 +280,6 @@ impl ::prost::Name for SearchBlocksResult { const NAME: &'static str = "SearchBlocksResult"; const PACKAGE: &'static str = "cosmos.base.abci.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.abci.v1beta1.SearchBlocksResult".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.abci.v1beta1.SearchBlocksResult".into() + ::prost::alloc::format!("cosmos.base.abci.v1beta1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.base.query.v1beta1.rs b/crates/proto/src/gen/cosmos.base.query.v1beta1.rs index c973de722f..f25dadc86c 100644 --- a/crates/proto/src/gen/cosmos.base.query.v1beta1.rs +++ 
b/crates/proto/src/gen/cosmos.base.query.v1beta1.rs @@ -1,4 +1,3 @@ -// This file is @generated by prost-build. /// PageRequest is to be embedded in gRPC request messages for efficient /// pagination. Ex: /// @@ -6,6 +5,7 @@ /// Foo some_parameter = 1; /// PageRequest pagination = 2; /// } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PageRequest { /// key is a value returned in PageResponse.next_key to begin @@ -38,10 +38,7 @@ impl ::prost::Name for PageRequest { const NAME: &'static str = "PageRequest"; const PACKAGE: &'static str = "cosmos.base.query.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.query.v1beta1.PageRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.query.v1beta1.PageRequest".into() + ::prost::alloc::format!("cosmos.base.query.v1beta1.{}", Self::NAME) } } /// PageResponse is to be embedded in gRPC response messages where the @@ -51,6 +48,7 @@ impl ::prost::Name for PageRequest { /// repeated Bar results = 1; /// PageResponse page = 2; /// } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PageResponse { /// next_key is the key to be passed to PageRequest.key to @@ -67,9 +65,6 @@ impl ::prost::Name for PageResponse { const NAME: &'static str = "PageResponse"; const PACKAGE: &'static str = "cosmos.base.query.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.query.v1beta1.PageResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.query.v1beta1.PageResponse".into() + ::prost::alloc::format!("cosmos.base.query.v1beta1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.base.v1beta1.rs b/crates/proto/src/gen/cosmos.base.v1beta1.rs index 67b93478c9..a24098b3ee 100644 --- a/crates/proto/src/gen/cosmos.base.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.base.v1beta1.rs @@ -1,8 +1,8 @@ -// This file is @generated by prost-build. /// Coin defines a token with a denomination and an amount. /// /// NOTE: The amount field is an Int which implements the custom method /// signatures required by gogoproto. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Coin { #[prost(string, tag = "1")] @@ -14,16 +14,14 @@ impl ::prost::Name for Coin { const NAME: &'static str = "Coin"; const PACKAGE: &'static str = "cosmos.base.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.v1beta1.Coin".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.v1beta1.Coin".into() + ::prost::alloc::format!("cosmos.base.v1beta1.{}", Self::NAME) } } /// DecCoin defines a token with a denomination and a decimal amount. /// /// NOTE: The amount field is an Dec which implements the custom method /// signatures required by gogoproto. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DecCoin { #[prost(string, tag = "1")] @@ -35,9 +33,6 @@ impl ::prost::Name for DecCoin { const NAME: &'static str = "DecCoin"; const PACKAGE: &'static str = "cosmos.base.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.base.v1beta1.DecCoin".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.base.v1beta1.DecCoin".into() + ::prost::alloc::format!("cosmos.base.v1beta1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.crypto.multisig.v1beta1.rs b/crates/proto/src/gen/cosmos.crypto.multisig.v1beta1.rs index 26d4ac43e8..1d784f6978 100644 --- a/crates/proto/src/gen/cosmos.crypto.multisig.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.crypto.multisig.v1beta1.rs @@ -1,7 +1,7 @@ -// This file is @generated by prost-build. /// MultiSignature wraps the signatures from a multisig.LegacyAminoPubKey. /// See cosmos.tx.v1betata1.ModeInfo.Multi for how to specify which signers /// signed and with which modes. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MultiSignature { #[prost(bytes = "vec", repeated, tag = "1")] @@ -11,16 +11,14 @@ impl ::prost::Name for MultiSignature { const NAME: &'static str = "MultiSignature"; const PACKAGE: &'static str = "cosmos.crypto.multisig.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.crypto.multisig.v1beta1.MultiSignature".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.crypto.multisig.v1beta1.MultiSignature".into() + ::prost::alloc::format!("cosmos.crypto.multisig.v1beta1.{}", Self::NAME) } } /// CompactBitArray is an implementation of a space efficient bit array. /// This is used to ensure that the encoded data takes up a minimal amount of /// space after proto encoding. /// This is not thread safe, and is not intended for concurrent usage. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CompactBitArray { #[prost(uint32, tag = "1")] @@ -32,9 +30,6 @@ impl ::prost::Name for CompactBitArray { const NAME: &'static str = "CompactBitArray"; const PACKAGE: &'static str = "cosmos.crypto.multisig.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.crypto.multisig.v1beta1.CompactBitArray".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.crypto.multisig.v1beta1.CompactBitArray".into() + ::prost::alloc::format!("cosmos.crypto.multisig.v1beta1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.tx.config.v1.rs b/crates/proto/src/gen/cosmos.tx.config.v1.rs index 31586d462d..b79ab2e0fd 100644 --- a/crates/proto/src/gen/cosmos.tx.config.v1.rs +++ b/crates/proto/src/gen/cosmos.tx.config.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// Config is the config object of the x/auth/tx package. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Config { /// skip_ante_handler defines whether the ante handler registration should be skipped in case an app wants to override /// this functionality. 
@@ -15,9 +15,6 @@ impl ::prost::Name for Config { const NAME: &'static str = "Config"; const PACKAGE: &'static str = "cosmos.tx.config.v1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.config.v1.Config".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.config.v1.Config".into() + ::prost::alloc::format!("cosmos.tx.config.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/cosmos.tx.signing.v1beta1.rs b/crates/proto/src/gen/cosmos.tx.signing.v1beta1.rs index 25c93fb9de..c5b8f2f52f 100644 --- a/crates/proto/src/gen/cosmos.tx.signing.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.tx.signing.v1beta1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// SignatureDescriptors wraps multiple SignatureDescriptor's. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignatureDescriptors { /// signatures are the signature descriptors @@ -10,16 +10,14 @@ impl ::prost::Name for SignatureDescriptors { const NAME: &'static str = "SignatureDescriptors"; const PACKAGE: &'static str = "cosmos.tx.signing.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.signing.v1beta1.SignatureDescriptors".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.signing.v1beta1.SignatureDescriptors".into() + ::prost::alloc::format!("cosmos.tx.signing.v1beta1.{}", Self::NAME) } } /// SignatureDescriptor is a convenience type which represents the full data for /// a signature including the public key of the signer, signing modes and the /// signature itself. It is primarily used for coordinating signatures between /// clients. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignatureDescriptor { /// public_key is the public key of the signer @@ -36,6 +34,7 @@ pub struct SignatureDescriptor { /// Nested message and enum types in `SignatureDescriptor`. pub mod signature_descriptor { /// Data represents signature data + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Data { /// sum is the oneof that specifies whether this represents single or multi-signature data @@ -45,6 +44,7 @@ pub mod signature_descriptor { /// Nested message and enum types in `Data`. 
pub mod data { /// Single is the signature data for a single signer + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Single { /// mode is the signing mode of the single signer @@ -58,13 +58,13 @@ pub mod signature_descriptor { const NAME: &'static str = "Single"; const PACKAGE: &'static str = "cosmos.tx.signing.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.signing.v1beta1.SignatureDescriptor.Data.Single".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.signing.v1beta1.SignatureDescriptor.Data.Single".into() + ::prost::alloc::format!( + "cosmos.tx.signing.v1beta1.SignatureDescriptor.Data.{}", Self::NAME + ) } } /// Multi is the signature data for a multisig public key + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Multi { /// bitarray specifies which keys within the multisig are signing @@ -80,13 +80,13 @@ pub mod signature_descriptor { const NAME: &'static str = "Multi"; const PACKAGE: &'static str = "cosmos.tx.signing.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.signing.v1beta1.SignatureDescriptor.Data.Multi".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.signing.v1beta1.SignatureDescriptor.Data.Multi".into() + ::prost::alloc::format!( + "cosmos.tx.signing.v1beta1.SignatureDescriptor.Data.{}", Self::NAME + ) } } /// sum is the oneof that specifies whether this represents single or multi-signature data + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Sum { /// single represents a single signer @@ -101,10 +101,9 @@ pub mod signature_descriptor { const NAME: &'static str = "Data"; const PACKAGE: &'static str = "cosmos.tx.signing.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.signing.v1beta1.SignatureDescriptor.Data".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.signing.v1beta1.SignatureDescriptor.Data".into() + ::prost::alloc::format!( + "cosmos.tx.signing.v1beta1.SignatureDescriptor.{}", Self::NAME + ) } } } @@ -112,10 +111,7 @@ impl ::prost::Name for SignatureDescriptor { const NAME: &'static str = "SignatureDescriptor"; const PACKAGE: &'static str = "cosmos.tx.signing.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.signing.v1beta1.SignatureDescriptor".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.signing.v1beta1.SignatureDescriptor".into() + ::prost::alloc::format!("cosmos.tx.signing.v1beta1.{}", Self::NAME) } } /// SignMode represents a signing mode with its own security guarantees. @@ -170,12 +166,12 @@ impl SignMode { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "SIGN_MODE_UNSPECIFIED", - Self::Direct => "SIGN_MODE_DIRECT", - Self::Textual => "SIGN_MODE_TEXTUAL", - Self::DirectAux => "SIGN_MODE_DIRECT_AUX", - Self::LegacyAminoJson => "SIGN_MODE_LEGACY_AMINO_JSON", - Self::Eip191 => "SIGN_MODE_EIP_191", + SignMode::Unspecified => "SIGN_MODE_UNSPECIFIED", + SignMode::Direct => "SIGN_MODE_DIRECT", + SignMode::Textual => "SIGN_MODE_TEXTUAL", + SignMode::DirectAux => "SIGN_MODE_DIRECT_AUX", + SignMode::LegacyAminoJson => "SIGN_MODE_LEGACY_AMINO_JSON", + SignMode::Eip191 => "SIGN_MODE_EIP_191", } } /// Creates an enum from field names used in the ProtoBuf definition. 
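The `SignMode::as_str_name` hunk above reverts the match arms from `Self::Variant` to the spelled-out `SignMode::Variant` paths; behavior is identical, only the generated style differs. A trimmed-down sketch (the discriminant values and the `::prost::Enumeration` derive of the real type are omitted here):

```rust
// Simplified stand-in for the generated enum; not the real SignMode definition.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum SignMode {
    Unspecified,
    Direct,
    Textual,
}

impl SignMode {
    pub fn as_str_name(&self) -> &'static str {
        // Arm style restored by this revert: the enum name is written out per arm.
        match self {
            SignMode::Unspecified => "SIGN_MODE_UNSPECIFIED",
            SignMode::Direct => "SIGN_MODE_DIRECT",
            SignMode::Textual => "SIGN_MODE_TEXTUAL",
        }
    }
}

fn main() {
    assert_eq!(SignMode::Direct.as_str_name(), "SIGN_MODE_DIRECT");
}
```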
diff --git a/crates/proto/src/gen/cosmos.tx.v1beta1.rs b/crates/proto/src/gen/cosmos.tx.v1beta1.rs index 0db9732ef4..8aeb187cda 100644 --- a/crates/proto/src/gen/cosmos.tx.v1beta1.rs +++ b/crates/proto/src/gen/cosmos.tx.v1beta1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// Tx is the standard type used for broadcasting transactions. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Tx { /// body is the processable content of the transaction @@ -19,10 +19,7 @@ impl ::prost::Name for Tx { const NAME: &'static str = "Tx"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.Tx".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.Tx".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxRaw is a variant of Tx that pins the signer's exact binary representation @@ -30,6 +27,7 @@ impl ::prost::Name for Tx { /// verification. The binary `serialize(tx: TxRaw)` is stored in Tendermint and /// the hash `sha256(serialize(tx: TxRaw))` becomes the "txhash", commonly used /// as the transaction ID. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxRaw { /// body_bytes is a protobuf serialization of a TxBody that matches the @@ -50,13 +48,11 @@ impl ::prost::Name for TxRaw { const NAME: &'static str = "TxRaw"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxRaw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxRaw".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// SignDoc is the type used for generating sign bytes for SIGN_MODE_DIRECT. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignDoc { /// body_bytes is protobuf serialization of a TxBody that matches the @@ -80,16 +76,14 @@ impl ::prost::Name for SignDoc { const NAME: &'static str = "SignDoc"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.SignDoc".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.SignDoc".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// SignDocDirectAux is the type used for generating sign bytes for /// SIGN_MODE_DIRECT_AUX. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignDocDirectAux { /// body_bytes is protobuf serialization of a TxBody that matches the @@ -123,13 +117,11 @@ impl ::prost::Name for SignDocDirectAux { const NAME: &'static str = "SignDocDirectAux"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.SignDocDirectAux".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.SignDocDirectAux".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxBody is the body of a transaction that all signers sign over. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxBody { /// messages is a list of messages to be executed. 
The required signers of @@ -165,14 +157,12 @@ impl ::prost::Name for TxBody { const NAME: &'static str = "TxBody"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxBody".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// AuthInfo describes the fee and signer modes that are used to sign a /// transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthInfo { /// signer_infos defines the signing modes for the required signers. The number @@ -200,14 +190,12 @@ impl ::prost::Name for AuthInfo { const NAME: &'static str = "AuthInfo"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.AuthInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.AuthInfo".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// SignerInfo describes the public key and signing mode of a single top-level /// signer. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignerInfo { /// public_key is the public key of the signer. It is optional for accounts @@ -229,13 +217,11 @@ impl ::prost::Name for SignerInfo { const NAME: &'static str = "SignerInfo"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.SignerInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.SignerInfo".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// ModeInfo describes the signing mode of a single or nested multisig signer. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ModeInfo { /// sum is the oneof that specifies whether this represents a single or nested @@ -248,7 +234,8 @@ pub mod mode_info { /// Single is the mode info for a single signer. 
It is structured as a message /// to allow for additional fields such as locale for SIGN_MODE_TEXTUAL in the /// future - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Single { /// mode is the signing mode of the single signer #[prost(enumeration = "super::super::signing::v1beta1::SignMode", tag = "1")] @@ -258,13 +245,11 @@ pub mod mode_info { const NAME: &'static str = "Single"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.ModeInfo.Single".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.ModeInfo.Single".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.ModeInfo.{}", Self::NAME) } } /// Multi is the mode info for a multisig public key + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Multi { /// bitarray specifies which keys within the multisig are signing @@ -281,14 +266,12 @@ pub mod mode_info { const NAME: &'static str = "Multi"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.ModeInfo.Multi".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.ModeInfo.Multi".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.ModeInfo.{}", Self::NAME) } } /// sum is the oneof that specifies whether this represents a single or nested /// multisig signer + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Sum { /// single represents a single signer @@ -303,15 +286,13 @@ impl ::prost::Name for ModeInfo { const NAME: &'static str = "ModeInfo"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.ModeInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.ModeInfo".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// Fee includes the amount of coins paid in fees and the maximum /// gas to be used by the transaction. The ratio yields an effective "gasprice", /// which must be above some miminum to be accepted into the mempool. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Fee { /// amount is the amount of coins to be paid as a fee @@ -336,15 +317,13 @@ impl ::prost::Name for Fee { const NAME: &'static str = "Fee"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.Fee".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.Fee".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// Tip is the tip used for meta-transactions. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Tip { /// amount is the amount of the tip @@ -358,10 +337,7 @@ impl ::prost::Name for Tip { const NAME: &'static str = "Tip"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.Tip".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.Tip".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// AuxSignerData is the intermediary format that an auxiliary signer (e.g. 
a @@ -370,6 +346,7 @@ impl ::prost::Name for Tip { /// by the node if sent directly as-is. /// /// Since: cosmos-sdk 0.46 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuxSignerData { /// address is the bech32-encoded address of the auxiliary signer. If using @@ -393,14 +370,12 @@ impl ::prost::Name for AuxSignerData { const NAME: &'static str = "AuxSignerData"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.AuxSignerData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.AuxSignerData".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// GetTxsEventRequest is the request type for the Service.TxsByEvents /// RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTxsEventRequest { /// events is the list of transaction event type. @@ -437,14 +412,12 @@ impl ::prost::Name for GetTxsEventRequest { const NAME: &'static str = "GetTxsEventRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.GetTxsEventRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.GetTxsEventRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// GetTxsEventResponse is the response type for the Service.TxsByEvents /// RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTxsEventResponse { /// txs is the list of queried transactions. @@ -470,14 +443,12 @@ impl ::prost::Name for GetTxsEventResponse { const NAME: &'static str = "GetTxsEventResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.GetTxsEventResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.GetTxsEventResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// BroadcastTxRequest is the request type for the Service.BroadcastTxRequest /// RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTxRequest { /// tx_bytes is the raw transaction. @@ -490,14 +461,12 @@ impl ::prost::Name for BroadcastTxRequest { const NAME: &'static str = "BroadcastTxRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.BroadcastTxRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.BroadcastTxRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// BroadcastTxResponse is the response type for the /// Service.BroadcastTx method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTxResponse { /// tx_response is the queried TxResponses. 
@@ -510,14 +479,12 @@ impl ::prost::Name for BroadcastTxResponse { const NAME: &'static str = "BroadcastTxResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.BroadcastTxResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.BroadcastTxResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// SimulateRequest is the request type for the Service.Simulate /// RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SimulateRequest { /// tx is the transaction to simulate. @@ -535,14 +502,12 @@ impl ::prost::Name for SimulateRequest { const NAME: &'static str = "SimulateRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.SimulateRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.SimulateRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// SimulateResponse is the response type for the /// Service.SimulateRPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SimulateResponse { /// gas_info is the information about gas used in the simulation. @@ -556,14 +521,12 @@ impl ::prost::Name for SimulateResponse { const NAME: &'static str = "SimulateResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.SimulateResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.SimulateResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// GetTxRequest is the request type for the Service.GetTx /// RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTxRequest { /// hash is the tx hash to query, encoded as a hex string. @@ -574,13 +537,11 @@ impl ::prost::Name for GetTxRequest { const NAME: &'static str = "GetTxRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.GetTxRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.GetTxRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// GetTxResponse is the response type for the Service.GetTx method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTxResponse { /// tx is the queried transaction. @@ -596,16 +557,14 @@ impl ::prost::Name for GetTxResponse { const NAME: &'static str = "GetTxResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.GetTxResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.GetTxResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// GetBlockWithTxsRequest is the request type for the Service.GetBlockWithTxs /// RPC method. /// /// Since: cosmos-sdk 0.45.2 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockWithTxsRequest { /// height is the height of the block to query. 
@@ -621,16 +580,14 @@ impl ::prost::Name for GetBlockWithTxsRequest { const NAME: &'static str = "GetBlockWithTxsRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.GetBlockWithTxsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.GetBlockWithTxsRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// GetBlockWithTxsResponse is the response type for the Service.GetBlockWithTxs /// method. /// /// Since: cosmos-sdk 0.45.2 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockWithTxsResponse { /// txs are the transactions in the block. @@ -652,16 +609,14 @@ impl ::prost::Name for GetBlockWithTxsResponse { const NAME: &'static str = "GetBlockWithTxsResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.GetBlockWithTxsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.GetBlockWithTxsResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxDecodeRequest is the request type for the Service.TxDecode /// RPC method. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxDecodeRequest { /// tx_bytes is the raw transaction. @@ -672,16 +627,14 @@ impl ::prost::Name for TxDecodeRequest { const NAME: &'static str = "TxDecodeRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxDecodeRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxDecodeRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxDecodeResponse is the response type for the /// Service.TxDecode method. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxDecodeResponse { /// tx is the decoded transaction. @@ -692,16 +645,14 @@ impl ::prost::Name for TxDecodeResponse { const NAME: &'static str = "TxDecodeResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxDecodeResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxDecodeResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxEncodeRequest is the request type for the Service.TxEncode /// RPC method. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxEncodeRequest { /// tx is the transaction to encode. @@ -712,16 +663,14 @@ impl ::prost::Name for TxEncodeRequest { const NAME: &'static str = "TxEncodeRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxEncodeRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxEncodeRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxEncodeResponse is the response type for the /// Service.TxEncode method. 
/// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxEncodeResponse { /// tx_bytes is the encoded transaction bytes. @@ -732,16 +681,14 @@ impl ::prost::Name for TxEncodeResponse { const NAME: &'static str = "TxEncodeResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxEncodeResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxEncodeResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxEncodeAminoRequest is the request type for the Service.TxEncodeAmino /// RPC method. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxEncodeAminoRequest { #[prost(string, tag = "1")] @@ -751,16 +698,14 @@ impl ::prost::Name for TxEncodeAminoRequest { const NAME: &'static str = "TxEncodeAminoRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxEncodeAminoRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxEncodeAminoRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxEncodeAminoResponse is the response type for the Service.TxEncodeAmino /// RPC method. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxEncodeAminoResponse { #[prost(bytes = "vec", tag = "1")] @@ -770,16 +715,14 @@ impl ::prost::Name for TxEncodeAminoResponse { const NAME: &'static str = "TxEncodeAminoResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxEncodeAminoResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxEncodeAminoResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxDecodeAminoRequest is the request type for the Service.TxDecodeAmino /// RPC method. /// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxDecodeAminoRequest { #[prost(bytes = "vec", tag = "1")] @@ -789,16 +732,14 @@ impl ::prost::Name for TxDecodeAminoRequest { const NAME: &'static str = "TxDecodeAminoRequest"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxDecodeAminoRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxDecodeAminoRequest".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// TxDecodeAminoResponse is the response type for the Service.TxDecodeAmino /// RPC method. 
/// /// Since: cosmos-sdk 0.47 +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxDecodeAminoResponse { #[prost(string, tag = "1")] @@ -808,10 +749,7 @@ impl ::prost::Name for TxDecodeAminoResponse { const NAME: &'static str = "TxDecodeAminoResponse"; const PACKAGE: &'static str = "cosmos.tx.v1beta1"; fn full_name() -> ::prost::alloc::string::String { - "cosmos.tx.v1beta1.TxDecodeAminoResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos.tx.v1beta1.TxDecodeAminoResponse".into() + ::prost::alloc::format!("cosmos.tx.v1beta1.{}", Self::NAME) } } /// OrderBy defines the sorting order @@ -833,9 +771,9 @@ impl OrderBy { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "ORDER_BY_UNSPECIFIED", - Self::Asc => "ORDER_BY_ASC", - Self::Desc => "ORDER_BY_DESC", + OrderBy::Unspecified => "ORDER_BY_UNSPECIFIED", + OrderBy::Asc => "ORDER_BY_ASC", + OrderBy::Desc => "ORDER_BY_DESC", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -872,10 +810,10 @@ impl BroadcastMode { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "BROADCAST_MODE_UNSPECIFIED", - Self::Block => "BROADCAST_MODE_BLOCK", - Self::Sync => "BROADCAST_MODE_SYNC", - Self::Async => "BROADCAST_MODE_ASYNC", + BroadcastMode::Unspecified => "BROADCAST_MODE_UNSPECIFIED", + BroadcastMode::Block => "BROADCAST_MODE_BLOCK", + BroadcastMode::Sync => "BROADCAST_MODE_SYNC", + BroadcastMode::Async => "BROADCAST_MODE_ASYNC", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -892,13 +830,7 @@ impl BroadcastMode { /// Generated client implementations. #[cfg(feature = "rpc")] pub mod service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Service defines a gRPC service for interacting with transactions. 
@@ -921,8 +853,8 @@ pub mod service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -947,7 +879,7 @@ pub mod service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { ServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -994,7 +926,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1016,7 +949,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1041,7 +975,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1066,7 +1001,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1093,7 +1029,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1120,7 +1057,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1147,7 +1085,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1174,7 +1113,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1201,7 +1141,8 @@ pub mod service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1219,17 +1160,11 @@ pub mod service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with ServiceServer. #[async_trait] - pub trait Service: std::marker::Send + std::marker::Sync + 'static { + pub trait Service: Send + Sync + 'static { /// Simulate simulates executing a transaction for estimating gas usage. async fn simulate( &self, @@ -1312,18 +1247,20 @@ pub mod service_server { } /// Service defines a gRPC service for interacting with transactions. 
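The generated servers (QueryServer above, ServiceServer just below) go back to the older tonic codegen layout: the service is held in a `_Inner` newtype around `Arc<T>` and unwrapped with `inner.0` inside each route's future, instead of storing `Arc<T>` directly. A minimal sketch of that layout, with a hypothetical `Greeter` trait and `ExampleServer` standing in for the generated types:

```rust
use std::sync::Arc;

pub trait Greeter: Send + Sync + 'static {
    fn greet(&self) -> String;
}

// Newtype around Arc<T>, mirroring the generated struct _Inner(Arc<T>).
struct Inner<T>(Arc<T>);

impl<T> Clone for Inner<T> {
    fn clone(&self) -> Self {
        Self(Arc::clone(&self.0))
    }
}

pub struct ExampleServer<T> {
    inner: Inner<T>,
}

impl<T: Greeter> ExampleServer<T> {
    pub fn new(service: T) -> Self {
        Self { inner: Inner(Arc::new(service)) }
    }

    pub fn handle(&self) -> String {
        // Mirrors the generated routes: clone the wrapper, then take the Arc
        // out of it (inner.0) before invoking the service.
        let inner = self.inner.clone();
        let inner = inner.0;
        inner.greet()
    }
}

struct Hello;

impl Greeter for Hello {
    fn greet(&self) -> String {
        "hello".to_string()
    }
}

fn main() {
    let server = ExampleServer::new(Hello);
    assert_eq!(server.handle(), "hello");
}
```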
#[derive(Debug)] - pub struct ServiceServer { - inner: Arc, + pub struct ServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl ServiceServer { + struct _Inner(Arc); + impl ServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -1373,8 +1310,8 @@ pub mod service_server { impl tonic::codegen::Service> for ServiceServer where T: Service, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -1386,6 +1323,7 @@ pub mod service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/cosmos.tx.v1beta1.Service/Simulate" => { #[allow(non_camel_case_types)] @@ -1414,6 +1352,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SimulateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1457,6 +1396,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetTxSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1502,6 +1442,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BroadcastTxSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1547,6 +1488,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetTxsEventSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1592,6 +1534,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetBlockWithTxsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1635,6 +1578,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TxDecodeSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1678,6 +1622,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TxEncodeSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1723,6 +1668,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move 
{ + let inner = inner.0; let method = TxEncodeAminoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1768,6 +1714,7 @@ pub mod service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TxDecodeAminoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1786,25 +1733,20 @@ pub mod service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for ServiceServer { + impl Clone for ServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1816,9 +1758,17 @@ pub mod service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "cosmos.tx.v1beta1.Service"; - impl tonic::server::NamedService for ServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for ServiceServer { + const NAME: &'static str = "cosmos.tx.v1beta1.Service"; } } diff --git a/crates/proto/src/gen/cosmos_proto.rs b/crates/proto/src/gen/cosmos_proto.rs index 9057ec92dd..053c6d5266 100644 --- a/crates/proto/src/gen/cosmos_proto.rs +++ b/crates/proto/src/gen/cosmos_proto.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// InterfaceDescriptor describes an interface type to be used with /// accepts_interface and implements_interface and declared by declare_interface. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct InterfaceDescriptor { /// name is the name of the interface. It should be a short-name (without @@ -18,10 +18,7 @@ impl ::prost::Name for InterfaceDescriptor { const NAME: &'static str = "InterfaceDescriptor"; const PACKAGE: &'static str = "cosmos_proto"; fn full_name() -> ::prost::alloc::string::String { - "cosmos_proto.InterfaceDescriptor".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos_proto.InterfaceDescriptor".into() + ::prost::alloc::format!("cosmos_proto.{}", Self::NAME) } } /// ScalarDescriptor describes an scalar type to be used with @@ -31,6 +28,7 @@ impl ::prost::Name for InterfaceDescriptor { /// Scalars should ideally define an encoding such that there is only one /// valid syntactical representation for a given semantic meaning, /// i.e. the encoding should be deterministic. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ScalarDescriptor { /// name is the name of the scalar. 
It should be a short-name (without @@ -55,10 +53,7 @@ impl ::prost::Name for ScalarDescriptor { const NAME: &'static str = "ScalarDescriptor"; const PACKAGE: &'static str = "cosmos_proto"; fn full_name() -> ::prost::alloc::string::String { - "cosmos_proto.ScalarDescriptor".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/cosmos_proto.ScalarDescriptor".into() + ::prost::alloc::format!("cosmos_proto.{}", Self::NAME) } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] @@ -75,9 +70,9 @@ impl ScalarType { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "SCALAR_TYPE_UNSPECIFIED", - Self::String => "SCALAR_TYPE_STRING", - Self::Bytes => "SCALAR_TYPE_BYTES", + ScalarType::Unspecified => "SCALAR_TYPE_UNSPECIFIED", + ScalarType::String => "SCALAR_TYPE_STRING", + ScalarType::Bytes => "SCALAR_TYPE_BYTES", } } /// Creates an enum from field names used in the ProtoBuf definition. diff --git a/crates/proto/src/gen/google.api.rs b/crates/proto/src/gen/google.api.rs index 04c0f1fcf1..6655a2b5d5 100644 --- a/crates/proto/src/gen/google.api.rs +++ b/crates/proto/src/gen/google.api.rs @@ -1,7 +1,7 @@ -// This file is @generated by prost-build. /// Defines the HTTP configuration for an API service. It contains a list of /// \[HttpRule\]\[google.api.HttpRule\], each specifying the mapping of an RPC method /// to one or more HTTP REST API methods. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Http { /// A list of HTTP configuration rules that apply to individual API methods. @@ -22,10 +22,7 @@ impl ::prost::Name for Http { const NAME: &'static str = "Http"; const PACKAGE: &'static str = "google.api"; fn full_name() -> ::prost::alloc::string::String { - "google.api.Http".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/google.api.Http".into() + ::prost::alloc::format!("google.api.{}", Self::NAME) } } /// # gRPC Transcoding @@ -310,6 +307,7 @@ impl ::prost::Name for Http { /// If an API needs to use a JSON array for request or response body, it can map /// the request or response body to a repeated field. However, some gRPC /// Transcoding implementations may not support this feature. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct HttpRule { /// Selects a method to which this rule applies. @@ -349,6 +347,7 @@ pub mod http_rule { /// Determines the URL pattern is matched by this rules. This pattern can be /// used with any of the {get|put|post|delete|patch} methods. A custom method /// can be defined using the 'custom' field. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Pattern { /// Maps to HTTP GET. Used for listing and getting information about @@ -379,13 +378,11 @@ impl ::prost::Name for HttpRule { const NAME: &'static str = "HttpRule"; const PACKAGE: &'static str = "google.api"; fn full_name() -> ::prost::alloc::string::String { - "google.api.HttpRule".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/google.api.HttpRule".into() + ::prost::alloc::format!("google.api.{}", Self::NAME) } } /// A custom pattern is used for defining custom HTTP verb. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CustomHttpPattern { /// The name of this custom HTTP verb. 
@@ -399,9 +396,6 @@ impl ::prost::Name for CustomHttpPattern { const NAME: &'static str = "CustomHttpPattern"; const PACKAGE: &'static str = "google.api"; fn full_name() -> ::prost::alloc::string::String { - "google.api.CustomHttpPattern".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/google.api.CustomHttpPattern".into() + ::prost::alloc::format!("google.api.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/ibc.applications.transfer.v1.rs b/crates/proto/src/gen/ibc.applications.transfer.v1.rs index efd11eb84c..eb7e290e45 100644 --- a/crates/proto/src/gen/ibc.applications.transfer.v1.rs +++ b/crates/proto/src/gen/ibc.applications.transfer.v1.rs @@ -1,14 +1,7 @@ -// This file is @generated by prost-build. /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query provides defines the gRPC querier service. @@ -31,8 +24,8 @@ pub mod query_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -57,7 +50,7 @@ pub mod query_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryClient::new(InterceptedService::new(inner, interceptor)) } @@ -108,7 +101,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -139,7 +133,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -170,7 +165,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -199,7 +195,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -230,7 +227,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -264,7 +262,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -287,17 +286,11 @@ pub mod query_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServer. #[async_trait] - pub trait Query: std::marker::Send + std::marker::Sync + 'static { + pub trait Query: Send + Sync + 'static { /// DenomTrace queries a denomination trace information. 
async fn denom_trace( &self, @@ -373,18 +366,20 @@ pub mod query_server { } /// Query provides defines the gRPC querier service. #[derive(Debug)] - pub struct QueryServer { - inner: Arc, + pub struct QueryServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServer { + struct _Inner(Arc); + impl QueryServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -434,8 +429,8 @@ pub mod query_server { impl tonic::codegen::Service> for QueryServer where T: Query, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -447,6 +442,7 @@ pub mod query_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/ibc.applications.transfer.v1.Query/DenomTrace" => { #[allow(non_camel_case_types)] @@ -480,6 +476,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DenomTraceSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -528,6 +525,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DenomTracesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -576,6 +574,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ParamsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -624,6 +623,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DenomHashSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -672,6 +672,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = EscrowAddressSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -720,6 +721,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TotalEscrowForDenomSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -738,25 +740,20 @@ pub mod query_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - 
Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServer { + impl Clone for QueryServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -768,9 +765,17 @@ pub mod query_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "ibc.applications.transfer.v1.Query"; - impl tonic::server::NamedService for QueryServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServer { + const NAME: &'static str = "ibc.applications.transfer.v1.Query"; } } diff --git a/crates/proto/src/gen/ibc.core.channel.v1.rs b/crates/proto/src/gen/ibc.core.channel.v1.rs index 19af5dec15..bfc483f8af 100644 --- a/crates/proto/src/gen/ibc.core.channel.v1.rs +++ b/crates/proto/src/gen/ibc.core.channel.v1.rs @@ -1,14 +1,7 @@ -// This file is @generated by prost-build. /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query provides defines the gRPC querier service @@ -31,8 +24,8 @@ pub mod query_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -57,7 +50,7 @@ pub mod query_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryClient::new(InterceptedService::new(inner, interceptor)) } @@ -106,7 +99,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -133,7 +127,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -163,7 +158,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -195,7 +191,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -227,7 +224,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -258,7 +256,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -290,7 +289,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; 
@@ -322,7 +322,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -351,7 +352,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -383,7 +385,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -418,7 +421,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -450,7 +454,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -479,7 +484,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -510,7 +516,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -530,17 +537,11 @@ pub mod query_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServer. #[async_trait] - pub trait Query: std::marker::Send + std::marker::Sync + 'static { + pub trait Query: Send + Sync + 'static { /// Channel queries an IBC Channel. 
async fn channel( &self, @@ -716,18 +717,20 @@ pub mod query_server { } /// Query provides defines the gRPC querier service #[derive(Debug)] - pub struct QueryServer { - inner: Arc, + pub struct QueryServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServer { + struct _Inner(Arc); + impl QueryServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -777,8 +780,8 @@ pub mod query_server { impl tonic::codegen::Service> for QueryServer where T: Query, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -790,6 +793,7 @@ pub mod query_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/ibc.core.channel.v1.Query/Channel" => { #[allow(non_camel_case_types)] @@ -823,6 +827,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ChannelSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -871,6 +876,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ChannelsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -919,6 +925,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConnectionChannelsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -967,6 +974,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ChannelClientStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1015,6 +1023,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ChannelConsensusStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1063,6 +1072,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = PacketCommitmentSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1111,6 +1121,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = PacketCommitmentsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1159,6 +1170,7 @@ pub mod query_server 
{ let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = PacketReceiptSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1207,6 +1219,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = PacketAcknowledgementSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1255,6 +1268,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = PacketAcknowledgementsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1303,6 +1317,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = UnreceivedPacketsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1351,6 +1366,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = UnreceivedAcksSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1399,6 +1415,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NextSequenceReceiveSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1447,6 +1464,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NextSequenceSendSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1465,25 +1483,20 @@ pub mod query_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServer { + impl Clone for QueryServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1495,9 +1508,17 @@ pub mod query_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "ibc.core.channel.v1.Query"; - impl tonic::server::NamedService for QueryServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServer { + const NAME: &'static str = "ibc.core.channel.v1.Query"; } } diff --git 
a/crates/proto/src/gen/ibc.core.client.v1.rs b/crates/proto/src/gen/ibc.core.client.v1.rs index 091eb96dec..aa8983788e 100644 --- a/crates/proto/src/gen/ibc.core.client.v1.rs +++ b/crates/proto/src/gen/ibc.core.client.v1.rs @@ -1,14 +1,7 @@ -// This file is @generated by prost-build. /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query provides defines the gRPC querier service @@ -31,8 +24,8 @@ pub mod query_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -57,7 +50,7 @@ pub mod query_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryClient::new(InterceptedService::new(inner, interceptor)) } @@ -108,7 +101,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -137,7 +131,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -167,7 +162,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -197,7 +193,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -226,7 +223,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -257,7 +255,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -286,7 +285,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -315,7 +315,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -346,7 +347,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -366,17 +368,11 @@ pub mod query_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServer. 
#[async_trait] - pub trait Query: std::marker::Send + std::marker::Sync + 'static { + pub trait Query: Send + Sync + 'static { /// ClientState queries an IBC light client. async fn client_state( &self, @@ -490,18 +486,20 @@ pub mod query_server { } /// Query provides defines the gRPC querier service #[derive(Debug)] - pub struct QueryServer { - inner: Arc, + pub struct QueryServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServer { + struct _Inner(Arc); + impl QueryServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -551,8 +549,8 @@ pub mod query_server { impl tonic::codegen::Service> for QueryServer where T: Query, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -564,6 +562,7 @@ pub mod query_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/ibc.core.client.v1.Query/ClientState" => { #[allow(non_camel_case_types)] @@ -597,6 +596,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ClientStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -645,6 +645,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ClientStatesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -693,6 +694,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConsensusStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -741,6 +743,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConsensusStatesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -789,6 +792,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConsensusStateHeightsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -837,6 +841,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ClientStatusSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -885,6 +890,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method 
= ClientParamsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -933,6 +939,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = UpgradedClientStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -982,6 +989,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = UpgradedConsensusStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1000,25 +1008,20 @@ pub mod query_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServer { + impl Clone for QueryServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1030,9 +1033,17 @@ pub mod query_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "ibc.core.client.v1.Query"; - impl tonic::server::NamedService for QueryServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServer { + const NAME: &'static str = "ibc.core.client.v1.Query"; } } diff --git a/crates/proto/src/gen/ibc.core.connection.v1.rs b/crates/proto/src/gen/ibc.core.connection.v1.rs index 24f8f6d06e..a9e9ad151d 100644 --- a/crates/proto/src/gen/ibc.core.connection.v1.rs +++ b/crates/proto/src/gen/ibc.core.connection.v1.rs @@ -1,14 +1,7 @@ -// This file is @generated by prost-build. /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod query_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query provides defines the gRPC querier service @@ -31,8 +24,8 @@ pub mod query_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -57,7 +50,7 @@ pub mod query_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryClient::new(InterceptedService::new(inner, interceptor)) } @@ -108,7 +101,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -137,7 +131,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -167,7 +162,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -199,7 +195,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -234,7 +231,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -268,7 +266,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -288,17 +287,11 @@ pub mod query_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServer. #[async_trait] - pub trait Query: std::marker::Send + std::marker::Sync + 'static { + pub trait Query: Send + Sync + 'static { /// Connection queries an IBC connection end. 
async fn connection( &self, @@ -377,18 +370,20 @@ pub mod query_server { } /// Query provides defines the gRPC querier service #[derive(Debug)] - pub struct QueryServer { - inner: Arc, + pub struct QueryServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServer { + struct _Inner(Arc); + impl QueryServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -438,8 +433,8 @@ pub mod query_server { impl tonic::codegen::Service> for QueryServer where T: Query, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -451,6 +446,7 @@ pub mod query_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/ibc.core.connection.v1.Query/Connection" => { #[allow(non_camel_case_types)] @@ -484,6 +480,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConnectionSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -532,6 +529,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConnectionsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -580,6 +578,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ClientConnectionsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -628,6 +627,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConnectionClientStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -677,6 +677,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConnectionConsensusStateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -725,6 +726,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConnectionParamsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -743,25 +745,20 @@ pub mod query_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - 
tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServer { + impl Clone for QueryServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -773,9 +770,17 @@ pub mod query_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "ibc.core.connection.v1.Query"; - impl tonic::server::NamedService for QueryServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServer { + const NAME: &'static str = "ibc.core.connection.v1.Query"; } } diff --git a/crates/proto/src/gen/noble.forwarding.v1.rs b/crates/proto/src/gen/noble.forwarding.v1.rs index 2d327011c7..1bfadd3576 100644 --- a/crates/proto/src/gen/noble.forwarding.v1.rs +++ b/crates/proto/src/gen/noble.forwarding.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ForwardingAccount { #[prost(message, optional, tag = "1")] @@ -16,12 +16,10 @@ impl ::prost::Name for ForwardingAccount { const NAME: &'static str = "ForwardingAccount"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.ForwardingAccount".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.ForwardingAccount".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ForwardingPubKey { #[prost(bytes = "vec", tag = "1")] @@ -31,12 +29,10 @@ impl ::prost::Name for ForwardingPubKey { const NAME: &'static str = "ForwardingPubKey"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.ForwardingPubKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.ForwardingPubKey".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisState { #[prost(map = "string, uint64", tag = "1")] @@ -59,12 +55,10 @@ impl ::prost::Name for GenesisState { const NAME: &'static str = "GenesisState"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.GenesisState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.GenesisState".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RegisterAccountData { #[prost(string, tag = "1")] @@ -76,12 +70,10 @@ impl ::prost::Name for RegisterAccountData { const NAME: &'static str = "RegisterAccountData"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.RegisterAccountData".into() - } - fn type_url() -> ::prost::alloc::string::String { - 
"/noble.forwarding.v1.RegisterAccountData".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RegisterAccountMemo { #[prost(message, optional, tag = "1")] @@ -89,6 +81,7 @@ pub struct RegisterAccountMemo { } /// Nested message and enum types in `RegisterAccountMemo`. pub mod register_account_memo { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RegisterAccountDataWrapper { #[prost(message, optional, tag = "1")] @@ -98,10 +91,9 @@ pub mod register_account_memo { const NAME: &'static str = "RegisterAccountDataWrapper"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.RegisterAccountMemo.RegisterAccountDataWrapper".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.RegisterAccountMemo.RegisterAccountDataWrapper".into() + ::prost::alloc::format!( + "noble.forwarding.v1.RegisterAccountMemo.{}", Self::NAME + ) } } } @@ -109,12 +101,10 @@ impl ::prost::Name for RegisterAccountMemo { const NAME: &'static str = "RegisterAccountMemo"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.RegisterAccountMemo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.RegisterAccountMemo".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryAddress { #[prost(string, tag = "1")] @@ -126,12 +116,10 @@ impl ::prost::Name for QueryAddress { const NAME: &'static str = "QueryAddress"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.QueryAddress".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.QueryAddress".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryAddressResponse { #[prost(string, tag = "1")] @@ -143,24 +131,20 @@ impl ::prost::Name for QueryAddressResponse { const NAME: &'static str = "QueryAddressResponse"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.QueryAddressResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.QueryAddressResponse".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryStats {} impl ::prost::Name for QueryStats { const NAME: &'static str = "QueryStats"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.QueryStats".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.QueryStats".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryStatsResponse { #[prost(map = "string, message", tag = "1")] @@ -170,12 +154,10 @@ impl ::prost::Name for QueryStatsResponse { const 
NAME: &'static str = "QueryStatsResponse"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.QueryStatsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.QueryStatsResponse".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryStatsByChannel { #[prost(string, tag = "1")] @@ -185,12 +167,10 @@ impl ::prost::Name for QueryStatsByChannel { const NAME: &'static str = "QueryStatsByChannel"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.QueryStatsByChannel".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.QueryStatsByChannel".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct QueryStatsByChannelResponse { #[prost(uint64, tag = "1")] @@ -206,12 +186,10 @@ impl ::prost::Name for QueryStatsByChannelResponse { const NAME: &'static str = "QueryStatsByChannelResponse"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.QueryStatsByChannelResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.QueryStatsByChannelResponse".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Stats { #[prost(string, tag = "1")] @@ -229,22 +207,13 @@ impl ::prost::Name for Stats { const NAME: &'static str = "Stats"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.Stats".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.Stats".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod query_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; #[derive(Debug, Clone)] @@ -266,8 +235,8 @@ pub mod query_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -292,7 +261,7 @@ pub mod query_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryClient::new(InterceptedService::new(inner, interceptor)) } @@ -338,7 +307,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -362,7 +332,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -386,7 +357,8 @@ pub mod query_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -404,17 +376,11 @@ pub mod query_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServer. 
#[async_trait] - pub trait Query: std::marker::Send + std::marker::Sync + 'static { + pub trait Query: Send + Sync + 'static { async fn address( &self, request: tonic::Request, @@ -438,18 +404,20 @@ pub mod query_server { >; } #[derive(Debug)] - pub struct QueryServer { - inner: Arc, + pub struct QueryServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServer { + struct _Inner(Arc); + impl QueryServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -499,8 +467,8 @@ pub mod query_server { impl tonic::codegen::Service> for QueryServer where T: Query, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -512,6 +480,7 @@ pub mod query_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/noble.forwarding.v1.Query/Address" => { #[allow(non_camel_case_types)] @@ -540,6 +509,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AddressSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -583,6 +553,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = StatsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -628,6 +599,7 @@ pub mod query_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = StatsByChannelSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -646,25 +618,20 @@ pub mod query_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServer { + impl Clone for QueryServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -676,12 +643,21 @@ pub mod query_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "noble.forwarding.v1.Query"; - impl tonic::server::NamedService for QueryServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServer { + const NAME: &'static str = 
"noble.forwarding.v1.Query"; } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MsgRegisterAccount { #[prost(string, tag = "1")] @@ -695,12 +671,10 @@ impl ::prost::Name for MsgRegisterAccount { const NAME: &'static str = "MsgRegisterAccount"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.MsgRegisterAccount".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.MsgRegisterAccount".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MsgRegisterAccountResponse { #[prost(string, tag = "1")] @@ -710,12 +684,10 @@ impl ::prost::Name for MsgRegisterAccountResponse { const NAME: &'static str = "MsgRegisterAccountResponse"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.MsgRegisterAccountResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.MsgRegisterAccountResponse".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MsgClearAccount { #[prost(string, tag = "1")] @@ -727,34 +699,23 @@ impl ::prost::Name for MsgClearAccount { const NAME: &'static str = "MsgClearAccount"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.MsgClearAccount".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.MsgClearAccount".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct MsgClearAccountResponse {} impl ::prost::Name for MsgClearAccountResponse { const NAME: &'static str = "MsgClearAccountResponse"; const PACKAGE: &'static str = "noble.forwarding.v1"; fn full_name() -> ::prost::alloc::string::String { - "noble.forwarding.v1.MsgClearAccountResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/noble.forwarding.v1.MsgClearAccountResponse".into() + ::prost::alloc::format!("noble.forwarding.v1.{}", Self::NAME) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod msg_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; #[derive(Debug, Clone)] @@ -776,8 +737,8 @@ pub mod msg_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -802,7 +763,7 @@ pub mod msg_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { MsgClient::new(InterceptedService::new(inner, interceptor)) } @@ -848,7 +809,8 @@ pub mod msg_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -872,7 +834,8 @@ pub mod msg_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -890,17 +853,11 @@ pub mod msg_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod msg_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with MsgServer. #[async_trait] - pub trait Msg: std::marker::Send + std::marker::Sync + 'static { + pub trait Msg: Send + Sync + 'static { async fn register_account( &self, request: tonic::Request, @@ -917,18 +874,20 @@ pub mod msg_server { >; } #[derive(Debug)] - pub struct MsgServer { - inner: Arc, + pub struct MsgServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl MsgServer { + struct _Inner(Arc); + impl MsgServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -978,8 +937,8 @@ pub mod msg_server { impl tonic::codegen::Service> for MsgServer where T: Msg, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -991,6 +950,7 @@ pub mod msg_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/noble.forwarding.v1.Msg/RegisterAccount" => { #[allow(non_camel_case_types)] @@ -1019,6 +979,7 @@ pub mod msg_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = RegisterAccountSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1062,6 +1023,7 @@ pub mod msg_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; 
let method = ClearAccountSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1080,25 +1042,20 @@ pub mod msg_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for MsgServer { + impl Clone for MsgServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1110,9 +1067,17 @@ pub mod msg_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "noble.forwarding.v1.Msg"; - impl tonic::server::NamedService for MsgServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for MsgServer { + const NAME: &'static str = "noble.forwarding.v1.Msg"; } } diff --git a/crates/proto/src/gen/penumbra.cnidarium.v1.rs b/crates/proto/src/gen/penumbra.cnidarium.v1.rs new file mode 100644 index 0000000000..7a04181967 --- /dev/null +++ b/crates/proto/src/gen/penumbra.cnidarium.v1.rs @@ -0,0 +1,815 @@ +/// Performs a key-value query against the nonverifiable storage, +/// using a byte-encoded key. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NonVerifiableKeyValueRequest { + #[prost(message, optional, tag = "1")] + pub key: ::core::option::Option, +} +/// Nested message and enum types in `NonVerifiableKeyValueRequest`. +pub mod non_verifiable_key_value_request { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Key { + #[prost(bytes = "vec", tag = "1")] + pub inner: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Key { + const NAME: &'static str = "Key"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for NonVerifiableKeyValueRequest { + const NAME: &'static str = "NonVerifiableKeyValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct NonVerifiableKeyValueResponse { + /// The value corresponding to the specified key, if it was found. + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, +} +/// Nested message and enum types in `NonVerifiableKeyValueResponse`. 
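// Editorial illustration (not part of the generated diff): building a request
// against the nonverifiable store, assuming the `NonVerifiableKeyValueRequest` and
// nested `Key` messages generated above. The key bytes below are hypothetical.
fn _example_nonverifiable_request() -> NonVerifiableKeyValueRequest {
    NonVerifiableKeyValueRequest {
        // Keys in the nonverifiable store are raw bytes, wrapped in the nested Key message.
        key: Some(non_verifiable_key_value_request::Key {
            inner: b"example/nonverifiable/key".to_vec(),
        }),
    }
}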
+pub mod non_verifiable_key_value_response { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Value { + #[prost(bytes = "vec", tag = "1")] + pub value: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Value { + const NAME: &'static str = "Value"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for NonVerifiableKeyValueResponse { + const NAME: &'static str = "NonVerifiableKeyValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Performs a key-value query against the JMT, either by key or by key hash. +/// +/// Proofs are only supported by key. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct KeyValueRequest { + /// If set, the key to fetch from storage. + #[prost(string, tag = "2")] + pub key: ::prost::alloc::string::String, + /// whether to return a proof + #[prost(bool, tag = "3")] + pub proof: bool, +} +impl ::prost::Name for KeyValueRequest { + const NAME: &'static str = "KeyValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct KeyValueResponse { + /// The value corresponding to the specified key, if it was found. + #[prost(message, optional, tag = "1")] + pub value: ::core::option::Option, + /// A proof of existence or non-existence. + #[prost(message, optional, tag = "2")] + pub proof: ::core::option::Option< + ::ibc_proto::ibc::core::commitment::v1::MerkleProof, + >, +} +/// Nested message and enum types in `KeyValueResponse`. +pub mod key_value_response { + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct Value { + #[prost(bytes = "vec", tag = "1")] + pub value: ::prost::alloc::vec::Vec, + } + impl ::prost::Name for Value { + const NAME: &'static str = "Value"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!( + "penumbra.cnidarium.v1.KeyValueResponse.{}", Self::NAME + ) + } + } +} +impl ::prost::Name for KeyValueResponse { + const NAME: &'static str = "KeyValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Performs a prefixed key-value query, by string prefix. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PrefixValueRequest { + /// The prefix to fetch subkeys from storage. 
+ #[prost(string, tag = "2")] + pub prefix: ::prost::alloc::string::String, +} +impl ::prost::Name for PrefixValueRequest { + const NAME: &'static str = "PrefixValueRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct PrefixValueResponse { + #[prost(string, tag = "1")] + pub key: ::prost::alloc::string::String, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, +} +impl ::prost::Name for PrefixValueResponse { + const NAME: &'static str = "PrefixValueResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Requests a stream of new key-value pairs that have been committed to the state. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WatchRequest { + /// A regex for keys in the verifiable storage. + /// + /// Only key-value updates whose keys match this regex will be returned. + /// Note that the empty string matches all keys. + /// To exclude all keys, use the regex "$^", which matches no strings. + #[prost(string, tag = "1")] + pub key_regex: ::prost::alloc::string::String, + /// A regex for keys in the nonverifiable storage. + /// + /// Only key-value updates whose keys match this regex will be returned. + /// Note that the empty string matches all keys. + /// To exclude all keys, use the regex "$^", which matches no strings. + #[prost(string, tag = "2")] + pub nv_key_regex: ::prost::alloc::string::String, +} +impl ::prost::Name for WatchRequest { + const NAME: &'static str = "WatchRequest"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// A key-value pair that has been committed to the state. +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct WatchResponse { + /// The state version the key-value pair was committed at. + #[prost(uint64, tag = "1")] + pub version: u64, + /// The entry that was committed. + #[prost(oneof = "watch_response::Entry", tags = "5, 6")] + pub entry: ::core::option::Option, +} +/// Nested message and enum types in `WatchResponse`. +pub mod watch_response { + /// Elements of the verifiable storage have string keys. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct KeyValue { + #[prost(string, tag = "1")] + pub key: ::prost::alloc::string::String, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, + /// If set to true, the key-value pair was deleted. + /// This allows distinguishing a deleted key-value pair from a key-value pair whose value is empty. + #[prost(bool, tag = "3")] + pub deleted: bool, + } + impl ::prost::Name for KeyValue { + const NAME: &'static str = "KeyValue"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.WatchResponse.{}", Self::NAME) + } + } + /// Elements of the nonverifiable storage have byte keys. 
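// Editorial illustration (not part of the generated diff): a WatchRequest using the
// regex conventions documented on the fields above. Written with `super::` paths
// because this point sits inside the nested `watch_response` module.
fn _example_watch_request() -> super::WatchRequest {
    super::WatchRequest {
        // The empty string matches every key in the verifiable storage.
        key_regex: String::new(),
        // "$^" matches no strings, so nonverifiable keys are excluded entirely.
        nv_key_regex: "$^".to_string(),
    }
}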
+ #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] + pub struct NvKeyValue { + #[prost(bytes = "vec", tag = "1")] + pub key: ::prost::alloc::vec::Vec, + #[prost(bytes = "vec", tag = "2")] + pub value: ::prost::alloc::vec::Vec, + /// If set to true, the key-value pair was deleted. + /// This allows distinguishing a deleted key-value pair from a key-value pair whose value is empty. + #[prost(bool, tag = "3")] + pub deleted: bool, + } + impl ::prost::Name for NvKeyValue { + const NAME: &'static str = "NvKeyValue"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.WatchResponse.{}", Self::NAME) + } + } + /// The entry that was committed. + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] + pub enum Entry { + #[prost(message, tag = "5")] + Kv(KeyValue), + #[prost(message, tag = "6")] + NvKv(NvKeyValue), + } +} +impl ::prost::Name for WatchResponse { + const NAME: &'static str = "WatchResponse"; + const PACKAGE: &'static str = "penumbra.cnidarium.v1"; + fn full_name() -> ::prost::alloc::string::String { + ::prost::alloc::format!("penumbra.cnidarium.v1.{}", Self::NAME) + } +} +/// Generated client implementations. +#[cfg(feature = "rpc")] +pub mod query_service_client { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + use tonic::codegen::http::Uri; + #[derive(Debug, Clone)] + pub struct QueryServiceClient { + inner: tonic::client::Grpc, + } + impl QueryServiceClient { + /// Attempt to create a new client by connecting to a given endpoint. + pub async fn connect(dst: D) -> Result + where + D: TryInto, + D::Error: Into, + { + let conn = tonic::transport::Endpoint::new(dst)?.connect().await?; + Ok(Self::new(conn)) + } + } + impl QueryServiceClient + where + T: tonic::client::GrpcService, + T::Error: Into, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, + { + pub fn new(inner: T) -> Self { + let inner = tonic::client::Grpc::new(inner); + Self { inner } + } + pub fn with_origin(inner: T, origin: Uri) -> Self { + let inner = tonic::client::Grpc::with_origin(inner, origin); + Self { inner } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> QueryServiceClient> + where + F: tonic::service::Interceptor, + T::ResponseBody: Default, + T: tonic::codegen::Service< + http::Request, + Response = http::Response< + >::ResponseBody, + >, + >, + , + >>::Error: Into + Send + Sync, + { + QueryServiceClient::new(InterceptedService::new(inner, interceptor)) + } + /// Compress requests with the given encoding. + /// + /// This requires the server to support it otherwise it might respond with an + /// error. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.send_compressed(encoding); + self + } + /// Enable decompressing responses. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.inner = self.inner.accept_compressed(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_decoding_message_size(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.inner = self.inner.max_encoding_message_size(limit); + self + } + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the JMT storage. + pub async fn key_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/KeyValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "KeyValue"), + ); + self.inner.unary(req, path, codec).await + } + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the non-verifiable storage. + pub async fn non_verifiable_key_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/NonVerifiableKeyValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new( + "penumbra.cnidarium.v1.QueryService", + "NonVerifiableKeyValue", + ), + ); + self.inner.unary(req, path, codec).await + } + /// General-purpose prefixed key-value state query API, that can be used to query + /// arbitrary prefixes in the JMT storage. + pub async fn prefix_value( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/PrefixValue", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert( + GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "PrefixValue"), + ); + self.inner.server_streaming(req, path, codec).await + } + /// Subscribes to a stream of key-value updates, with regex filtering on keys. + pub async fn watch( + &mut self, + request: impl tonic::IntoRequest, + ) -> std::result::Result< + tonic::Response>, + tonic::Status, + > { + self.inner + .ready() + .await + .map_err(|e| { + tonic::Status::new( + tonic::Code::Unknown, + format!("Service was not ready: {}", e.into()), + ) + })?; + let codec = tonic::codec::ProstCodec::default(); + let path = http::uri::PathAndQuery::from_static( + "/penumbra.cnidarium.v1.QueryService/Watch", + ); + let mut req = request.into_request(); + req.extensions_mut() + .insert(GrpcMethod::new("penumbra.cnidarium.v1.QueryService", "Watch")); + self.inner.server_streaming(req, path, codec).await + } + } +} +/// Generated server implementations. 
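// Editorial illustration (not part of the generated diff): one way the generated
// QueryServiceClient above might be driven. Assumes the "rpc" feature, a tokio
// runtime, and a hypothetical endpoint URL and keys.
async fn _example_query_service_usage() -> Result<(), Box<dyn std::error::Error>> {
    let mut client = query_service_client::QueryServiceClient::connect(
        "http://127.0.0.1:8080", // hypothetical gRPC endpoint
    )
    .await?;

    // Unary query against the verifiable (JMT) storage, requesting a proof.
    let kv = client
        .key_value(KeyValueRequest {
            key: "example/state/key".to_string(), // hypothetical key
            proof: true,
        })
        .await?
        .into_inner();
    println!(
        "value found: {}, proof attached: {}",
        kv.value.is_some(),
        kv.proof.is_some()
    );

    // Server-streaming prefix query; drain the stream one message at a time.
    let mut prefix = client
        .prefix_value(PrefixValueRequest {
            prefix: "example/prefix/".to_string(), // hypothetical prefix
        })
        .await?
        .into_inner();
    while let Some(resp) = prefix.message().await? {
        println!("{} = {} bytes", resp.key, resp.value.len());
    }
    Ok(())
}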
+#[cfg(feature = "rpc")] +pub mod query_service_server { + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] + use tonic::codegen::*; + /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. + #[async_trait] + pub trait QueryService: Send + Sync + 'static { + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the JMT storage. + async fn key_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// General-purpose key-value state query API, that can be used to query + /// arbitrary keys in the non-verifiable storage. + async fn non_verifiable_key_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the PrefixValue method. + type PrefixValueStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + Send + + 'static; + /// General-purpose prefixed key-value state query API, that can be used to query + /// arbitrary prefixes in the JMT storage. + async fn prefix_value( + &self, + request: tonic::Request, + ) -> std::result::Result< + tonic::Response, + tonic::Status, + >; + /// Server streaming response type for the Watch method. + type WatchStream: tonic::codegen::tokio_stream::Stream< + Item = std::result::Result, + > + + Send + + 'static; + /// Subscribes to a stream of key-value updates, with regex filtering on keys. + async fn watch( + &self, + request: tonic::Request, + ) -> std::result::Result, tonic::Status>; + } + #[derive(Debug)] + pub struct QueryServiceServer { + inner: _Inner, + accept_compression_encodings: EnabledCompressionEncodings, + send_compression_encodings: EnabledCompressionEncodings, + max_decoding_message_size: Option, + max_encoding_message_size: Option, + } + struct _Inner(Arc); + impl QueryServiceServer { + pub fn new(inner: T) -> Self { + Self::from_arc(Arc::new(inner)) + } + pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); + Self { + inner, + accept_compression_encodings: Default::default(), + send_compression_encodings: Default::default(), + max_decoding_message_size: None, + max_encoding_message_size: None, + } + } + pub fn with_interceptor( + inner: T, + interceptor: F, + ) -> InterceptedService + where + F: tonic::service::Interceptor, + { + InterceptedService::new(Self::new(inner), interceptor) + } + /// Enable decompressing requests with the given encoding. + #[must_use] + pub fn accept_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.accept_compression_encodings.enable(encoding); + self + } + /// Compress responses with the given encoding, if the client supports it. + #[must_use] + pub fn send_compressed(mut self, encoding: CompressionEncoding) -> Self { + self.send_compression_encodings.enable(encoding); + self + } + /// Limits the maximum size of a decoded message. + /// + /// Default: `4MB` + #[must_use] + pub fn max_decoding_message_size(mut self, limit: usize) -> Self { + self.max_decoding_message_size = Some(limit); + self + } + /// Limits the maximum size of an encoded message. 
+ /// + /// Default: `usize::MAX` + #[must_use] + pub fn max_encoding_message_size(mut self, limit: usize) -> Self { + self.max_encoding_message_size = Some(limit); + self + } + } + impl tonic::codegen::Service> for QueryServiceServer + where + T: QueryService, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, + { + type Response = http::Response; + type Error = std::convert::Infallible; + type Future = BoxFuture; + fn poll_ready( + &mut self, + _cx: &mut Context<'_>, + ) -> Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); + match req.uri().path() { + "/penumbra.cnidarium.v1.QueryService/KeyValue" => { + #[allow(non_camel_case_types)] + struct KeyValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::UnaryService + for KeyValueSvc { + type Response = super::KeyValueResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::key_value(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = KeyValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/NonVerifiableKeyValue" => { + #[allow(non_camel_case_types)] + struct NonVerifiableKeyValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::UnaryService + for NonVerifiableKeyValueSvc { + type Response = super::NonVerifiableKeyValueResponse; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::non_verifiable_key_value( + &inner, + request, + ) + .await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = NonVerifiableKeyValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.unary(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/PrefixValue" => { + #[allow(non_camel_case_types)] + struct PrefixValueSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::ServerStreamingService + for PrefixValueSvc { + type Response = super::PrefixValueResponse; + type ResponseStream 
= T::PrefixValueStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::prefix_value(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = PrefixValueSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + "/penumbra.cnidarium.v1.QueryService/Watch" => { + #[allow(non_camel_case_types)] + struct WatchSvc(pub Arc); + impl< + T: QueryService, + > tonic::server::ServerStreamingService + for WatchSvc { + type Response = super::WatchResponse; + type ResponseStream = T::WatchStream; + type Future = BoxFuture< + tonic::Response, + tonic::Status, + >; + fn call( + &mut self, + request: tonic::Request, + ) -> Self::Future { + let inner = Arc::clone(&self.0); + let fut = async move { + ::watch(&inner, request).await + }; + Box::pin(fut) + } + } + let accept_compression_encodings = self.accept_compression_encodings; + let send_compression_encodings = self.send_compression_encodings; + let max_decoding_message_size = self.max_decoding_message_size; + let max_encoding_message_size = self.max_encoding_message_size; + let inner = self.inner.clone(); + let fut = async move { + let inner = inner.0; + let method = WatchSvc(inner); + let codec = tonic::codec::ProstCodec::default(); + let mut grpc = tonic::server::Grpc::new(codec) + .apply_compression_config( + accept_compression_encodings, + send_compression_encodings, + ) + .apply_max_message_size_config( + max_decoding_message_size, + max_encoding_message_size, + ); + let res = grpc.server_streaming(method, req).await; + Ok(res) + }; + Box::pin(fut) + } + _ => { + Box::pin(async move { + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) + }) + } + } + } + } + impl Clone for QueryServiceServer { + fn clone(&self) -> Self { + let inner = self.inner.clone(); + Self { + inner, + accept_compression_encodings: self.accept_compression_encodings, + send_compression_encodings: self.send_compression_encodings, + max_decoding_message_size: self.max_decoding_message_size, + max_encoding_message_size: self.max_encoding_message_size, + } + } + } + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.cnidarium.v1.QueryService"; + } +} diff --git a/crates/proto/src/gen/penumbra.cnidarium.v1.serde.rs b/crates/proto/src/gen/penumbra.cnidarium.v1.serde.rs new file mode 100644 index 0000000000..8751e80e6e --- /dev/null +++ 
b/crates/proto/src/gen/penumbra.cnidarium.v1.serde.rs @@ -0,0 +1,1433 @@ +impl serde::Serialize for KeyValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if self.proof { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueRequest", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if self.proof { + struct_ser.serialize_field("proof", &self.proof)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for KeyValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "proof", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Proof, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "proof" => Ok(GeneratedField::Proof), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = KeyValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut proof__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Proof => { + if proof__.is_some() { + return Err(serde::de::Error::duplicate_field("proof")); + } + proof__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(KeyValueRequest { + key: key__.unwrap_or_default(), + proof: proof__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for KeyValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + if self.proof.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueResponse", len)?; + if let Some(v) = self.value.as_ref() { + struct_ser.serialize_field("value", v)?; + } + if let Some(v) = self.proof.as_ref() { + struct_ser.serialize_field("proof", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for KeyValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + "proof", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + Proof, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + "proof" => Ok(GeneratedField::Proof), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = KeyValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + let mut proof__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = map_.next_value()?; + } + GeneratedField::Proof => { + if proof__.is_some() { + return Err(serde::de::Error::duplicate_field("proof")); + } + proof__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(KeyValueResponse { + value: value__, + proof: proof__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for key_value_response::Value { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.KeyValueResponse.Value", len)?; + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for key_value_response::Value { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = key_value_response::Value; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.KeyValueResponse.Value") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
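// Editorial note (not part of the generated diff): these pbjson-style impls skip
// fields that hold their default value and encode `bytes` fields as base64 strings,
// following the proto3 JSON mapping (multi-word fields use lowerCamelCase names,
// e.g. "keyRegex" further below). For example, a key_value_response::Value holding
// the bytes [0x01, 0x02] would serialize as:
//
//     {"value":"AQI="}
//
// while a Value holding empty bytes serializes as {}.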
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(key_value_response::Value { + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.KeyValueResponse.Value", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for NonVerifiableKeyValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.key.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest", len)?; + if let Some(v) = self.key.as_ref() { + struct_ser.serialize_field("key", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for NonVerifiableKeyValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = NonVerifiableKeyValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(NonVerifiableKeyValueRequest { + key: key__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for non_verifiable_key_value_request::Key { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.inner.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.Key", len)?; + if !self.inner.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for non_verifiable_key_value_request::Key { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "inner", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Inner, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "inner" => Ok(GeneratedField::Inner), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = non_verifiable_key_value_request::Key; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.Key") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut inner__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Inner => { + if inner__.is_some() { + return Err(serde::de::Error::duplicate_field("inner")); + } + inner__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(non_verifiable_key_value_request::Key { + inner: inner__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueRequest.Key", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for NonVerifiableKeyValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.value.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse", len)?; + if let Some(v) = self.value.as_ref() { + struct_ser.serialize_field("value", v)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for NonVerifiableKeyValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = NonVerifiableKeyValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = map_.next_value()?; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(NonVerifiableKeyValueResponse { + value: value__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for non_verifiable_key_value_response::Value { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.Value", len)?; + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for non_verifiable_key_value_response::Value { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = non_verifiable_key_value_response::Value; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.Value") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(non_verifiable_key_value_response::Value { + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.NonVerifiableKeyValueResponse.Value", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PrefixValueRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.prefix.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.PrefixValueRequest", len)?; + if !self.prefix.is_empty() { + struct_ser.serialize_field("prefix", &self.prefix)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PrefixValueRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "prefix", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Prefix, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "prefix" => Ok(GeneratedField::Prefix), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PrefixValueRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.PrefixValueRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut prefix__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Prefix => { + if prefix__.is_some() { + return Err(serde::de::Error::duplicate_field("prefix")); + } + prefix__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PrefixValueRequest { + prefix: prefix__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.PrefixValueRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for PrefixValueResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.PrefixValueResponse", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for PrefixValueResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = PrefixValueResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.PrefixValueResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(PrefixValueResponse { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.PrefixValueResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for WatchRequest { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key_regex.is_empty() { + len += 1; + } + if !self.nv_key_regex.is_empty() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchRequest", len)?; + if !self.key_regex.is_empty() { + struct_ser.serialize_field("keyRegex", &self.key_regex)?; + } + if !self.nv_key_regex.is_empty() { + struct_ser.serialize_field("nvKeyRegex", &self.nv_key_regex)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WatchRequest { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key_regex", + "keyRegex", + "nv_key_regex", + "nvKeyRegex", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + KeyRegex, + NvKeyRegex, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "keyRegex" | "key_regex" => Ok(GeneratedField::KeyRegex), + "nvKeyRegex" | "nv_key_regex" => Ok(GeneratedField::NvKeyRegex), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WatchRequest; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchRequest") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key_regex__ = None; + let mut nv_key_regex__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::KeyRegex => { + if key_regex__.is_some() { + return Err(serde::de::Error::duplicate_field("keyRegex")); + } + key_regex__ = Some(map_.next_value()?); + } + GeneratedField::NvKeyRegex => { + if nv_key_regex__.is_some() { + return Err(serde::de::Error::duplicate_field("nvKeyRegex")); + } + nv_key_regex__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WatchRequest { + key_regex: key_regex__.unwrap_or_default(), + nv_key_regex: nv_key_regex__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchRequest", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for WatchResponse { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if self.version != 0 { + len += 1; + } + if self.entry.is_some() { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse", len)?; + if self.version != 0 { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("version", ToString::to_string(&self.version).as_str())?; + } + if let Some(v) = self.entry.as_ref() { + match v { + watch_response::Entry::Kv(v) => { + struct_ser.serialize_field("kv", v)?; + } + watch_response::Entry::NvKv(v) => { + struct_ser.serialize_field("nvKv", v)?; + } + } + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for WatchResponse { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "version", + "kv", + "nv_kv", + "nvKv", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Version, + Kv, + NvKv, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "version" => Ok(GeneratedField::Version), + "kv" => Ok(GeneratedField::Kv), + "nvKv" | "nv_kv" => Ok(GeneratedField::NvKv), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = WatchResponse; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut version__ = None; + let mut entry__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Version => { + if version__.is_some() { + return Err(serde::de::Error::duplicate_field("version")); + } + version__ = + Some(map_.next_value::<::pbjson::private::NumberDeserialize<_>>()?.0) + ; + } + GeneratedField::Kv => { + if entry__.is_some() { + return Err(serde::de::Error::duplicate_field("kv")); + } + entry__ = map_.next_value::<::std::option::Option<_>>()?.map(watch_response::Entry::Kv) +; + } + GeneratedField::NvKv => { + if entry__.is_some() { + return Err(serde::de::Error::duplicate_field("nvKv")); + } + entry__ = map_.next_value::<::std::option::Option<_>>()?.map(watch_response::Entry::NvKv) +; + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(WatchResponse { + version: version__.unwrap_or_default(), + entry: entry__, + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for watch_response::KeyValue { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + if self.deleted { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse.KeyValue", len)?; + if !self.key.is_empty() { + struct_ser.serialize_field("key", &self.key)?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + if self.deleted { + struct_ser.serialize_field("deleted", &self.deleted)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for watch_response::KeyValue { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + "deleted", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + Deleted, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + "deleted" => Ok(GeneratedField::Deleted), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = watch_response::KeyValue; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse.KeyValue") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + let mut deleted__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = Some(map_.next_value()?); + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Deleted => { + if deleted__.is_some() { + return Err(serde::de::Error::duplicate_field("deleted")); + } + deleted__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(watch_response::KeyValue { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + deleted: deleted__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse.KeyValue", FIELDS, GeneratedVisitor) + } +} +impl serde::Serialize for watch_response::NvKeyValue { + #[allow(deprecated)] + fn serialize(&self, serializer: S) -> std::result::Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut len = 0; + if !self.key.is_empty() { + len += 1; + } + if !self.value.is_empty() { + len += 1; + } + if self.deleted { + len += 1; + } + let mut struct_ser = serializer.serialize_struct("penumbra.cnidarium.v1.WatchResponse.NvKeyValue", len)?; + if !self.key.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("key", pbjson::private::base64::encode(&self.key).as_str())?; + } + if !self.value.is_empty() { + #[allow(clippy::needless_borrow)] + struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; + } + if self.deleted { + struct_ser.serialize_field("deleted", &self.deleted)?; + } + struct_ser.end() + } +} +impl<'de> serde::Deserialize<'de> for watch_response::NvKeyValue { + #[allow(deprecated)] + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + const FIELDS: &[&str] = &[ + "key", + "value", + "deleted", + ]; + + #[allow(clippy::enum_variant_names)] + enum GeneratedField { + Key, + Value, + Deleted, + __SkipField__, + } + impl<'de> serde::Deserialize<'de> for GeneratedField { + fn deserialize(deserializer: D) -> std::result::Result + where + D: serde::Deserializer<'de>, + { + struct GeneratedVisitor; + + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = GeneratedField; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(formatter, "expected one of: {:?}", &FIELDS) + } + + #[allow(unused_variables)] + fn visit_str(self, value: &str) -> std::result::Result + where + E: serde::de::Error, + { + match value { + "key" => Ok(GeneratedField::Key), + "value" => Ok(GeneratedField::Value), + "deleted" => Ok(GeneratedField::Deleted), + _ => Ok(GeneratedField::__SkipField__), + } + } + } + deserializer.deserialize_identifier(GeneratedVisitor) + } + } + struct GeneratedVisitor; + impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { + type Value = watch_response::NvKeyValue; + + fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + formatter.write_str("struct penumbra.cnidarium.v1.WatchResponse.NvKeyValue") + } + + fn visit_map(self, mut map_: V) -> std::result::Result + where + V: serde::de::MapAccess<'de>, + { + let mut key__ = None; + let mut value__ = None; + let mut deleted__ = None; + while let Some(k) = map_.next_key()? 
{ + match k { + GeneratedField::Key => { + if key__.is_some() { + return Err(serde::de::Error::duplicate_field("key")); + } + key__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Value => { + if value__.is_some() { + return Err(serde::de::Error::duplicate_field("value")); + } + value__ = + Some(map_.next_value::<::pbjson::private::BytesDeserialize<_>>()?.0) + ; + } + GeneratedField::Deleted => { + if deleted__.is_some() { + return Err(serde::de::Error::duplicate_field("deleted")); + } + deleted__ = Some(map_.next_value()?); + } + GeneratedField::__SkipField__ => { + let _ = map_.next_value::()?; + } + } + } + Ok(watch_response::NvKeyValue { + key: key__.unwrap_or_default(), + value: value__.unwrap_or_default(), + deleted: deleted__.unwrap_or_default(), + }) + } + } + deserializer.deserialize_struct("penumbra.cnidarium.v1.WatchResponse.NvKeyValue", FIELDS, GeneratedVisitor) + } +} diff --git a/crates/proto/src/gen/penumbra.core.app.v1.rs b/crates/proto/src/gen/penumbra.core.app.v1.rs index 5030d31632..ced7c4b31f 100644 --- a/crates/proto/src/gen/penumbra.core.app.v1.rs +++ b/crates/proto/src/gen/penumbra.core.app.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// Requests the list of all transactions that occurred within a given block. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionsByHeightRequest { /// The block height to retrieve. #[prost(uint64, tag = "2")] @@ -10,13 +10,11 @@ impl ::prost::Name for TransactionsByHeightRequest { const NAME: &'static str = "TransactionsByHeightRequest"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.TransactionsByHeightRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.TransactionsByHeightRequest".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } /// A transaction that appeared within a given block. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionsByHeightResponse { /// The transactions. @@ -32,12 +30,10 @@ impl ::prost::Name for TransactionsByHeightResponse { const NAME: &'static str = "TransactionsByHeightResponse"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.TransactionsByHeightResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.TransactionsByHeightResponse".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AppParameters { /// The chain identifier. @@ -103,25 +99,21 @@ impl ::prost::Name for AppParameters { const NAME: &'static str = "AppParameters"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.AppParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.AppParameters".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } /// Requests the global configuration data for the app. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct AppParametersRequest {} impl ::prost::Name for AppParametersRequest { const NAME: &'static str = "AppParametersRequest"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.AppParametersRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.AppParametersRequest".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AppParametersResponse { #[prost(message, optional, tag = "1")] @@ -131,12 +123,10 @@ impl ::prost::Name for AppParametersResponse { const NAME: &'static str = "AppParametersResponse"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.AppParametersResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.AppParametersResponse".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisAppState { #[prost(oneof = "genesis_app_state::GenesisAppState", tags = "1, 2")] @@ -144,6 +134,7 @@ pub struct GenesisAppState { } /// Nested message and enum types in `GenesisAppState`. pub mod genesis_app_state { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum GenesisAppState { #[prost(message, tag = "1")] @@ -156,12 +147,10 @@ impl ::prost::Name for GenesisAppState { const NAME: &'static str = "GenesisAppState"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.GenesisAppState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.GenesisAppState".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// The initial chain identifier. @@ -227,22 +216,13 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.app.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.app.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.app.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.app.v1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the overall Penumbra application. 
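Aside, not part of the diff: nearly every hunk above is the same mechanical change. The reverted generator builds `full_name()` at runtime from `PACKAGE` and `NAME` and drops the explicit `type_url()` override that the newer generator emitted as a string literal; the resulting full names are identical. A minimal sketch, with the `penumbra_proto` re-export path assumed:

```rust
// Sketch only (not from the patch): the reverted full_name() bodies above all
// evaluate to the same dotted names the literal versions produced.
use prost::Name;
use penumbra_proto::core::app::v1::AppParameters; // assumed re-export path

fn main() {
    // format!("penumbra.core.app.v1.{}", Self::NAME) with NAME = "AppParameters"
    assert_eq!(
        AppParameters::full_name(),
        "penumbra.core.app.v1.AppParameters"
    );
}
```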
@@ -265,8 +245,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -291,7 +271,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -338,7 +318,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -365,7 +346,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -388,17 +370,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Gets the app parameters. async fn app_parameters( &self, @@ -418,18 +394,20 @@ pub mod query_service_server { } /// Query operations for the overall Penumbra application. #[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -479,8 +457,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -492,6 +470,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.app.v1.QueryService/AppParameters" => { #[allow(non_camel_case_types)] @@ -522,6 +501,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AppParametersSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -568,6 +548,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TransactionsByHeightSvc(inner); let codec = 
tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -586,25 +567,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -616,9 +592,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.app.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.app.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.app.v1.serde.rs b/crates/proto/src/gen/penumbra.core.app.v1.serde.rs index 8f8c1b398e..9b910dd7fe 100644 --- a/crates/proto/src/gen/penumbra.core.app.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.app.v1.serde.rs @@ -479,7 +479,6 @@ impl serde::Serialize for GenesisAppState { } genesis_app_state::GenesisAppState::GenesisCheckpoint(v) => { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("genesisCheckpoint", pbjson::private::base64::encode(&v).as_str())?; } } @@ -884,7 +883,6 @@ impl serde::Serialize for TransactionsByHeightRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.app.v1.TransactionsByHeightRequest", len)?; if self.block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("blockHeight", ToString::to_string(&self.block_height).as_str())?; } struct_ser.end() @@ -990,7 +988,6 @@ impl serde::Serialize for TransactionsByHeightResponse { } if self.block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("blockHeight", ToString::to_string(&self.block_height).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.asset.v1.rs b/crates/proto/src/gen/penumbra.core.asset.v1.rs index 737329422a..7865016a58 100644 --- a/crates/proto/src/gen/penumbra.core.asset.v1.rs +++ b/crates/proto/src/gen/penumbra.core.asset.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BalanceCommitment { #[prost(bytes = "vec", tag = "1")] @@ -8,13 +8,11 @@ impl ::prost::Name for BalanceCommitment { const NAME: &'static str = "BalanceCommitment"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.BalanceCommitment".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.BalanceCommitment".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// A Penumbra asset ID. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetId { /// The bytes of the asset ID. @@ -42,12 +40,10 @@ impl ::prost::Name for AssetId { const NAME: &'static str = "AssetId"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.AssetId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.AssetId".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Denom { #[prost(string, tag = "1")] @@ -57,13 +53,11 @@ impl ::prost::Name for Denom { const NAME: &'static str = "Denom"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.Denom".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.Denom".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// Describes metadata about a given asset. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Metadata { #[prost(string, tag = "1")] @@ -104,13 +98,11 @@ impl ::prost::Name for Metadata { const NAME: &'static str = "Metadata"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.Metadata".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.Metadata".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// DenomUnit represents a struct that describes a given denomination unit of the basic token. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DenomUnit { /// denom represents the string name of the given denom unit (e.g uatom). 
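Aside, not part of the diff: the `penumbra.cnidarium.v1.serde.rs` hunks earlier in this patch, and the `penumbra.core.asset.v1.serde.rs` hunks just below, follow the usual pbjson mapping, which these changes do not alter: default-valued fields are omitted, keys are camelCase on output (snake_case is also accepted on input), `bytes` fields are base64 strings, and 64-bit integers are written as decimal strings. A rough sketch against the `WatchRequest` and `PrefixValueResponse` impls shown above, with the crate path assumed:

```rust
// Sketch only: the JSON shape produced by the generated serde impls shown
// earlier in this patch. The penumbra_proto::cnidarium::v1 path is assumed.
use penumbra_proto::cnidarium::v1::{PrefixValueResponse, WatchRequest};

fn main() -> Result<(), serde_json::Error> {
    let req = WatchRequest {
        key_regex: "sct/.*".to_string(),
        nv_key_regex: String::new(), // default values are skipped entirely
    };
    // Keys come out camelCase, and the empty field is omitted.
    assert_eq!(serde_json::to_string(&req)?, r#"{"keyRegex":"sct/.*"}"#);
    // Both spellings are accepted on input.
    let back: WatchRequest = serde_json::from_str(r#"{"key_regex":"sct/.*"}"#)?;
    assert_eq!(back.key_regex, "sct/.*");

    // Bytes fields round-trip as base64 strings, e.g. {"key":"k","value":"3q0="}.
    let pv = PrefixValueResponse { key: "k".into(), value: vec![0xde, 0xad] };
    let pv_back: PrefixValueResponse =
        serde_json::from_str(&serde_json::to_string(&pv)?)?;
    assert_eq!(pv_back.value, vec![0xde, 0xad]);
    Ok(())
}
```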
@@ -131,12 +123,10 @@ impl ::prost::Name for DenomUnit { const NAME: &'static str = "DenomUnit"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.DenomUnit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.DenomUnit".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Value { #[prost(message, optional, tag = "1")] @@ -148,12 +138,10 @@ impl ::prost::Name for Value { const NAME: &'static str = "Value"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.Value".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.Value".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Balance { /// Represents the vector of 'Value's in the balance. @@ -162,6 +150,7 @@ pub struct Balance { } /// Nested message and enum types in `Balance`. pub mod balance { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignedValue { #[prost(message, optional, tag = "1")] @@ -173,10 +162,7 @@ pub mod balance { const NAME: &'static str = "SignedValue"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.Balance.SignedValue".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.Balance.SignedValue".into() + ::prost::alloc::format!("penumbra.core.asset.v1.Balance.{}", Self::NAME) } } } @@ -184,13 +170,11 @@ impl ::prost::Name for Balance { const NAME: &'static str = "Balance"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.Balance".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.Balance".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// Represents a value of a known or unknown denomination. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValueView { #[prost(oneof = "value_view::ValueView", tags = "1, 2")] @@ -199,6 +183,7 @@ pub struct ValueView { /// Nested message and enum types in `ValueView`. pub mod value_view { /// A value whose asset ID is known and has metadata. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct KnownAssetId { /// The amount of the value. @@ -226,13 +211,11 @@ pub mod value_view { const NAME: &'static str = "KnownAssetId"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.ValueView.KnownAssetId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.ValueView.KnownAssetId".into() + ::prost::alloc::format!("penumbra.core.asset.v1.ValueView.{}", Self::NAME) } } /// A value whose asset ID is unknown, with no metadata. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UnknownAssetId { #[prost(message, optional, tag = "1")] @@ -244,12 +227,10 @@ pub mod value_view { const NAME: &'static str = "UnknownAssetId"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.ValueView.UnknownAssetId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.ValueView.UnknownAssetId".into() + ::prost::alloc::format!("penumbra.core.asset.v1.ValueView.{}", Self::NAME) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum ValueView { #[prost(message, tag = "1")] @@ -262,13 +243,11 @@ impl ::prost::Name for ValueView { const NAME: &'static str = "ValueView"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.ValueView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.ValueView".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// An image related to an asset. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetImage { /// The URI of the image in PNG format. @@ -282,6 +261,7 @@ pub struct AssetImage { } /// Nested message and enum types in `AssetImage`. pub mod asset_image { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Theme { /// Should be in hex format, `^#\[0-9a-fA-F\]{6}$`. @@ -296,10 +276,7 @@ pub mod asset_image { const NAME: &'static str = "Theme"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.AssetImage.Theme".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.AssetImage.Theme".into() + ::prost::alloc::format!("penumbra.core.asset.v1.AssetImage.{}", Self::NAME) } } } @@ -307,15 +284,13 @@ impl ::prost::Name for AssetImage { const NAME: &'static str = "AssetImage"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.AssetImage".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.AssetImage".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// The estimated price of one asset in terms of a numeraire. /// /// This is used for generating "equivalent values" in ValueViews. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EstimatedPrice { #[prost(message, optional, tag = "1")] @@ -335,10 +310,7 @@ impl ::prost::Name for EstimatedPrice { const NAME: &'static str = "EstimatedPrice"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.EstimatedPrice".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.EstimatedPrice".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } /// An "equivalent" value to a given value, in terms of a numeraire. @@ -347,6 +319,7 @@ impl ::prost::Name for EstimatedPrice { /// stablecoin, or an amount of the staking token, etc. 
A view server can /// optionally include this information to assist a frontend in displaying /// information about the value in a user-friendly way. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EquivalentValue { /// The equivalent amount of the parent Value in terms of the numeraire. @@ -363,9 +336,6 @@ impl ::prost::Name for EquivalentValue { const NAME: &'static str = "EquivalentValue"; const PACKAGE: &'static str = "penumbra.core.asset.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.asset.v1.EquivalentValue".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.asset.v1.EquivalentValue".into() + ::prost::alloc::format!("penumbra.core.asset.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.asset.v1.serde.rs b/crates/proto/src/gen/penumbra.core.asset.v1.serde.rs index e0233c0b07..d760c768a6 100644 --- a/crates/proto/src/gen/penumbra.core.asset.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.asset.v1.serde.rs @@ -18,7 +18,6 @@ impl serde::Serialize for AssetId { let mut struct_ser = serializer.serialize_struct("penumbra.core.asset.v1.AssetId", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } if !self.alt_bech32m.is_empty() { @@ -614,7 +613,6 @@ impl serde::Serialize for BalanceCommitment { let mut struct_ser = serializer.serialize_struct("penumbra.core.asset.v1.BalanceCommitment", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -951,7 +949,6 @@ impl serde::Serialize for EquivalentValue { } if self.as_of_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("asOfHeight", ToString::to_string(&self.as_of_height).as_str())?; } struct_ser.end() @@ -1092,7 +1089,6 @@ impl serde::Serialize for EstimatedPrice { } if self.as_of_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("asOfHeight", ToString::to_string(&self.as_of_height).as_str())?; } struct_ser.end() @@ -1280,7 +1276,6 @@ impl serde::Serialize for Metadata { } if self.priority_score != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("priorityScore", ToString::to_string(&self.priority_score).as_str())?; } if !self.badges.is_empty() { diff --git a/crates/proto/src/gen/penumbra.core.component.auction.v1.rs b/crates/proto/src/gen/penumbra.core.component.auction.v1.rs index b3655eaf63..e490a73599 100644 --- a/crates/proto/src/gen/penumbra.core.component.auction.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.auction.v1.rs @@ -1,19 +1,17 @@ -// This file is @generated by prost-build. /// The configuration parameters for the auction component. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionParameters {} impl ::prost::Name for AuctionParameters { const NAME: &'static str = "AuctionParameters"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionParameters".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// Genesis data for the auction component. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// The configuration parameters for the auction component at genesis. #[prost(message, optional, tag = "1")] @@ -23,12 +21,10 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionStateByIdRequest { #[prost(message, optional, tag = "1")] @@ -38,12 +34,10 @@ impl ::prost::Name for AuctionStateByIdRequest { const NAME: &'static str = "AuctionStateByIdRequest"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionStateByIdRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionStateByIdRequest".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionStateByIdResponse { /// If present, the state of the auction. If not present, no such auction is known. @@ -59,12 +53,10 @@ impl ::prost::Name for AuctionStateByIdResponse { const NAME: &'static str = "AuctionStateByIdResponse"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionStateByIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionStateByIdResponse".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionStateByIdsRequest { /// The auction IDs to request. Only known IDs will be returned in the response. 
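Aside, not part of the diff: two other recurring differences are visible in the hunks above. Empty messages such as `AuctionParameters` lose the `Copy` derive, and every message regains `#[allow(clippy::derive_partial_eq_without_eq)]`. The allow is there roughly because prost-generated messages derive `PartialEq` but deliberately not `Eq` (a float field would make `Eq` impossible), which trips that clippy lint whenever all of a message's current fields happen to be `Eq`-capable. A hypothetical reduction, not code from the patch:

```rust
// Hypothetical reduction of the pattern above, not code from the patch.
// All fields here are Eq-capable, so without the allow clippy would suggest
// also deriving Eq; prost-generated messages stick to PartialEq only.
#[allow(clippy::derive_partial_eq_without_eq)]
#[derive(Clone, PartialEq)]
struct AuctionIdLike {
    inner: Vec<u8>,
}

fn main() {
    let id = AuctionIdLike { inner: vec![0u8; 32] };
    assert!(id == id.clone());
}
```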
@@ -75,12 +67,10 @@ impl ::prost::Name for AuctionStateByIdsRequest { const NAME: &'static str = "AuctionStateByIdsRequest"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionStateByIdsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionStateByIdsRequest".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionStateByIdsResponse { /// The auction ID of the returned auction. @@ -99,14 +89,12 @@ impl ::prost::Name for AuctionStateByIdsResponse { const NAME: &'static str = "AuctionStateByIdsResponse"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionStateByIdsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionStateByIdsResponse".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// A unique identifier for an auction, obtained from hashing a domain separator /// along with the immutable part of an auction description. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionId { #[prost(bytes = "vec", tag = "1")] @@ -116,13 +104,11 @@ impl ::prost::Name for AuctionId { const NAME: &'static str = "AuctionId"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionId".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// A bearer NFT tracking ownership of an auction and its proceeds. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionNft { #[prost(message, optional, tag = "1")] @@ -134,13 +120,11 @@ impl ::prost::Name for AuctionNft { const NAME: &'static str = "AuctionNft"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.AuctionNft".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.AuctionNft".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// Describes a Dutch auction using programmatic liquidity on the DEX. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DutchAuctionDescription { /// The value the seller wishes to auction. 
@@ -184,12 +168,10 @@ impl ::prost::Name for DutchAuctionDescription { const NAME: &'static str = "DutchAuctionDescription"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.DutchAuctionDescription".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.DutchAuctionDescription".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DutchAuctionState { /// The sequence number of the auction state. @@ -221,12 +203,10 @@ impl ::prost::Name for DutchAuctionState { const NAME: &'static str = "DutchAuctionState"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.DutchAuctionState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.DutchAuctionState".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DutchAuction { /// The immutable data describing the auction and its auction ID. @@ -240,13 +220,11 @@ impl ::prost::Name for DutchAuction { const NAME: &'static str = "DutchAuction"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.DutchAuction".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.DutchAuction".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// Initiates a Dutch auction using protocol-controlled liquidity. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionSchedule { #[prost(message, optional, tag = "1")] @@ -256,13 +234,11 @@ impl ::prost::Name for ActionDutchAuctionSchedule { const NAME: &'static str = "ActionDutchAuctionSchedule"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.ActionDutchAuctionSchedule".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.ActionDutchAuctionSchedule".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// Terminate the auction associated with the specified `auction_id` +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionEnd { /// The auction to end. 
@@ -273,13 +249,11 @@ impl ::prost::Name for ActionDutchAuctionEnd { const NAME: &'static str = "ActionDutchAuctionEnd"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.ActionDutchAuctionEnd".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.ActionDutchAuctionEnd".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// Withdraw funds from the ended auction associated with the specified `auction_id` +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionWithdraw { /// The auction to withdraw funds from. @@ -302,13 +276,11 @@ impl ::prost::Name for ActionDutchAuctionWithdraw { const NAME: &'static str = "ActionDutchAuctionWithdraw"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.ActionDutchAuctionWithdraw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.ActionDutchAuctionWithdraw".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// A plan to a `ActionDutchAuctionWithdraw` which contains both private and public data. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionWithdrawPlan { #[prost(message, optional, tag = "1")] @@ -324,13 +296,11 @@ impl ::prost::Name for ActionDutchAuctionWithdrawPlan { const NAME: &'static str = "ActionDutchAuctionWithdrawPlan"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.ActionDutchAuctionWithdrawPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.ActionDutchAuctionWithdrawPlan".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// An `ActionDutchAuctionSchedule` augmented with additional metadata. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionScheduleView { #[prost(message, optional, tag = "1")] @@ -348,13 +318,11 @@ impl ::prost::Name for ActionDutchAuctionScheduleView { const NAME: &'static str = "ActionDutchAuctionScheduleView"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.ActionDutchAuctionScheduleView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.ActionDutchAuctionScheduleView".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// An `ActionDutchAuctionWithdraw` augmented with additional metadata. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionWithdrawView { #[prost(message, optional, tag = "1")] @@ -368,12 +336,10 @@ impl ::prost::Name for ActionDutchAuctionWithdrawView { const NAME: &'static str = "ActionDutchAuctionWithdrawView"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.ActionDutchAuctionWithdrawView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.ActionDutchAuctionWithdrawView".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventDutchAuctionScheduled { #[prost(message, optional, tag = "1")] @@ -385,12 +351,10 @@ impl ::prost::Name for EventDutchAuctionScheduled { const NAME: &'static str = "EventDutchAuctionScheduled"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.EventDutchAuctionScheduled".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.EventDutchAuctionScheduled".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventDutchAuctionUpdated { #[prost(message, optional, tag = "1")] @@ -402,12 +366,10 @@ impl ::prost::Name for EventDutchAuctionUpdated { const NAME: &'static str = "EventDutchAuctionUpdated"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.EventDutchAuctionUpdated".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.EventDutchAuctionUpdated".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventDutchAuctionEnded { #[prost(message, optional, tag = "1")] @@ -448,10 +410,10 @@ pub mod event_dutch_auction_ended { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "REASON_UNSPECIFIED", - Self::Expired => "REASON_EXPIRED", - Self::Filled => "REASON_FILLED", - Self::ClosedByOwner => "REASON_CLOSED_BY_OWNER", + Reason::Unspecified => "REASON_UNSPECIFIED", + Reason::Expired => "REASON_EXPIRED", + Reason::Filled => "REASON_FILLED", + Reason::ClosedByOwner => "REASON_CLOSED_BY_OWNER", } } /// Creates an enum from field names used in the ProtoBuf definition. 
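Aside, not part of the diff: the only change to the `Reason` enum above is the `Self::` versus `Reason::` spelling in the match arms; the generated string mapping is the same in both generator versions. A small sketch of the helpers, with the module path assumed:

```rust
// Sketch only: the generated Reason helpers shown above.
// The penumbra_proto module path is assumed.
use penumbra_proto::core::component::auction::v1::event_dutch_auction_ended::Reason;

fn main() {
    // as_str_name maps variants to the SCREAMING_SNAKE names from the proto.
    assert_eq!(Reason::Expired.as_str_name(), "REASON_EXPIRED");
    // from_str_name ("Creates an enum from field names used in the ProtoBuf
    // definition") is the inverse, returning None for unknown strings.
    assert_eq!(Reason::from_str_name("REASON_FILLED"), Some(Reason::Filled));
    assert_eq!(Reason::from_str_name("not-a-reason"), None);
}
```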
@@ -470,12 +432,10 @@ impl ::prost::Name for EventDutchAuctionEnded { const NAME: &'static str = "EventDutchAuctionEnded"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.EventDutchAuctionEnded".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.EventDutchAuctionEnded".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventDutchAuctionWithdrawn { #[prost(message, optional, tag = "1")] @@ -487,13 +447,11 @@ impl ::prost::Name for EventDutchAuctionWithdrawn { const NAME: &'static str = "EventDutchAuctionWithdrawn"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.EventDutchAuctionWithdrawn".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.EventDutchAuctionWithdrawn".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// A message emitted when value flows *into* the auction component. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValueCircuitBreakerCredit { /// The asset ID being deposited into the Auction component. @@ -510,13 +468,11 @@ impl ::prost::Name for EventValueCircuitBreakerCredit { const NAME: &'static str = "EventValueCircuitBreakerCredit"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.EventValueCircuitBreakerCredit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.EventValueCircuitBreakerCredit".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// A message emitted when value flows *out* of the auction component. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValueCircuitBreakerDebit { /// The asset ID being deposited into the Auction component. @@ -533,22 +489,13 @@ impl ::prost::Name for EventValueCircuitBreakerDebit { const NAME: &'static str = "EventValueCircuitBreakerDebit"; const PACKAGE: &'static str = "penumbra.core.component.auction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.auction.v1.EventValueCircuitBreakerDebit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.auction.v1.EventValueCircuitBreakerDebit".into() + ::prost::alloc::format!("penumbra.core.component.auction.v1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the auction component. 
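Aside, not part of the diff: the client and server hunks below are again mechanical. Trait bounds return to the bare `Send`/`Sync` spellings, `tonic::Status::unknown(..)` becomes `tonic::Status::new(tonic::Code::Unknown, ..)`, and the server type regains its `_Inner` wrapper. None of this changes how the generated auction client is driven; a rough sketch, where the endpoint and crate path are assumptions:

```rust
// Sketch only: calling the generated auction QueryServiceClient whose plumbing
// the hunks below touch. Endpoint and crate path are illustrative assumptions.
use penumbra_proto::core::component::auction::v1::{
    query_service_client::QueryServiceClient, AuctionStateByIdRequest,
};

async fn fetch_auction_state() -> Result<(), Box<dyn std::error::Error>> {
    // Any GrpcService works with `new`; a transport Channel is the usual choice.
    let channel = tonic::transport::Channel::from_static("http://127.0.0.1:8080")
        .connect()
        .await?;
    let mut client = QueryServiceClient::new(channel);

    // In real use the request's auction id field would be set; its exact name
    // is elided in the hunks above, so a default request is used here.
    let resp = client
        .auction_state_by_id(AuctionStateByIdRequest::default())
        .await?
        .into_inner();
    // The response carries the optional auction state described above.
    println!("{:?}", resp);
    Ok(())
}
```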
@@ -571,8 +518,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -597,7 +544,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -644,7 +591,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -674,7 +622,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -697,17 +646,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Get the current state of an auction by ID. async fn auction_state_by_id( &self, @@ -723,7 +666,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Get the current state of a group of auctions by ID. async fn auction_state_by_ids( @@ -736,18 +679,20 @@ pub mod query_service_server { } /// Query operations for the auction component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -797,8 +742,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -810,6 +755,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.auction.v1.QueryService/AuctionStateById" => { #[allow(non_camel_case_types)] @@ -841,6 +787,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuctionStateByIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -889,6 +836,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuctionStateByIdsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -907,25 +855,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -937,9 +880,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.auction.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.auction.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.auction.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.auction.v1.serde.rs index 56aae8af10..d9fa61ccf8 100644 --- a/crates/proto/src/gen/penumbra.core.component.auction.v1.serde.rs +++ 
b/crates/proto/src/gen/penumbra.core.component.auction.v1.serde.rs @@ -361,7 +361,6 @@ impl serde::Serialize for ActionDutchAuctionWithdraw { } if self.seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("seq", ToString::to_string(&self.seq).as_str())?; } if let Some(v) = self.reserves_commitment.as_ref() { @@ -499,7 +498,6 @@ impl serde::Serialize for ActionDutchAuctionWithdrawPlan { } if self.seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("seq", ToString::to_string(&self.seq).as_str())?; } if let Some(v) = self.reserves_input.as_ref() { @@ -752,7 +750,6 @@ impl serde::Serialize for AuctionId { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.auction.v1.AuctionId", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -857,7 +854,6 @@ impl serde::Serialize for AuctionNft { } if self.seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("seq", ToString::to_string(&self.seq).as_str())?; } struct_ser.end() @@ -1615,22 +1611,18 @@ impl serde::Serialize for DutchAuctionDescription { } if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } if self.end_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endHeight", ToString::to_string(&self.end_height).as_str())?; } if self.step_count != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("stepCount", ToString::to_string(&self.step_count).as_str())?; } if !self.nonce.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nonce", pbjson::private::base64::encode(&self.nonce).as_str())?; } struct_ser.end() @@ -1830,7 +1822,6 @@ impl serde::Serialize for DutchAuctionState { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.auction.v1.DutchAuctionState", len)?; if self.seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("seq", ToString::to_string(&self.seq).as_str())?; } if let Some(v) = self.current_position.as_ref() { @@ -1838,7 +1829,6 @@ impl serde::Serialize for DutchAuctionState { } if self.next_trigger != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nextTrigger", ToString::to_string(&self.next_trigger).as_str())?; } if let Some(v) = self.input_reserves.as_ref() { diff --git a/crates/proto/src/gen/penumbra.core.component.community_pool.v1.rs b/crates/proto/src/gen/penumbra.core.component.community_pool.v1.rs index aa173ad4d0..98e2df1748 100644 --- a/crates/proto/src/gen/penumbra.core.component.community_pool.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.community_pool.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// CommunityPool parameter data. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolParameters { /// Whether Community Pool spend proposals are enabled. #[prost(bool, tag = "1")] @@ -10,13 +10,13 @@ impl ::prost::Name for CommunityPoolParameters { const NAME: &'static str = "CommunityPoolParameters"; const PACKAGE: &'static str = "penumbra.core.component.community_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.community_pool.v1.CommunityPoolParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.community_pool.v1.CommunityPoolParameters".into() + ::prost::alloc::format!( + "penumbra.core.component.community_pool.v1.{}", Self::NAME + ) } } /// CommunityPool genesis state. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// CommunityPool parameters. @@ -30,13 +30,13 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.community_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.community_pool.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.community_pool.v1.GenesisContent".into() + ::prost::alloc::format!( + "penumbra.core.component.community_pool.v1.{}", Self::NAME + ) } } /// Requests the list of all asset balances associated with the Community Pool. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolAssetBalancesRequest { /// (Optional): The specific asset balances to retrieve, if excluded all will be returned. @@ -47,15 +47,13 @@ impl ::prost::Name for CommunityPoolAssetBalancesRequest { const NAME: &'static str = "CommunityPoolAssetBalancesRequest"; const PACKAGE: &'static str = "penumbra.core.component.community_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.community_pool.v1.CommunityPoolAssetBalancesRequest" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.community_pool.v1.CommunityPoolAssetBalancesRequest" - .into() + ::prost::alloc::format!( + "penumbra.core.component.community_pool.v1.{}", Self::NAME + ) } } /// The Community Pool's balance of a single asset. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolAssetBalancesResponse { /// The balance for a single asset. @@ -66,24 +64,15 @@ impl ::prost::Name for CommunityPoolAssetBalancesResponse { const NAME: &'static str = "CommunityPoolAssetBalancesResponse"; const PACKAGE: &'static str = "penumbra.core.component.community_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.community_pool.v1.CommunityPoolAssetBalancesResponse" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.community_pool.v1.CommunityPoolAssetBalancesResponse" - .into() + ::prost::alloc::format!( + "penumbra.core.component.community_pool.v1.{}", Self::NAME + ) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the community_pool component. @@ -106,8 +95,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -132,7 +121,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -180,7 +169,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -203,17 +193,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Server streaming response type for the CommunityPoolAssetBalances method. type CommunityPoolAssetBalancesStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result< @@ -221,7 +205,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; async fn community_pool_asset_balances( &self, @@ -233,18 +217,20 @@ pub mod query_service_server { } /// Query operations for the community_pool component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -294,8 +280,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -307,6 +293,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.community_pool.v1.QueryService/CommunityPoolAssetBalances" => { #[allow(non_camel_case_types)] @@ -345,6 +332,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CommunityPoolAssetBalancesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -363,25 +351,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -393,9 +376,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.community_pool.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.community_pool.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.compact_block.v1.rs b/crates/proto/src/gen/penumbra.core.component.compact_block.v1.rs index 4c637af38c..08d2c5abf6 100644 --- a/crates/proto/src/gen/penumbra.core.component.compact_block.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.compact_block.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// Contains the minimum data needed to update client state. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CompactBlock { #[prost(uint64, tag = "1")] @@ -50,13 +50,13 @@ impl ::prost::Name for CompactBlock { const NAME: &'static str = "CompactBlock"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.CompactBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.CompactBlock".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.{}", Self::NAME + ) } } /// An encrypted payload, corresponding to a single commitment in the state commitment tree. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct StatePayload { /// The source of the payload, if known. @@ -68,6 +68,7 @@ pub struct StatePayload { } /// Nested message and enum types in `StatePayload`. pub mod state_payload { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RolledUp { #[prost(message, optional, tag = "1")] @@ -79,12 +80,12 @@ pub mod state_payload { const NAME: &'static str = "RolledUp"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.StatePayload.RolledUp".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.StatePayload.RolledUp".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.StatePayload.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Note { #[prost(message, optional, tag = "2")] @@ -96,12 +97,12 @@ pub mod state_payload { const NAME: &'static str = "Note"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.StatePayload.Note".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.StatePayload.Note".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.StatePayload.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Swap { #[prost(message, optional, tag = "2")] @@ -111,13 +112,13 @@ pub mod state_payload { const NAME: &'static str = "Swap"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.StatePayload.Swap".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.StatePayload.Swap".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.StatePayload.{}", Self::NAME + ) } } /// The state payload itself. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum StatePayload { #[prost(message, tag = "2")] @@ -132,14 +133,14 @@ impl ::prost::Name for StatePayload { const NAME: &'static str = "StatePayload"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.StatePayload".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.StatePayload".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.{}", Self::NAME + ) } } /// Requests a range of compact block data. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct CompactBlockRangeRequest { /// The start height of the range. #[prost(uint64, tag = "2")] @@ -156,12 +157,12 @@ impl ::prost::Name for CompactBlockRangeRequest { const NAME: &'static str = "CompactBlockRangeRequest"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.CompactBlockRangeRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.CompactBlockRangeRequest".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CompactBlockRangeResponse { #[prost(message, optional, tag = "1")] @@ -171,13 +172,13 @@ impl ::prost::Name for CompactBlockRangeResponse { const NAME: &'static str = "CompactBlockRangeResponse"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.CompactBlockRangeResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.CompactBlockRangeResponse".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.{}", Self::NAME + ) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct CompactBlockRequest { #[prost(uint64, tag = "1")] pub height: u64, @@ -186,12 +187,12 @@ impl ::prost::Name for CompactBlockRequest { const NAME: &'static str = "CompactBlockRequest"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.CompactBlockRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.compact_block.v1.CompactBlockRequest".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CompactBlockResponse { #[prost(message, optional, tag = "1")] @@ -201,22 +202,15 @@ impl ::prost::Name for CompactBlockResponse { const NAME: &'static str = "CompactBlockResponse"; const PACKAGE: &'static str = "penumbra.core.component.compact_block.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.compact_block.v1.CompactBlockResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - 
"/penumbra.core.component.compact_block.v1.CompactBlockResponse".into() + ::prost::alloc::format!( + "penumbra.core.component.compact_block.v1.{}", Self::NAME + ) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the compact block component. @@ -239,8 +233,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -265,7 +259,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -312,7 +306,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -344,7 +339,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -367,17 +363,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Server streaming response type for the CompactBlockRange method. type CompactBlockRangeStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result< @@ -385,7 +375,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Returns a stream of compact blocks, optionally keeping the stream alive for push notifications. async fn compact_block_range( @@ -408,18 +398,20 @@ pub mod query_service_server { } /// Query operations for the compact block component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -469,8 +461,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -482,6 +474,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.compact_block.v1.QueryService/CompactBlockRange" => { #[allow(non_camel_case_types)] @@ -515,6 +508,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CompactBlockRangeSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -560,6 +554,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CompactBlockSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -578,25 +573,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -608,9 +598,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.compact_block.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.compact_block.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.compact_block.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.compact_block.v1.serde.rs index 70d2530169..0f4dee3282 100644 --- a/crates/proto/src/gen/penumbra.core.component.compact_block.v1.serde.rs +++ 
b/crates/proto/src/gen/penumbra.core.component.compact_block.v1.serde.rs @@ -45,7 +45,6 @@ impl serde::Serialize for CompactBlock { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.compact_block.v1.CompactBlock", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if !self.state_payloads.is_empty() { @@ -80,7 +79,6 @@ impl serde::Serialize for CompactBlock { } if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } struct_ser.end() @@ -318,12 +316,10 @@ impl serde::Serialize for CompactBlockRangeRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.compact_block.v1.CompactBlockRangeRequest", len)?; if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } if self.end_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endHeight", ToString::to_string(&self.end_height).as_str())?; } if self.keep_alive { @@ -548,7 +544,6 @@ impl serde::Serialize for CompactBlockRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.compact_block.v1.CompactBlockRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.dex.v1.rs b/crates/proto/src/gen/penumbra.core.component.dex.v1.rs index 4b907234c2..3878535c9e 100644 --- a/crates/proto/src/gen/penumbra.core.component.dex.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.dex.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// A Penumbra ZK swap proof. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkSwapProof { #[prost(bytes = "vec", tag = "1")] @@ -9,13 +9,11 @@ impl ::prost::Name for ZkSwapProof { const NAME: &'static str = "ZKSwapProof"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.ZKSwapProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.ZKSwapProof".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A Penumbra ZK swap claim proof. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkSwapClaimProof { #[prost(bytes = "vec", tag = "1")] @@ -25,13 +23,11 @@ impl ::prost::Name for ZkSwapClaimProof { const NAME: &'static str = "ZKSwapClaimProof"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.ZKSwapClaimProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.ZKSwapClaimProof".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A transaction action that submits a swap to the dex. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Swap { /// Contains the Swap proof. @@ -45,16 +41,14 @@ impl ::prost::Name for Swap { const NAME: &'static str = "Swap"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.Swap".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.Swap".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A transaction action that obtains assets previously confirmed /// via a Swap transaction. Does not include a spend authorization /// signature, as it is only capable of consuming the NFT from a /// Swap transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapClaim { /// Contains the SwapClaim proof. @@ -71,13 +65,11 @@ impl ::prost::Name for SwapClaim { const NAME: &'static str = "SwapClaim"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapClaim".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Encapsulates the authorized fields of the SwapClaim action, used in signing. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapClaimBody { /// The nullifier for the Swap commitment to be consumed. @@ -104,13 +96,11 @@ impl ::prost::Name for SwapClaimBody { const NAME: &'static str = "SwapClaimBody"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapClaimBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapClaimBody".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// The authorized data of a Swap transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapBody { /// The trading pair to swap. @@ -138,12 +128,10 @@ impl ::prost::Name for SwapBody { const NAME: &'static str = "SwapBody"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapBody".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapPayload { #[prost(message, optional, tag = "1")] @@ -157,12 +145,10 @@ impl ::prost::Name for SwapPayload { const NAME: &'static str = "SwapPayload"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapPayload".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapPayload".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapPlaintext { /// The trading pair to swap. 
@@ -188,12 +174,10 @@ impl ::prost::Name for SwapPlaintext { const NAME: &'static str = "SwapPlaintext"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapPlaintext".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapPlaintext".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapPlan { /// The plaintext version of the swap to be performed. @@ -213,12 +197,10 @@ impl ::prost::Name for SwapPlan { const NAME: &'static str = "SwapPlan"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapPlan".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapClaimPlan { /// The plaintext version of the swap to be performed. @@ -244,12 +226,10 @@ impl ::prost::Name for SwapClaimPlan { const NAME: &'static str = "SwapClaimPlan"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapClaimPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapClaimPlan".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapView { #[prost(oneof = "swap_view::SwapView", tags = "1, 2")] @@ -257,6 +237,7 @@ pub struct SwapView { } /// Nested message and enum types in `SwapView`. pub mod swap_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Visible { /// The underlying Swap action being viewed. 
@@ -314,12 +295,12 @@ pub mod swap_view { const NAME: &'static str = "Visible"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapView.Visible".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapView.Visible".into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SwapView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -364,12 +345,12 @@ pub mod swap_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapView.Opaque".into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SwapView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum SwapView { #[prost(message, tag = "1")] @@ -382,12 +363,10 @@ impl ::prost::Name for SwapView { const NAME: &'static str = "SwapView"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapView".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapClaimView { #[prost(oneof = "swap_claim_view::SwapClaimView", tags = "1, 2")] @@ -395,6 +374,7 @@ pub struct SwapClaimView { } /// Nested message and enum types in `SwapClaimView`. 
pub mod swap_claim_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Visible { #[prost(message, optional, tag = "1")] @@ -421,12 +401,12 @@ pub mod swap_claim_view { const NAME: &'static str = "Visible"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapClaimView.Visible".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapClaimView.Visible".into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SwapClaimView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -436,12 +416,12 @@ pub mod swap_claim_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapClaimView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapClaimView.Opaque".into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SwapClaimView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum SwapClaimView { #[prost(message, tag = "1")] @@ -454,15 +434,13 @@ impl ::prost::Name for SwapClaimView { const NAME: &'static str = "SwapClaimView"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapClaimView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapClaimView".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Holds two asset IDs. Ordering doesn't reflect trading direction. Instead, we /// require `asset_1 < asset_2` as field elements, to ensure a canonical /// representation of an unordered pair. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TradingPair { /// The first asset of the pair. @@ -476,14 +454,12 @@ impl ::prost::Name for TradingPair { const NAME: &'static str = "TradingPair"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.TradingPair".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.TradingPair".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Encodes a trading pair starting from asset `start` /// and ending on asset `end`. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DirectedTradingPair { /// The start asset of the pair. @@ -497,16 +473,14 @@ impl ::prost::Name for DirectedTradingPair { const NAME: &'static str = "DirectedTradingPair"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.DirectedTradingPair".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.DirectedTradingPair".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Records the result of a batch swap on-chain. 
/// /// Used as a public input to a swap claim proof, as it implies the effective /// clearing price for the batch. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BatchSwapOutputData { /// The total amount of asset 1 that was input to the batch swap. @@ -545,16 +519,14 @@ impl ::prost::Name for BatchSwapOutputData { const NAME: &'static str = "BatchSwapOutputData"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.BatchSwapOutputData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.BatchSwapOutputData".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// The trading function for a specific pair. /// For a pair (asset_1, asset_2), a trading function is defined by: /// `phi(R) = p*R_1 + q*R_2` and `gamma = 1 - fee`. /// The trading function is frequently referred to as "phi". +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TradingFunction { #[prost(message, optional, tag = "1")] @@ -566,10 +538,7 @@ impl ::prost::Name for TradingFunction { const NAME: &'static str = "TradingFunction"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.TradingFunction".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.TradingFunction".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// The minimum amount of data describing a trading function. @@ -577,7 +546,8 @@ impl ::prost::Name for TradingFunction { /// This implicitly treats the trading function as being between assets 1 and 2, /// without specifying what those assets are, to avoid duplicating data (each /// asset ID alone is twice the size of the trading function). -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct BareTradingFunction { #[prost(uint32, tag = "1")] pub fee: u32, @@ -592,10 +562,7 @@ impl ::prost::Name for BareTradingFunction { const NAME: &'static str = "BareTradingFunction"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.BareTradingFunction".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.BareTradingFunction".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// The reserves of a position. @@ -604,7 +571,8 @@ impl ::prost::Name for BareTradingFunction { /// between assets 1 and 2, without specifying what those assets are, to avoid /// duplicating data (each asset ID alone is four times the size of the /// reserves). 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Reserves { #[prost(message, optional, tag = "1")] pub r1: ::core::option::Option, @@ -615,13 +583,11 @@ impl ::prost::Name for Reserves { const NAME: &'static str = "Reserves"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.Reserves".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.Reserves".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Data identifying a position. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Position { #[prost(message, optional, tag = "1")] @@ -646,13 +612,11 @@ impl ::prost::Name for Position { const NAME: &'static str = "Position"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.Position".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.Position".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A hash of a `Position`. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionId { /// The bytes of the position ID. @@ -672,14 +636,12 @@ impl ::prost::Name for PositionId { const NAME: &'static str = "PositionId"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionId".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// The state of a position. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionState { #[prost(enumeration = "position_state::PositionStateEnum", tag = "1")] pub state: i32, @@ -727,11 +689,11 @@ pub mod position_state { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "POSITION_STATE_ENUM_UNSPECIFIED", - Self::Opened => "POSITION_STATE_ENUM_OPENED", - Self::Closed => "POSITION_STATE_ENUM_CLOSED", - Self::Withdrawn => "POSITION_STATE_ENUM_WITHDRAWN", - Self::Claimed => "POSITION_STATE_ENUM_CLAIMED", + PositionStateEnum::Unspecified => "POSITION_STATE_ENUM_UNSPECIFIED", + PositionStateEnum::Opened => "POSITION_STATE_ENUM_OPENED", + PositionStateEnum::Closed => "POSITION_STATE_ENUM_CLOSED", + PositionStateEnum::Withdrawn => "POSITION_STATE_ENUM_WITHDRAWN", + PositionStateEnum::Claimed => "POSITION_STATE_ENUM_CLAIMED", } } /// Creates an enum from field names used in the ProtoBuf definition. 
@@ -751,10 +713,7 @@ impl ::prost::Name for PositionState { const NAME: &'static str = "PositionState"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionState".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// An LPNFT tracking both ownership and state of a position. @@ -772,6 +731,7 @@ impl ::prost::Name for PositionState { /// require any cryptographic implementation (proofs, signatures, etc), other /// than hooking into the value commitment mechanism used for transaction /// balances. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LpNft { #[prost(message, optional, tag = "1")] @@ -783,16 +743,14 @@ impl ::prost::Name for LpNft { const NAME: &'static str = "LpNft"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LpNft".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LpNft".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A transaction action that opens a new position. /// /// This action's contribution to the transaction's value balance is to consume /// the initial reserves and contribute an opened position NFT. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionOpen { /// Contains the data defining the position, sufficient to compute its `PositionId`. @@ -806,10 +764,7 @@ impl ::prost::Name for PositionOpen { const NAME: &'static str = "PositionOpen"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionOpen".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionOpen".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A transaction action that closes a position. @@ -821,6 +776,7 @@ impl ::prost::Name for PositionOpen { /// transactions (like any ZK transaction model) are early-binding: the prover /// must know the state transition they prove knowledge of, and they cannot know /// the final reserves with certainty until after the position has been deactivated. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionClose { #[prost(message, optional, tag = "1")] @@ -830,10 +786,7 @@ impl ::prost::Name for PositionClose { const NAME: &'static str = "PositionClose"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionClose".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionClose".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// A transaction action that withdraws funds from a closed position. @@ -841,6 +794,7 @@ impl ::prost::Name for PositionClose { /// This action's contribution to the transaction's value balance is to consume a /// closed position NFT and contribute a withdrawn position NFT, as well as all /// of the funds that were in the position at the time of closing. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionWithdraw { #[prost(message, optional, tag = "1")] @@ -862,13 +816,11 @@ impl ::prost::Name for PositionWithdraw { const NAME: &'static str = "PositionWithdraw"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionWithdraw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionWithdraw".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Deprecated. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionRewardClaim { #[prost(message, optional, tag = "1")] @@ -882,13 +834,11 @@ impl ::prost::Name for PositionRewardClaim { const NAME: &'static str = "PositionRewardClaim"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionRewardClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionRewardClaim".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Contains the entire execution of a particular swap. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapExecution { #[prost(message, repeated, tag = "1")] @@ -903,6 +853,7 @@ pub struct SwapExecution { /// Nested message and enum types in `SwapExecution`. pub mod swap_execution { /// Contains all individual steps consisting of a trade trace. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Trace { /// Each step in the trade trace. @@ -915,10 +866,9 @@ pub mod swap_execution { const NAME: &'static str = "Trace"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapExecution.Trace".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapExecution.Trace".into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SwapExecution.{}", Self::NAME + ) } } } @@ -926,13 +876,11 @@ impl ::prost::Name for SwapExecution { const NAME: &'static str = "SwapExecution"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapExecution".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapExecution".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Contains private and public data for withdrawing funds from a closed position. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionWithdrawPlan { #[prost(message, optional, tag = "1")] @@ -952,14 +900,12 @@ impl ::prost::Name for PositionWithdrawPlan { const NAME: &'static str = "PositionWithdrawPlan"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionWithdrawPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionWithdrawPlan".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Deprecated. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionRewardClaimPlan { #[prost(message, optional, tag = "1")] pub reserves: ::core::option::Option, @@ -968,13 +914,11 @@ impl ::prost::Name for PositionRewardClaimPlan { const NAME: &'static str = "PositionRewardClaimPlan"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.PositionRewardClaimPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.PositionRewardClaimPlan".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Requests batch swap data associated with a given height and trading pair from the view service. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BatchSwapOutputDataRequest { #[prost(uint64, tag = "2")] @@ -986,12 +930,10 @@ impl ::prost::Name for BatchSwapOutputDataRequest { const NAME: &'static str = "BatchSwapOutputDataRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.BatchSwapOutputDataRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.BatchSwapOutputDataRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BatchSwapOutputDataResponse { #[prost(message, optional, tag = "1")] @@ -1001,12 +943,10 @@ impl ::prost::Name for BatchSwapOutputDataResponse { const NAME: &'static str = "BatchSwapOutputDataResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.BatchSwapOutputDataResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.BatchSwapOutputDataResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapExecutionRequest { #[prost(uint64, tag = "2")] @@ -1018,12 +958,10 @@ impl ::prost::Name for SwapExecutionRequest { const NAME: &'static str = "SwapExecutionRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapExecutionRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapExecutionRequest".into() + 
::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapExecutionResponse { #[prost(message, optional, tag = "1")] @@ -1033,13 +971,11 @@ impl ::prost::Name for SwapExecutionResponse { const NAME: &'static str = "SwapExecutionResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapExecutionResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapExecutionResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ArbExecutionRequest { #[prost(uint64, tag = "2")] pub height: u64, @@ -1048,12 +984,10 @@ impl ::prost::Name for ArbExecutionRequest { const NAME: &'static str = "ArbExecutionRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.ArbExecutionRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.ArbExecutionRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ArbExecutionResponse { #[prost(message, optional, tag = "1")] @@ -1065,12 +999,10 @@ impl ::prost::Name for ArbExecutionResponse { const NAME: &'static str = "ArbExecutionResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.ArbExecutionResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.ArbExecutionResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapExecutionsRequest { /// If present, only return swap executions occurring after the given height. 
@@ -1087,12 +1019,10 @@ impl ::prost::Name for SwapExecutionsRequest { const NAME: &'static str = "SwapExecutionsRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapExecutionsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapExecutionsRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapExecutionsResponse { #[prost(message, optional, tag = "1")] @@ -1106,13 +1036,11 @@ impl ::prost::Name for SwapExecutionsResponse { const NAME: &'static str = "SwapExecutionsResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SwapExecutionsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SwapExecutionsResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ArbExecutionsRequest { /// If present, only return arb executions occurring after the given height. #[prost(uint64, tag = "2")] @@ -1125,12 +1053,10 @@ impl ::prost::Name for ArbExecutionsRequest { const NAME: &'static str = "ArbExecutionsRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.ArbExecutionsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.ArbExecutionsRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ArbExecutionsResponse { #[prost(message, optional, tag = "1")] @@ -1142,13 +1068,11 @@ impl ::prost::Name for ArbExecutionsResponse { const NAME: &'static str = "ArbExecutionsResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.ArbExecutionsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.ArbExecutionsResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionsRequest { /// If true, include closed and withdrawn positions. 
#[prost(bool, tag = "4")] @@ -1158,12 +1082,10 @@ impl ::prost::Name for LiquidityPositionsRequest { const NAME: &'static str = "LiquidityPositionsRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionsRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionsResponse { #[prost(message, optional, tag = "1")] @@ -1173,12 +1095,10 @@ impl ::prost::Name for LiquidityPositionsResponse { const NAME: &'static str = "LiquidityPositionsResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionsResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionByIdRequest { #[prost(message, optional, tag = "2")] @@ -1188,12 +1108,10 @@ impl ::prost::Name for LiquidityPositionByIdRequest { const NAME: &'static str = "LiquidityPositionByIdRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionByIdRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionByIdRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionByIdResponse { #[prost(message, optional, tag = "1")] @@ -1203,12 +1121,10 @@ impl ::prost::Name for LiquidityPositionByIdResponse { const NAME: &'static str = "LiquidityPositionByIdResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionByIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionByIdResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionsByIdRequest { #[prost(message, repeated, tag = "2")] @@ -1218,12 +1134,10 @@ impl ::prost::Name for LiquidityPositionsByIdRequest { const NAME: &'static str = "LiquidityPositionsByIdRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionsByIdRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionsByIdRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionsByIdResponse { #[prost(message, optional, tag = "1")] @@ -1233,12 
+1147,10 @@ impl ::prost::Name for LiquidityPositionsByIdResponse { const NAME: &'static str = "LiquidityPositionsByIdResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionsByIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionsByIdResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionsByPriceRequest { /// The directed trading pair to request positions for @@ -1252,12 +1164,10 @@ impl ::prost::Name for LiquidityPositionsByPriceRequest { const NAME: &'static str = "LiquidityPositionsByPriceRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionsByPriceRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionsByPriceRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LiquidityPositionsByPriceResponse { #[prost(message, optional, tag = "1")] @@ -1269,12 +1179,10 @@ impl ::prost::Name for LiquidityPositionsByPriceResponse { const NAME: &'static str = "LiquidityPositionsByPriceResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.LiquidityPositionsByPriceResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.LiquidityPositionsByPriceResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpreadRequest { #[prost(message, optional, tag = "2")] @@ -1284,12 +1192,10 @@ impl ::prost::Name for SpreadRequest { const NAME: &'static str = "SpreadRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SpreadRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SpreadRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpreadResponse { /// The best position when trading 1 => 2. @@ -1309,12 +1215,10 @@ impl ::prost::Name for SpreadResponse { const NAME: &'static str = "SpreadResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SpreadResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SpreadResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SimulateTradeRequest { #[prost(message, optional, tag = "1")] @@ -1326,42 +1230,42 @@ pub struct SimulateTradeRequest { } /// Nested message and enum types in `SimulateTradeRequest`. 
pub mod simulate_trade_request { - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Routing { #[prost(oneof = "routing::Setting", tags = "1, 2")] pub setting: ::core::option::Option, } /// Nested message and enum types in `Routing`. pub mod routing { - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct SingleHop {} impl ::prost::Name for SingleHop { const NAME: &'static str = "SingleHop"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SimulateTradeRequest.Routing.SingleHop" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SimulateTradeRequest.Routing.SingleHop" - .into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SimulateTradeRequest.Routing.{}", + Self::NAME + ) } } - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Default {} impl ::prost::Name for Default { const NAME: &'static str = "Default"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SimulateTradeRequest.Routing.Default" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SimulateTradeRequest.Routing.Default" - .into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SimulateTradeRequest.Routing.{}", + Self::NAME + ) } } - #[derive(Clone, Copy, PartialEq, ::prost::Oneof)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Setting { #[prost(message, tag = "1")] Default(Default), @@ -1373,10 +1277,9 @@ pub mod simulate_trade_request { const NAME: &'static str = "Routing"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SimulateTradeRequest.Routing".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SimulateTradeRequest.Routing".into() + ::prost::alloc::format!( + "penumbra.core.component.dex.v1.SimulateTradeRequest.{}", Self::NAME + ) } } } @@ -1384,12 +1287,10 @@ impl ::prost::Name for SimulateTradeRequest { const NAME: &'static str = "SimulateTradeRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SimulateTradeRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.SimulateTradeRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SimulateTradeResponse { #[prost(message, optional, tag = "1")] @@ -1402,12 +1303,10 @@ impl ::prost::Name for SimulateTradeResponse { const NAME: &'static str = "SimulateTradeResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.SimulateTradeResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - 
"/penumbra.core.component.dex.v1.SimulateTradeResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventSwap { /// The trading pair to swap. @@ -1429,12 +1328,10 @@ impl ::prost::Name for EventSwap { const NAME: &'static str = "EventSwap"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventSwap".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventSwap".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventSwapClaim { /// The trading pair that is subject of the swap claim. @@ -1458,12 +1355,10 @@ impl ::prost::Name for EventSwapClaim { const NAME: &'static str = "EventSwapClaim"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventSwapClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventSwapClaim".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventPositionOpen { /// Position ID. @@ -1492,12 +1387,10 @@ impl ::prost::Name for EventPositionOpen { const NAME: &'static str = "EventPositionOpen"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventPositionOpen".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventPositionOpen".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventPositionClose { /// The ID of the closed position @@ -1508,12 +1401,10 @@ impl ::prost::Name for EventPositionClose { const NAME: &'static str = "EventPositionClose"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventPositionClose".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventPositionClose".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventQueuePositionClose { /// The ID of the position queued that is closed for closure. @@ -1524,12 +1415,10 @@ impl ::prost::Name for EventQueuePositionClose { const NAME: &'static str = "EventQueuePositionClose"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventQueuePositionClose".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventQueuePositionClose".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventPositionWithdraw { /// The ID of the withdrawn position. 
@@ -1552,12 +1441,10 @@ impl ::prost::Name for EventPositionWithdraw { const NAME: &'static str = "EventPositionWithdraw"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventPositionWithdraw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventPositionWithdraw".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventPositionExecution { /// The ID of the position executed against. @@ -1586,12 +1473,10 @@ impl ::prost::Name for EventPositionExecution { const NAME: &'static str = "EventPositionExecution"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventPositionExecution".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventPositionExecution".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventBatchSwap { /// The BatchSwapOutputData containing the results of the batch swap. @@ -1608,12 +1493,10 @@ impl ::prost::Name for EventBatchSwap { const NAME: &'static str = "EventBatchSwap"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventBatchSwap".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventBatchSwap".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventArbExecution { /// The height at which the arb execution occurred. @@ -1627,13 +1510,11 @@ impl ::prost::Name for EventArbExecution { const NAME: &'static str = "EventArbExecution"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventArbExecution".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventArbExecution".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Indicates that value was added to the DEX. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValueCircuitBreakerCredit { /// The asset ID being deposited into the DEX. @@ -1650,13 +1531,11 @@ impl ::prost::Name for EventValueCircuitBreakerCredit { const NAME: &'static str = "EventValueCircuitBreakerCredit"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventValueCircuitBreakerCredit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventValueCircuitBreakerCredit".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Indicates that value is leaving the DEX. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValueCircuitBreakerDebit { /// The asset ID being deposited into the DEX. 
@@ -1673,16 +1552,14 @@ impl ::prost::Name for EventValueCircuitBreakerDebit { const NAME: &'static str = "EventValueCircuitBreakerDebit"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventValueCircuitBreakerDebit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventValueCircuitBreakerDebit".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Emitted whenever there's non-empty candlestick data for a particular pair. /// /// Beware: if there's no activity on a given pair, there's no guarantee /// that a candlestick will be emitted. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventCandlestickData { /// The pair the candlestick is for. @@ -1696,12 +1573,10 @@ impl ::prost::Name for EventCandlestickData { const NAME: &'static str = "EventCandlestickData"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.EventCandlestickData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.EventCandlestickData".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DexParameters { /// Whether or not the DEX is enabled. @@ -1729,12 +1604,10 @@ impl ::prost::Name for DexParameters { const NAME: &'static str = "DexParameters"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.DexParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.DexParameters".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// The initial parameters for the DEX. @@ -1745,13 +1618,11 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct CandlestickData { /// The height of the candlestick data. 
#[prost(uint64, tag = "1")] @@ -1779,12 +1650,10 @@ impl ::prost::Name for CandlestickData { const NAME: &'static str = "CandlestickData"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.CandlestickData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.CandlestickData".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CandlestickDataRequest { /// The directed trading pair to request candlestick data for. @@ -1809,12 +1678,10 @@ impl ::prost::Name for CandlestickDataRequest { const NAME: &'static str = "CandlestickDataRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.CandlestickDataRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.CandlestickDataRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CandlestickDataResponse { /// The candlestick data points. @@ -1825,12 +1692,10 @@ impl ::prost::Name for CandlestickDataResponse { const NAME: &'static str = "CandlestickDataResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.CandlestickDataResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.CandlestickDataResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CandlestickDataStreamRequest { /// The directed trading pair to subscribe to. @@ -1841,13 +1706,11 @@ impl ::prost::Name for CandlestickDataStreamRequest { const NAME: &'static str = "CandlestickDataStreamRequest"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.CandlestickDataStreamRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.CandlestickDataStreamRequest".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct CandlestickDataStreamResponse { /// The candlestick data point. #[prost(message, optional, tag = "1")] @@ -1857,22 +1720,13 @@ impl ::prost::Name for CandlestickDataStreamResponse { const NAME: &'static str = "CandlestickDataStreamResponse"; const PACKAGE: &'static str = "penumbra.core.component.dex.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.dex.v1.CandlestickDataStreamResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.dex.v1.CandlestickDataStreamResponse".into() + ::prost::alloc::format!("penumbra.core.component.dex.v1.{}", Self::NAME) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the DEX component. @@ -1895,8 +1749,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -1921,7 +1775,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -1968,7 +1822,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1998,7 +1853,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2028,7 +1884,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2058,7 +1915,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2088,7 +1946,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2118,7 +1977,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2150,7 +2010,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2182,7 +2043,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2214,7 +2076,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2244,7 +2107,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2276,7 +2140,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2308,7 +2173,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2331,13 +2197,7 @@ pub mod query_service_client { /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod simulation_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Simulation for the DEX component. @@ -2364,8 +2224,8 @@ pub mod simulation_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -2390,7 +2250,7 @@ pub mod simulation_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { SimulationServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -2437,7 +2297,8 @@ pub mod simulation_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2460,17 +2321,11 @@ pub mod simulation_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Get the batch clearing prices for a specific block height and trading pair. async fn batch_swap_output_data( &self, @@ -2499,7 +2354,7 @@ pub mod query_service_server { type SwapExecutionsStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Stream all swap executions over a range of heights, optionally subscribing to future executions. async fn swap_executions( @@ -2513,7 +2368,7 @@ pub mod query_service_server { type ArbExecutionsStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Stream all arbitrage executions over a range of heights, optionally subscribing to future executions. async fn arb_executions( @@ -2530,7 +2385,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Query all liquidity positions on the DEX. async fn liquidity_positions( @@ -2557,7 +2412,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Query multiple liquidity positions by ID. async fn liquidity_positions_by_id( @@ -2574,7 +2429,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Query liquidity positions on a specific pair, sorted by effective price. async fn liquidity_positions_by_price( @@ -2609,7 +2464,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Subscribe to candlestick data updates. async fn candlestick_data_stream( @@ -2622,18 +2477,20 @@ pub mod query_service_server { } /// Query operations for the DEX component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -2683,8 +2540,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -2696,6 +2553,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.dex.v1.QueryService/BatchSwapOutputData" => { #[allow(non_camel_case_types)] @@ -2727,6 +2585,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BatchSwapOutputDataSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2772,6 +2631,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SwapExecutionSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2817,6 +2677,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ArbExecutionSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2863,6 +2724,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SwapExecutionsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2909,6 +2771,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ArbExecutionsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2957,6 +2820,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = LiquidityPositionsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3006,6 +2870,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = LiquidityPositionByIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ 
-3057,6 +2922,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = LiquidityPositionsByIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3110,6 +2976,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = LiquidityPositionsByPriceSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3155,6 +3022,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SpreadSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3200,6 +3068,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CandlestickDataSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3251,6 +3120,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CandlestickDataStreamSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3269,25 +3139,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -3299,26 +3164,28 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.dex.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.dex.v1.QueryService"; } } /// Generated server implementations. #[cfg(feature = "rpc")] pub mod simulation_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with SimulationServiceServer. 
#[async_trait] - pub trait SimulationService: std::marker::Send + std::marker::Sync + 'static { + pub trait SimulationService: Send + Sync + 'static { /// Simulate routing and trade execution. async fn simulate_trade( &self, @@ -3334,18 +3201,20 @@ pub mod simulation_service_server { /// simple read query from the state. Thus it poses greater DoS risks, and node /// operators may want to enable it separately. #[derive(Debug)] - pub struct SimulationServiceServer { - inner: Arc, + pub struct SimulationServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl SimulationServiceServer { + struct _Inner(Arc); + impl SimulationServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -3395,8 +3264,8 @@ pub mod simulation_service_server { impl tonic::codegen::Service> for SimulationServiceServer where T: SimulationService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -3408,6 +3277,7 @@ pub mod simulation_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.dex.v1.SimulationService/SimulateTrade" => { #[allow(non_camel_case_types)] @@ -3439,6 +3309,7 @@ pub mod simulation_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SimulateTradeSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3457,25 +3328,20 @@ pub mod simulation_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for SimulationServiceServer { + impl Clone for SimulationServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -3487,9 +3353,18 @@ pub mod simulation_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.dex.v1.SimulationService"; - impl tonic::server::NamedService for SimulationServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService + for SimulationServiceServer { + const NAME: &'static str = "penumbra.core.component.dex.v1.SimulationService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.dex.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.dex.v1.serde.rs 
index e15c31793c..cb4fbff28d 100644 --- a/crates/proto/src/gen/penumbra.core.component.dex.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.dex.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for ArbExecutionRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.ArbExecutionRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -117,7 +116,6 @@ impl serde::Serialize for ArbExecutionResponse { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -231,12 +229,10 @@ impl serde::Serialize for ArbExecutionsRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.ArbExecutionsRequest", len)?; if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } if self.end_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endHeight", ToString::to_string(&self.end_height).as_str())?; } struct_ser.end() @@ -356,7 +352,6 @@ impl serde::Serialize for ArbExecutionsResponse { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -643,7 +638,6 @@ impl serde::Serialize for BatchSwapOutputData { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.trading_pair.as_ref() { @@ -651,12 +645,10 @@ impl serde::Serialize for BatchSwapOutputData { } if self.epoch_starting_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochStartingHeight", ToString::to_string(&self.epoch_starting_height).as_str())?; } if self.sct_position_prefix != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sctPositionPrefix", ToString::to_string(&self.sct_position_prefix).as_str())?; } struct_ser.end() @@ -870,7 +862,6 @@ impl serde::Serialize for BatchSwapOutputDataRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.BatchSwapOutputDataRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.trading_pair.as_ref() { @@ -1097,7 +1088,6 @@ impl serde::Serialize for CandlestickData { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.CandlestickData", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if self.open != 0. 
{ @@ -1303,12 +1293,10 @@ impl serde::Serialize for CandlestickDataRequest { } if self.limit != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("limit", ToString::to_string(&self.limit).as_str())?; } if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } struct_ser.end() @@ -2006,7 +1994,6 @@ impl serde::Serialize for EventArbExecution { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.EventArbExecution", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.swap_execution.as_ref() { @@ -2874,7 +2861,6 @@ impl serde::Serialize for EventPositionWithdraw { } if self.sequence != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sequence", ToString::to_string(&self.sequence).as_str())?; } struct_ser.end() @@ -4164,7 +4150,6 @@ impl serde::Serialize for LiquidityPositionsByPriceRequest { } if self.limit != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("limit", ToString::to_string(&self.limit).as_str())?; } struct_ser.end() @@ -4706,7 +4691,6 @@ impl serde::Serialize for Position { } if !self.nonce.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nonce", pbjson::private::base64::encode(&self.nonce).as_str())?; } if let Some(v) = self.state.as_ref() { @@ -4958,7 +4942,6 @@ impl serde::Serialize for PositionId { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.PositionId", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } if !self.alt_bech32m.is_empty() { @@ -5384,7 +5367,6 @@ impl serde::Serialize for PositionState { } if self.sequence != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sequence", ToString::to_string(&self.sequence).as_str())?; } struct_ser.end() @@ -5586,7 +5568,6 @@ impl serde::Serialize for PositionWithdraw { } if self.sequence != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sequence", ToString::to_string(&self.sequence).as_str())?; } struct_ser.end() @@ -5730,7 +5711,6 @@ impl serde::Serialize for PositionWithdrawPlan { } if self.sequence != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sequence", ToString::to_string(&self.sequence).as_str())?; } if !self.rewards.is_empty() { @@ -7029,7 +7009,6 @@ impl serde::Serialize for SwapClaim { } if self.epoch_duration != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochDuration", ToString::to_string(&self.epoch_duration).as_str())?; } struct_ser.end() @@ -7335,7 +7314,6 @@ impl serde::Serialize for SwapClaimPlan { } if self.position != 0 { #[allow(clippy::needless_borrow)] - 
#[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("position", ToString::to_string(&self.position).as_str())?; } if let Some(v) = self.output_data.as_ref() { @@ -7343,17 +7321,14 @@ impl serde::Serialize for SwapClaimPlan { } if self.epoch_duration != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochDuration", ToString::to_string(&self.epoch_duration).as_str())?; } if !self.proof_blinding_r.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingR", pbjson::private::base64::encode(&self.proof_blinding_r).as_str())?; } if !self.proof_blinding_s.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingS", pbjson::private::base64::encode(&self.proof_blinding_s).as_str())?; } struct_ser.end() @@ -8104,7 +8079,6 @@ impl serde::Serialize for SwapExecutionRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.SwapExecutionRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.trading_pair.as_ref() { @@ -8320,12 +8294,10 @@ impl serde::Serialize for SwapExecutionsRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.SwapExecutionsRequest", len)?; if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } if self.end_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endHeight", ToString::to_string(&self.end_height).as_str())?; } if let Some(v) = self.trading_pair.as_ref() { @@ -8463,7 +8435,6 @@ impl serde::Serialize for SwapExecutionsResponse { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.trading_pair.as_ref() { @@ -8595,7 +8566,6 @@ impl serde::Serialize for SwapPayload { } if !self.encrypted_swap.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("encryptedSwap", pbjson::private::base64::encode(&self.encrypted_swap).as_str())?; } struct_ser.end() @@ -8736,7 +8706,6 @@ impl serde::Serialize for SwapPlaintext { } if !self.rseed.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("rseed", pbjson::private::base64::encode(&self.rseed).as_str())?; } struct_ser.end() @@ -8907,17 +8876,14 @@ impl serde::Serialize for SwapPlan { } if !self.fee_blinding.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("feeBlinding", pbjson::private::base64::encode(&self.fee_blinding).as_str())?; } if !self.proof_blinding_r.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingR", pbjson::private::base64::encode(&self.proof_blinding_r).as_str())?; } if !self.proof_blinding_s.is_empty() { 
#[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingS", pbjson::private::base64::encode(&self.proof_blinding_s).as_str())?; } struct_ser.end() @@ -9802,7 +9768,6 @@ impl serde::Serialize for ZkSwapClaimProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.ZKSwapClaimProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -9901,7 +9866,6 @@ impl serde::Serialize for ZkSwapProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.dex.v1.ZKSwapProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.distributions.v1.rs b/crates/proto/src/gen/penumbra.core.component.distributions.v1.rs index 8ad924095d..8e9c616aad 100644 --- a/crates/proto/src/gen/penumbra.core.component.distributions.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.distributions.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// Distribution configuration data. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct DistributionsParameters { /// The amount of staking token issued per block. #[prost(uint64, tag = "1")] @@ -10,14 +10,14 @@ impl ::prost::Name for DistributionsParameters { const NAME: &'static str = "DistributionsParameters"; const PACKAGE: &'static str = "penumbra.core.component.distributions.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.distributions.v1.DistributionsParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.distributions.v1.DistributionsParameters".into() + ::prost::alloc::format!( + "penumbra.core.component.distributions.v1.{}", Self::NAME + ) } } /// Genesis data for the distributions module. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { #[prost(message, optional, tag = "1")] pub distributions_params: ::core::option::Option, @@ -26,9 +26,8 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.distributions.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.distributions.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.distributions.v1.GenesisContent".into() + ::prost::alloc::format!( + "penumbra.core.component.distributions.v1.{}", Self::NAME + ) } } diff --git a/crates/proto/src/gen/penumbra.core.component.distributions.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.distributions.v1.serde.rs index efe1238b66..7571336852 100644 --- a/crates/proto/src/gen/penumbra.core.component.distributions.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.distributions.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for DistributionsParameters { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.distributions.v1.DistributionsParameters", len)?; if self.staking_issuance_per_block != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("stakingIssuancePerBlock", ToString::to_string(&self.staking_issuance_per_block).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.fee.v1.rs b/crates/proto/src/gen/penumbra.core.component.fee.v1.rs index 287db509e6..e3ae15a7b0 100644 --- a/crates/proto/src/gen/penumbra.core.component.fee.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.fee.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// Specifies fees paid by a transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Fee { /// The amount of the token used to pay fees. @@ -14,16 +14,14 @@ impl ::prost::Name for Fee { const NAME: &'static str = "Fee"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.Fee".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.Fee".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } /// Gas usage for a transaction. /// /// Gas used is multiplied by `GasPrices` to determine a `Fee`. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Gas { /// The amount of block space used. #[prost(uint64, tag = "1")] @@ -42,12 +40,10 @@ impl ::prost::Name for Gas { const NAME: &'static str = "Gas"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.Gas".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.Gas".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GasPrices { /// The asset ID of the fee token these prices are for. 
@@ -72,13 +68,11 @@ impl ::prost::Name for GasPrices { const NAME: &'static str = "GasPrices"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.GasPrices".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.GasPrices".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FeeTier { /// The selected fee tier. #[prost(enumeration = "fee_tier::Tier", tag = "1")] @@ -112,10 +106,10 @@ pub mod fee_tier { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "TIER_UNSPECIFIED", - Self::Low => "TIER_LOW", - Self::Medium => "TIER_MEDIUM", - Self::High => "TIER_HIGH", + Tier::Unspecified => "TIER_UNSPECIFIED", + Tier::Low => "TIER_LOW", + Tier::Medium => "TIER_MEDIUM", + Tier::High => "TIER_HIGH", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -134,13 +128,11 @@ impl ::prost::Name for FeeTier { const NAME: &'static str = "FeeTier"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.FeeTier".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.FeeTier".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } /// Fee component configuration data. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FeeParameters { /// Fixed gas prices in the native token used to compute transactions' base @@ -165,13 +157,11 @@ impl ::prost::Name for FeeParameters { const NAME: &'static str = "FeeParameters"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.FeeParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.FeeParameters".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } /// Fee-specific genesis content. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// The FeeParameters present at genesis. 
@@ -182,24 +172,20 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct CurrentGasPricesRequest {} impl ::prost::Name for CurrentGasPricesRequest { const NAME: &'static str = "CurrentGasPricesRequest"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.CurrentGasPricesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.CurrentGasPricesRequest".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CurrentGasPricesResponse { /// The current gas prices, in the preferred (native) token. @@ -213,13 +199,11 @@ impl ::prost::Name for CurrentGasPricesResponse { const NAME: &'static str = "CurrentGasPricesResponse"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.CurrentGasPricesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.CurrentGasPricesResponse".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } /// Emitted during fee payment. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventPaidFee { /// The fee paid. @@ -239,13 +223,11 @@ impl ::prost::Name for EventPaidFee { const NAME: &'static str = "EventPaidFee"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.EventPaidFee".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.EventPaidFee".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } /// Emitted as a summary of fees in the block. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventBlockFees { /// The total fees, after swapping to the native token. @@ -262,22 +244,13 @@ impl ::prost::Name for EventBlockFees { const NAME: &'static str = "EventBlockFees"; const PACKAGE: &'static str = "penumbra.core.component.fee.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.fee.v1.EventBlockFees".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.fee.v1.EventBlockFees".into() + ::prost::alloc::format!("penumbra.core.component.fee.v1.{}", Self::NAME) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the fee component. @@ -300,8 +273,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -326,7 +299,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -373,7 +346,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -396,17 +370,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Get the current gas prices. async fn current_gas_prices( &self, @@ -418,18 +386,20 @@ pub mod query_service_server { } /// Query operations for the fee component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -479,8 +449,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -492,6 +462,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.fee.v1.QueryService/CurrentGasPrices" => { #[allow(non_camel_case_types)] @@ -523,6 +494,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CurrentGasPricesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -541,25 +513,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -571,9 +538,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.fee.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.fee.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.fee.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.fee.v1.serde.rs index 1d166c08f5..2d9df3b9be 100644 --- a/crates/proto/src/gen/penumbra.core.component.fee.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.fee.v1.serde.rs @@ -889,22 +889,18 @@ impl serde::Serialize for Gas { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.fee.v1.Gas", len)?; if self.block_space != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("blockSpace", 
ToString::to_string(&self.block_space).as_str())?; } if self.compact_block_space != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("compactBlockSpace", ToString::to_string(&self.compact_block_space).as_str())?; } if self.verification != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("verification", ToString::to_string(&self.verification).as_str())?; } if self.execution != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("execution", ToString::to_string(&self.execution).as_str())?; } struct_ser.end() @@ -1059,22 +1055,18 @@ impl serde::Serialize for GasPrices { } if self.block_space_price != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("blockSpacePrice", ToString::to_string(&self.block_space_price).as_str())?; } if self.compact_block_space_price != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("compactBlockSpacePrice", ToString::to_string(&self.compact_block_space_price).as_str())?; } if self.verification_price != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("verificationPrice", ToString::to_string(&self.verification_price).as_str())?; } if self.execution_price != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("executionPrice", ToString::to_string(&self.execution_price).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.funding.v1.rs b/crates/proto/src/gen/penumbra.core.component.funding.v1.rs index d72e5db496..2db78e4ba8 100644 --- a/crates/proto/src/gen/penumbra.core.component.funding.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.funding.v1.rs @@ -1,19 +1,17 @@ -// This file is @generated by prost-build. /// Funding component configuration data. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FundingParameters {} impl ::prost::Name for FundingParameters { const NAME: &'static str = "FundingParameters"; const PACKAGE: &'static str = "penumbra.core.component.funding.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.funding.v1.FundingParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.funding.v1.FundingParameters".into() + ::prost::alloc::format!("penumbra.core.component.funding.v1.{}", Self::NAME) } } /// Genesis data for the funding component. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { #[prost(message, optional, tag = "1")] pub funding_params: ::core::option::Option, @@ -22,13 +20,11 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.funding.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.funding.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.funding.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.funding.v1.{}", Self::NAME) } } /// Indicates that a funding stream reward was paid. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventFundingStreamReward { /// The recipient of the funding stream reward. @@ -48,9 +44,6 @@ impl ::prost::Name for EventFundingStreamReward { const NAME: &'static str = "EventFundingStreamReward"; const PACKAGE: &'static str = "penumbra.core.component.funding.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.funding.v1.EventFundingStreamReward".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.funding.v1.EventFundingStreamReward".into() + ::prost::alloc::format!("penumbra.core.component.funding.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.component.funding.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.funding.v1.serde.rs index 827556590e..833eeed457 100644 --- a/crates/proto/src/gen/penumbra.core.component.funding.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.funding.v1.serde.rs @@ -21,7 +21,6 @@ impl serde::Serialize for EventFundingStreamReward { } if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } if let Some(v) = self.reward_amount.as_ref() { diff --git a/crates/proto/src/gen/penumbra.core.component.governance.v1.rs b/crates/proto/src/gen/penumbra.core.component.governance.v1.rs index d278363f49..4c188cf312 100644 --- a/crates/proto/src/gen/penumbra.core.component.governance.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.governance.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// A Penumbra ZK delegator vote proof. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkDelegatorVoteProof { #[prost(bytes = "vec", tag = "1")] @@ -9,12 +9,10 @@ impl ::prost::Name for ZkDelegatorVoteProof { const NAME: &'static str = "ZKDelegatorVoteProof"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ZKDelegatorVoteProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ZKDelegatorVoteProof".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalSubmit { /// The proposal to be submitted. 
@@ -28,12 +26,10 @@ impl ::prost::Name for ProposalSubmit { const NAME: &'static str = "ProposalSubmit"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalSubmit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalSubmit".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalWithdraw { /// The proposal to be withdrawn. @@ -47,12 +43,10 @@ impl ::prost::Name for ProposalWithdraw { const NAME: &'static str = "ProposalWithdraw"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalWithdraw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalWithdraw".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalDepositClaim { /// The proposal to claim the deposit for. @@ -69,12 +63,10 @@ impl ::prost::Name for ProposalDepositClaim { const NAME: &'static str = "ProposalDepositClaim"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalDepositClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalDepositClaim".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorVote { /// The effecting data for the vote. @@ -90,12 +82,10 @@ impl ::prost::Name for ValidatorVote { const NAME: &'static str = "ValidatorVote"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ValidatorVote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ValidatorVote".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorVoteReason { #[prost(string, tag = "1")] @@ -105,12 +95,10 @@ impl ::prost::Name for ValidatorVoteReason { const NAME: &'static str = "ValidatorVoteReason"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ValidatorVoteReason".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ValidatorVoteReason".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorVoteBody { /// The proposal being voted on. 
@@ -135,12 +123,10 @@ impl ::prost::Name for ValidatorVoteBody { const NAME: &'static str = "ValidatorVoteBody"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ValidatorVoteBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ValidatorVoteBody".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegatorVote { /// The effecting data for the vote. @@ -159,12 +145,10 @@ impl ::prost::Name for DelegatorVote { const NAME: &'static str = "DelegatorVote"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.DelegatorVote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.DelegatorVote".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegatorVoteBody { /// The proposal being voted on. @@ -195,12 +179,10 @@ impl ::prost::Name for DelegatorVoteBody { const NAME: &'static str = "DelegatorVoteBody"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.DelegatorVoteBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.DelegatorVoteBody".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegatorVoteView { #[prost(oneof = "delegator_vote_view::DelegatorVote", tags = "1, 2")] @@ -208,6 +190,7 @@ pub struct DelegatorVoteView { } /// Nested message and enum types in `DelegatorVoteView`. 
pub mod delegator_vote_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Visible { #[prost(message, optional, tag = "1")] @@ -221,12 +204,12 @@ pub mod delegator_vote_view { const NAME: &'static str = "Visible"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.DelegatorVoteView.Visible".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.DelegatorVoteView.Visible".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.DelegatorVoteView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -236,12 +219,12 @@ pub mod delegator_vote_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.DelegatorVoteView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.DelegatorVoteView.Opaque".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.DelegatorVoteView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum DelegatorVote { #[prost(message, tag = "1")] @@ -254,12 +237,10 @@ impl ::prost::Name for DelegatorVoteView { const NAME: &'static str = "DelegatorVoteView"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.DelegatorVoteView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.DelegatorVoteView".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegatorVotePlan { /// The proposal to vote on. @@ -294,12 +275,10 @@ impl ::prost::Name for DelegatorVotePlan { const NAME: &'static str = "DelegatorVotePlan"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.DelegatorVotePlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.DelegatorVotePlan".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolDeposit { /// The value to deposit into the Community Pool. 
@@ -310,12 +289,10 @@ impl ::prost::Name for CommunityPoolDeposit { const NAME: &'static str = "CommunityPoolDeposit"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.CommunityPoolDeposit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.CommunityPoolDeposit".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolSpend { /// The value to spend from the Community Pool. @@ -326,12 +303,10 @@ impl ::prost::Name for CommunityPoolSpend { const NAME: &'static str = "CommunityPoolSpend"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.CommunityPoolSpend".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.CommunityPoolSpend".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolOutput { /// The value to output from the Community Pool. @@ -345,14 +320,12 @@ impl ::prost::Name for CommunityPoolOutput { const NAME: &'static str = "CommunityPoolOutput"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.CommunityPoolOutput".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.CommunityPoolOutput".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// A vote on a proposal. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Vote { /// The vote. #[prost(enumeration = "vote::Vote", tag = "1")] @@ -386,10 +359,10 @@ pub mod vote { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "VOTE_UNSPECIFIED", - Self::Abstain => "VOTE_ABSTAIN", - Self::Yes => "VOTE_YES", - Self::No => "VOTE_NO", + Vote::Unspecified => "VOTE_UNSPECIFIED", + Vote::Abstain => "VOTE_ABSTAIN", + Vote::Yes => "VOTE_YES", + Vote::No => "VOTE_NO", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -408,13 +381,11 @@ impl ::prost::Name for Vote { const NAME: &'static str = "Vote"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Vote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Vote".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// The current state of a proposal. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalState { /// The state of the proposal. @@ -424,19 +395,20 @@ pub struct ProposalState { /// Nested message and enum types in `ProposalState`. pub mod proposal_state { /// Voting is in progress and the proposal has not yet concluded voting or been withdrawn. 
- #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Voting {} impl ::prost::Name for Voting { const NAME: &'static str = "Voting"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalState.Voting".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalState.Voting".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalState.{}", Self::NAME + ) } } /// The proposal has been withdrawn but the voting period is not yet concluded. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Withdrawn { /// The reason for the withdrawal. @@ -447,13 +419,13 @@ pub mod proposal_state { const NAME: &'static str = "Withdrawn"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalState.Withdrawn".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalState.Withdrawn".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalState.{}", Self::NAME + ) } } /// The voting period has ended, and the proposal has been assigned an outcome. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Finished { #[prost(message, optional, tag = "1")] @@ -463,13 +435,13 @@ pub mod proposal_state { const NAME: &'static str = "Finished"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalState.Finished".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalState.Finished".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalState.{}", Self::NAME + ) } } /// The voting period has ended, and the original proposer has claimed their deposit. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Claimed { #[prost(message, optional, tag = "1")] @@ -479,13 +451,13 @@ pub mod proposal_state { const NAME: &'static str = "Claimed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalState.Claimed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalState.Claimed".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalState.{}", Self::NAME + ) } } /// The state of the proposal. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum State { #[prost(message, tag = "2")] @@ -502,13 +474,11 @@ impl ::prost::Name for ProposalState { const NAME: &'static str = "ProposalState"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalState".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// The outcome of a concluded proposal. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalOutcome { #[prost(oneof = "proposal_outcome::Outcome", tags = "1, 2, 3")] @@ -517,6 +487,7 @@ pub struct ProposalOutcome { /// Nested message and enum types in `ProposalOutcome`. pub mod proposal_outcome { /// Whether or not the proposal was withdrawn. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Withdrawn { /// The reason for withdrawing the proposal during the voting period. @@ -527,26 +498,26 @@ pub mod proposal_outcome { const NAME: &'static str = "Withdrawn"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalOutcome.Withdrawn".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalOutcome.Withdrawn".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalOutcome.{}", Self::NAME + ) } } /// The proposal was passed. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Passed {} impl ::prost::Name for Passed { const NAME: &'static str = "Passed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalOutcome.Passed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalOutcome.Passed".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalOutcome.{}", Self::NAME + ) } } /// The proposal did not pass. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Failed { /// Present if the proposal was withdrawn during the voting period. @@ -557,13 +528,13 @@ pub mod proposal_outcome { const NAME: &'static str = "Failed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalOutcome.Failed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalOutcome.Failed".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalOutcome.{}", Self::NAME + ) } } /// The proposal did not pass, and was slashed. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Slashed { /// Present if the proposal was withdrawn during the voting period. 
@@ -574,12 +545,12 @@ pub mod proposal_outcome { const NAME: &'static str = "Slashed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalOutcome.Slashed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalOutcome.Slashed".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.ProposalOutcome.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Outcome { #[prost(message, tag = "1")] @@ -594,14 +565,12 @@ impl ::prost::Name for ProposalOutcome { const NAME: &'static str = "ProposalOutcome"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalOutcome".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalOutcome".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// A tally of votes on a proposal. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Tally { /// The number of votes in favor of the proposal. #[prost(uint64, tag = "1")] @@ -617,13 +586,11 @@ impl ::prost::Name for Tally { const NAME: &'static str = "Tally"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Tally".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Tally".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// A proposal to be voted upon. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Proposal { /// The unique identifier of the proposal. @@ -645,6 +612,7 @@ pub mod proposal { /// effect when passed. /// /// It optionally contains a reference to a commit which contains code to upgrade the chain. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Signaling { /// The commit to be voted upon, if any is relevant. @@ -655,10 +623,9 @@ pub mod proposal { const NAME: &'static str = "Signaling"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.Signaling".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.Signaling".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// An emergency proposal can be passed instantaneously by a 1/3 majority of validators, without @@ -666,7 +633,8 @@ pub mod proposal { /// /// If the boolean `halt_chain` is set to `true`, then the chain will halt immediately when the /// proposal is passed. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Emergency { /// If `true`, the chain will halt immediately when the proposal is passed. 
#[prost(bool, tag = "1")] @@ -676,14 +644,14 @@ pub mod proposal { const NAME: &'static str = "Emergency"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.Emergency".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.Emergency".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// A parameter change proposal describes a replacement of the app parameters, which should take /// effect when the proposal is passed. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ParameterChange { /// DEPRECATED @@ -710,15 +678,15 @@ pub mod proposal { const NAME: &'static str = "ParameterChange"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.ParameterChange".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.ParameterChange".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// A Community Pool spend proposal describes zero or more transactions to execute on behalf of the Community Pool, with /// access to its funds, and zero or more scheduled transactions from previous passed proposals to /// cancel. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolSpend { /// The transaction plan to be executed at the time the proposal is passed. This must be a @@ -731,15 +699,15 @@ pub mod proposal { const NAME: &'static str = "CommunityPoolSpend"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.CommunityPoolSpend".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.CommunityPoolSpend".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// An upgrade plan describes a candidate upgrade to be executed at a certain height. If passed, the chain /// will halt at the specified height. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct UpgradePlan { #[prost(uint64, tag = "1")] pub height: u64, @@ -748,15 +716,15 @@ pub mod proposal { const NAME: &'static str = "UpgradePlan"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.UpgradePlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.UpgradePlan".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// Freeze an existing IBC client. /// Like `Emergency` proposals, it is enacted immediately after receiving /// +2/3 of validator votes. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FreezeIbcClient { #[prost(string, tag = "1")] @@ -766,15 +734,15 @@ pub mod proposal { const NAME: &'static str = "FreezeIbcClient"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.FreezeIbcClient".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.FreezeIbcClient".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// Unfreeze an existing IBC client. /// Like `Emergency` proposals, it is enacted immediately after receiving /// +2/3 of validator votes. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UnfreezeIbcClient { #[prost(string, tag = "1")] @@ -784,13 +752,13 @@ pub mod proposal { const NAME: &'static str = "UnfreezeIbcClient"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal.UnfreezeIbcClient".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal.UnfreezeIbcClient".into() + ::prost::alloc::format!( + "penumbra.core.component.governance.v1.Proposal.{}", Self::NAME + ) } } /// The proposal's payload. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Payload { #[prost(message, tag = "5")] @@ -813,13 +781,11 @@ impl ::prost::Name for Proposal { const NAME: &'static str = "Proposal"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Proposal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Proposal".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalInfoRequest { /// The proposal id to request information on. #[prost(uint64, tag = "2")] @@ -829,13 +795,11 @@ impl ::prost::Name for ProposalInfoRequest { const NAME: &'static str = "ProposalInfoRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalInfoRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalInfoRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalInfoResponse { /// The block height at which the proposal started voting. 
#[prost(uint64, tag = "1")] @@ -848,13 +812,11 @@ impl ::prost::Name for ProposalInfoResponse { const NAME: &'static str = "ProposalInfoResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalInfoResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalInfoResponse".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalDataRequest { /// The proposal id to request information on. #[prost(uint64, tag = "2")] @@ -864,12 +826,10 @@ impl ::prost::Name for ProposalDataRequest { const NAME: &'static str = "ProposalDataRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalDataRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalDataRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalDataResponse { /// The proposal metadata. @@ -897,14 +857,12 @@ impl ::prost::Name for ProposalDataResponse { const NAME: &'static str = "ProposalDataResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalDataResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalDataResponse".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// Requests the validator rate data for a proposal. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalRateDataRequest { /// The proposal id to request information on. #[prost(uint64, tag = "2")] @@ -914,13 +872,11 @@ impl ::prost::Name for ProposalRateDataRequest { const NAME: &'static str = "ProposalRateDataRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalRateDataRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalRateDataRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// The rate data for a single validator. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalRateDataResponse { #[prost(message, optional, tag = "1")] @@ -930,14 +886,12 @@ impl ::prost::Name for ProposalRateDataResponse { const NAME: &'static str = "ProposalRateDataResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalRateDataResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalRateDataResponse".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// Requests the list of all proposals. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalListRequest { /// Whether to include proposals that are no longer active.; /// @@ -949,13 +903,11 @@ impl ::prost::Name for ProposalListRequest { const NAME: &'static str = "ProposalListRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalListRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalListRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// The data for a single proposal. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProposalListResponse { /// The proposal metadata. @@ -978,14 +930,12 @@ impl ::prost::Name for ProposalListResponse { const NAME: &'static str = "ProposalListResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ProposalListResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ProposalListResponse".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// Requests the list of all validator votes for a given proposal. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorVotesRequest { /// The proposal id to request information on. #[prost(uint64, tag = "2")] @@ -995,13 +945,11 @@ impl ::prost::Name for ValidatorVotesRequest { const NAME: &'static str = "ValidatorVotesRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ValidatorVotesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ValidatorVotesRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// The data for a single validator vote. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorVotesResponse { /// The vote. 
@@ -1015,13 +963,11 @@ impl ::prost::Name for ValidatorVotesResponse { const NAME: &'static str = "ValidatorVotesResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ValidatorVotesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ValidatorVotesResponse".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// Governance configuration data. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GovernanceParameters { /// The number of blocks during which a proposal is voted on. @@ -1048,13 +994,11 @@ impl ::prost::Name for GovernanceParameters { const NAME: &'static str = "GovernanceParameters"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.GovernanceParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.GovernanceParameters".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// Governance genesis state. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// Governance parameters. @@ -1065,12 +1009,10 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EncodedParameter { /// The component name in the `AppParameters`. @@ -1093,13 +1035,11 @@ impl ::prost::Name for EncodedParameter { const NAME: &'static str = "EncodedParameter"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EncodedParameter".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EncodedParameter".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// DEPRECATED +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ChangedAppParameters { /// Sct module parameters. 
@@ -1150,13 +1090,11 @@ impl ::prost::Name for ChangedAppParameters { const NAME: &'static str = "ChangedAppParameters"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ChangedAppParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ChangedAppParameters".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// DEPRECATED +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ChangedAppParametersSet { /// The set of app parameters at the time the proposal was submitted. @@ -1170,12 +1108,10 @@ impl ::prost::Name for ChangedAppParametersSet { const NAME: &'static str = "ChangedAppParametersSet"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.ChangedAppParametersSet".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.ChangedAppParametersSet".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct VotingPowerAtProposalStartRequest { /// The proposal id to request information on. @@ -1189,13 +1125,11 @@ impl ::prost::Name for VotingPowerAtProposalStartRequest { const NAME: &'static str = "VotingPowerAtProposalStartRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.VotingPowerAtProposalStartRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.VotingPowerAtProposalStartRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct VotingPowerAtProposalStartResponse { /// The voting power for the given identity key at the start of the proposal. /// TODO: since we don't support optional fields in our protos any more, @@ -1208,14 +1142,11 @@ impl ::prost::Name for VotingPowerAtProposalStartResponse { const NAME: &'static str = "VotingPowerAtProposalStartResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.VotingPowerAtProposalStartResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.VotingPowerAtProposalStartResponse" - .into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct AllTalliedDelegatorVotesForProposalRequest { /// The proposal id to request information on. 
#[prost(uint64, tag = "2")] @@ -1225,14 +1156,10 @@ impl ::prost::Name for AllTalliedDelegatorVotesForProposalRequest { const NAME: &'static str = "AllTalliedDelegatorVotesForProposalRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.AllTalliedDelegatorVotesForProposalRequest" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.AllTalliedDelegatorVotesForProposalRequest" - .into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AllTalliedDelegatorVotesForProposalResponse { /// The tally of delegator votes for a given validator for the proposal. @@ -1246,27 +1173,21 @@ impl ::prost::Name for AllTalliedDelegatorVotesForProposalResponse { const NAME: &'static str = "AllTalliedDelegatorVotesForProposalResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.AllTalliedDelegatorVotesForProposalResponse" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.AllTalliedDelegatorVotesForProposalResponse" - .into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct NextProposalIdRequest {} impl ::prost::Name for NextProposalIdRequest { const NAME: &'static str = "NextProposalIdRequest"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.NextProposalIdRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.NextProposalIdRequest".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct NextProposalIdResponse { /// The next proposal ID. #[prost(uint64, tag = "1")] @@ -1276,14 +1197,12 @@ impl ::prost::Name for NextProposalIdResponse { const NAME: &'static str = "NextProposalIdResponse"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.NextProposalIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.NextProposalIdResponse".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// The ratio between two numbers, used in governance to describe vote thresholds and quorums. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Ratio { /// The numerator. 
#[prost(uint64, tag = "1")] @@ -1296,12 +1215,10 @@ impl ::prost::Name for Ratio { const NAME: &'static str = "Ratio"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.Ratio".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.Ratio".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventDelegatorVote { /// The delegator vote. @@ -1317,12 +1234,10 @@ impl ::prost::Name for EventDelegatorVote { const NAME: &'static str = "EventDelegatorVote"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventDelegatorVote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventDelegatorVote".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventProposalDepositClaim { /// The deposit claim body. @@ -1333,12 +1248,10 @@ impl ::prost::Name for EventProposalDepositClaim { const NAME: &'static str = "EventProposalDepositClaim"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventProposalDepositClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventProposalDepositClaim".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValidatorVote { /// The validator vote. @@ -1352,12 +1265,10 @@ impl ::prost::Name for EventValidatorVote { const NAME: &'static str = "EventValidatorVote"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventValidatorVote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventValidatorVote".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventProposalWithdraw { /// Details on the withdrawn proposal. @@ -1368,12 +1279,10 @@ impl ::prost::Name for EventProposalWithdraw { const NAME: &'static str = "EventProposalWithdraw"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventProposalWithdraw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventProposalWithdraw".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventProposalSubmit { /// Details on the submitted proposal. 
@@ -1390,12 +1299,10 @@ impl ::prost::Name for EventProposalSubmit { const NAME: &'static str = "EventProposalSubmit"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventProposalSubmit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventProposalSubmit".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventProposalPassed { /// The passed proposal. @@ -1406,12 +1313,10 @@ impl ::prost::Name for EventProposalPassed { const NAME: &'static str = "EventProposalPassed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventProposalPassed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventProposalPassed".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventProposalFailed { /// The failed proposal. @@ -1422,12 +1327,10 @@ impl ::prost::Name for EventProposalFailed { const NAME: &'static str = "EventProposalFailed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventProposalFailed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventProposalFailed".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventProposalSlashed { /// The slashed proposal. @@ -1438,10 +1341,7 @@ impl ::prost::Name for EventProposalSlashed { const NAME: &'static str = "EventProposalSlashed"; const PACKAGE: &'static str = "penumbra.core.component.governance.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.governance.v1.EventProposalSlashed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.governance.v1.EventProposalSlashed".into() + ::prost::alloc::format!("penumbra.core.component.governance.v1.{}", Self::NAME) } } /// All the different kinds of proposals. @@ -1465,14 +1365,14 @@ impl ProposalKind { /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "PROPOSAL_KIND_UNSPECIFIED", - Self::Signaling => "PROPOSAL_KIND_SIGNALING", - Self::Emergency => "PROPOSAL_KIND_EMERGENCY", - Self::ParameterChange => "PROPOSAL_KIND_PARAMETER_CHANGE", - Self::CommunityPoolSpend => "PROPOSAL_KIND_COMMUNITY_POOL_SPEND", - Self::UpgradePlan => "PROPOSAL_KIND_UPGRADE_PLAN", - Self::FreezeIbcClient => "PROPOSAL_KIND_FREEZE_IBC_CLIENT", - Self::UnfreezeIbcClient => "PROPOSAL_KIND_UNFREEZE_IBC_CLIENT", + ProposalKind::Unspecified => "PROPOSAL_KIND_UNSPECIFIED", + ProposalKind::Signaling => "PROPOSAL_KIND_SIGNALING", + ProposalKind::Emergency => "PROPOSAL_KIND_EMERGENCY", + ProposalKind::ParameterChange => "PROPOSAL_KIND_PARAMETER_CHANGE", + ProposalKind::CommunityPoolSpend => "PROPOSAL_KIND_COMMUNITY_POOL_SPEND", + ProposalKind::UpgradePlan => "PROPOSAL_KIND_UPGRADE_PLAN", + ProposalKind::FreezeIbcClient => "PROPOSAL_KIND_FREEZE_IBC_CLIENT", + ProposalKind::UnfreezeIbcClient => "PROPOSAL_KIND_UNFREEZE_IBC_CLIENT", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -1493,13 +1393,7 @@ impl ProposalKind { /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the governance component. @@ -1522,8 +1416,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -1548,7 +1442,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -1594,7 +1488,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1623,7 +1518,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1652,7 +1548,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1681,7 +1578,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1710,7 +1608,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1739,7 +1638,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1774,7 +1674,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service 
was not ready: {}", e.into()), ) })?; @@ -1804,7 +1705,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1827,17 +1729,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { async fn proposal_info( &self, request: tonic::Request, @@ -1849,7 +1745,7 @@ pub mod query_service_server { type ProposalListStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; async fn proposal_list( &self, @@ -1876,7 +1772,7 @@ pub mod query_service_server { type ValidatorVotesStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; async fn validator_votes( &self, @@ -1899,7 +1795,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; async fn all_tallied_delegator_votes_for_proposal( &self, @@ -1915,7 +1811,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Used for computing voting power ? async fn proposal_rate_data( @@ -1928,18 +1824,20 @@ pub mod query_service_server { } /// Query operations for the governance component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -1989,8 +1887,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -2002,6 +1900,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.governance.v1.QueryService/ProposalInfo" => { #[allow(non_camel_case_types)] @@ -2032,6 +1931,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ProposalInfoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2078,6 +1978,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ProposalListSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2123,6 +2024,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ProposalDataSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2168,6 +2070,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NextProposalIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2214,6 +2117,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ValidatorVotesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2266,6 +2170,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = VotingPowerAtProposalStartSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2321,6 +2226,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AllTalliedDelegatorVotesForProposalSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = 
tonic::server::Grpc::new(codec) @@ -2369,6 +2275,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ProposalRateDataSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2387,25 +2294,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -2417,9 +2319,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.governance.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.governance.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.governance.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.governance.v1.serde.rs index e7467a0f7f..e5a2731256 100644 --- a/crates/proto/src/gen/penumbra.core.component.governance.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.governance.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for AllTalliedDelegatorVotesForProposalRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.AllTalliedDelegatorVotesForProposalRequest", len)?; if self.proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalId", ToString::to_string(&self.proposal_id).as_str())?; } struct_ser.end() @@ -1063,12 +1062,10 @@ impl serde::Serialize for DelegatorVoteBody { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.DelegatorVoteBody", len)?; if self.proposal != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposal", ToString::to_string(&self.proposal).as_str())?; } if self.start_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startPosition", ToString::to_string(&self.start_position).as_str())?; } if let Some(v) = self.vote.as_ref() { @@ -1276,12 +1273,10 @@ impl serde::Serialize for DelegatorVotePlan { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.DelegatorVotePlan", len)?; if self.proposal != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposal", ToString::to_string(&self.proposal).as_str())?; 
} if self.start_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startPosition", ToString::to_string(&self.start_position).as_str())?; } if let Some(v) = self.vote.as_ref() { @@ -1292,7 +1287,6 @@ impl serde::Serialize for DelegatorVotePlan { } if self.staked_note_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("stakedNotePosition", ToString::to_string(&self.staked_note_position).as_str())?; } if let Some(v) = self.unbonded_amount.as_ref() { @@ -1300,17 +1294,14 @@ impl serde::Serialize for DelegatorVotePlan { } if !self.randomizer.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("randomizer", pbjson::private::base64::encode(&self.randomizer).as_str())?; } if !self.proof_blinding_r.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingR", pbjson::private::base64::encode(&self.proof_blinding_r).as_str())?; } if !self.proof_blinding_s.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingS", pbjson::private::base64::encode(&self.proof_blinding_s).as_str())?; } struct_ser.end() @@ -2467,12 +2458,10 @@ impl serde::Serialize for EventProposalSubmit { } if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } if self.end_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endHeight", ToString::to_string(&self.end_height).as_str())?; } struct_ser.end() @@ -2698,7 +2687,6 @@ impl serde::Serialize for EventValidatorVote { } if self.voting_power != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("votingPower", ToString::to_string(&self.voting_power).as_str())?; } struct_ser.end() @@ -2917,7 +2905,6 @@ impl serde::Serialize for GovernanceParameters { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.GovernanceParameters", len)?; if self.proposal_voting_blocks != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalVotingBlocks", ToString::to_string(&self.proposal_voting_blocks).as_str())?; } if let Some(v) = self.proposal_deposit_amount.as_ref() { @@ -3149,7 +3136,6 @@ impl serde::Serialize for NextProposalIdResponse { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.NextProposalIdResponse", len)?; if self.next_proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nextProposalId", ToString::to_string(&self.next_proposal_id).as_str())?; } struct_ser.end() @@ -3258,7 +3244,6 @@ impl serde::Serialize for Proposal { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.Proposal", len)?; if self.id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("id", ToString::to_string(&self.id).as_str())?; } if !self.title.is_empty() { @@ -4114,7 +4099,6 @@ impl 
serde::Serialize for proposal::UpgradePlan { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.Proposal.UpgradePlan", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -4213,7 +4197,6 @@ impl serde::Serialize for ProposalDataRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ProposalDataRequest", len)?; if self.proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalId", ToString::to_string(&self.proposal_id).as_str())?; } struct_ser.end() @@ -4331,17 +4314,14 @@ impl serde::Serialize for ProposalDataResponse { } if self.start_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startBlockHeight", ToString::to_string(&self.start_block_height).as_str())?; } if self.end_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endBlockHeight", ToString::to_string(&self.end_block_height).as_str())?; } if self.start_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startPosition", ToString::to_string(&self.start_position).as_str())?; } if let Some(v) = self.state.as_ref() { @@ -4515,7 +4495,6 @@ impl serde::Serialize for ProposalDepositClaim { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ProposalDepositClaim", len)?; if self.proposal != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposal", ToString::to_string(&self.proposal).as_str())?; } if let Some(v) = self.deposit_amount.as_ref() { @@ -4643,7 +4622,6 @@ impl serde::Serialize for ProposalInfoRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ProposalInfoRequest", len)?; if self.proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalId", ToString::to_string(&self.proposal_id).as_str())?; } struct_ser.end() @@ -4746,12 +4724,10 @@ impl serde::Serialize for ProposalInfoResponse { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ProposalInfoResponse", len)?; if self.start_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startBlockHeight", ToString::to_string(&self.start_block_height).as_str())?; } if self.start_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startPosition", ToString::to_string(&self.start_position).as_str())?; } struct_ser.end() @@ -5064,17 +5040,14 @@ impl serde::Serialize for ProposalListResponse { } if self.start_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startBlockHeight", ToString::to_string(&self.start_block_height).as_str())?; } if self.end_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endBlockHeight", 
ToString::to_string(&self.end_block_height).as_str())?; } if self.start_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startPosition", ToString::to_string(&self.start_position).as_str())?; } if let Some(v) = self.state.as_ref() { @@ -5710,7 +5683,6 @@ impl serde::Serialize for ProposalRateDataRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ProposalRateDataRequest", len)?; if self.proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalId", ToString::to_string(&self.proposal_id).as_str())?; } struct_ser.end() @@ -6518,7 +6490,6 @@ impl serde::Serialize for ProposalWithdraw { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ProposalWithdraw", len)?; if self.proposal != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposal", ToString::to_string(&self.proposal).as_str())?; } if !self.reason.is_empty() { @@ -6634,12 +6605,10 @@ impl serde::Serialize for Ratio { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.Ratio", len)?; if self.numerator != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("numerator", ToString::to_string(&self.numerator).as_str())?; } if self.denominator != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("denominator", ToString::to_string(&self.denominator).as_str())?; } struct_ser.end() @@ -6757,17 +6726,14 @@ impl serde::Serialize for Tally { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.Tally", len)?; if self.yes != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("yes", ToString::to_string(&self.yes).as_str())?; } if self.no != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("no", ToString::to_string(&self.no).as_str())?; } if self.abstain != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("abstain", ToString::to_string(&self.abstain).as_str())?; } struct_ser.end() @@ -7017,7 +6983,6 @@ impl serde::Serialize for ValidatorVoteBody { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ValidatorVoteBody", len)?; if self.proposal != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposal", ToString::to_string(&self.proposal).as_str())?; } if let Some(v) = self.vote.as_ref() { @@ -7269,7 +7234,6 @@ impl serde::Serialize for ValidatorVotesRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ValidatorVotesRequest", len)?; if self.proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalId", ToString::to_string(&self.proposal_id).as_str())?; } struct_ser.end() @@ -7659,7 +7623,6 @@ impl serde::Serialize for VotingPowerAtProposalStartRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.VotingPowerAtProposalStartRequest", len)?; if 
self.proposal_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposalId", ToString::to_string(&self.proposal_id).as_str())?; } if let Some(v) = self.identity_key.as_ref() { @@ -7774,7 +7737,6 @@ impl serde::Serialize for VotingPowerAtProposalStartResponse { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.VotingPowerAtProposalStartResponse", len)?; if self.voting_power != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("votingPower", ToString::to_string(&self.voting_power).as_str())?; } struct_ser.end() @@ -7874,7 +7836,6 @@ impl serde::Serialize for ZkDelegatorVoteProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.governance.v1.ZKDelegatorVoteProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.ibc.v1.rs b/crates/proto/src/gen/penumbra.core.component.ibc.v1.rs index 014e3feb7b..995f53eef1 100644 --- a/crates/proto/src/gen/penumbra.core.component.ibc.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.ibc.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IbcRelay { #[prost(message, optional, tag = "1")] @@ -8,15 +8,13 @@ impl ::prost::Name for IbcRelay { const NAME: &'static str = "IbcRelay"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.IbcRelay".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.IbcRelay".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } /// FungibleTokenPacketData defines a struct for the packet payload /// See FungibleTokenPacketData spec: /// +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FungibleTokenPacketData { /// the token denomination to be transferred @@ -39,13 +37,11 @@ impl ::prost::Name for FungibleTokenPacketData { const NAME: &'static str = "FungibleTokenPacketData"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.FungibleTokenPacketData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.FungibleTokenPacketData".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } /// A Penumbra transaction action requesting an ICS20 transfer. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Ics20Withdrawal { #[prost(message, optional, tag = "1")] @@ -91,12 +87,10 @@ impl ::prost::Name for Ics20Withdrawal { const NAME: &'static str = "Ics20Withdrawal"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.Ics20Withdrawal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.Ics20Withdrawal".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ClientData { #[prost(string, tag = "1")] @@ -113,13 +107,11 @@ impl ::prost::Name for ClientData { const NAME: &'static str = "ClientData"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.ClientData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.ClientData".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ClientCounter { #[prost(uint64, tag = "1")] pub counter: u64, @@ -128,12 +120,10 @@ impl ::prost::Name for ClientCounter { const NAME: &'static str = "ClientCounter"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.ClientCounter".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.ClientCounter".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ConsensusState { #[prost(message, optional, tag = "1")] @@ -143,12 +133,10 @@ impl ::prost::Name for ConsensusState { const NAME: &'static str = "ConsensusState"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.ConsensusState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.ConsensusState".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct VerifiedHeights { #[prost(message, repeated, tag = "1")] @@ -158,13 +146,11 @@ impl ::prost::Name for VerifiedHeights { const NAME: &'static str = "VerifiedHeights"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.VerifiedHeights".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.VerifiedHeights".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ConnectionCounter { #[prost(uint64, tag = "1")] pub counter: u64, @@ -173,12 +159,10 @@ impl ::prost::Name for ConnectionCounter { const NAME: &'static str = "ConnectionCounter"; const PACKAGE: 
&'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.ConnectionCounter".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.ConnectionCounter".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ClientConnections { #[prost(string, repeated, tag = "1")] @@ -188,14 +172,12 @@ impl ::prost::Name for ClientConnections { const NAME: &'static str = "ClientConnections"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.ClientConnections".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.ClientConnections".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } /// IBC configuration data. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct IbcParameters { /// Whether IBC (forming connections, processing IBC packets) is enabled. #[prost(bool, tag = "1")] @@ -211,14 +193,12 @@ impl ::prost::Name for IbcParameters { const NAME: &'static str = "IbcParameters"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.IbcParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.IbcParameters".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } /// IBC genesis state. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// IBC parameters. 
#[prost(message, optional, tag = "1")] @@ -228,9 +208,6 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.ibc.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.ibc.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.ibc.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.ibc.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.component.ibc.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.ibc.v1.serde.rs index e610db1c7a..846d45e6c9 100644 --- a/crates/proto/src/gen/penumbra.core.component.ibc.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.ibc.v1.serde.rs @@ -107,7 +107,6 @@ impl serde::Serialize for ClientCounter { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.ibc.v1.ClientCounter", len)?; if self.counter != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("counter", ToString::to_string(&self.counter).as_str())?; } struct_ser.end() @@ -224,7 +223,6 @@ impl serde::Serialize for ClientData { } if self.processed_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("processedHeight", ToString::to_string(&self.processed_height).as_str())?; } struct_ser.end() @@ -360,7 +358,6 @@ impl serde::Serialize for ConnectionCounter { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.ibc.v1.ConnectionCounter", len)?; if self.counter != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("counter", ToString::to_string(&self.counter).as_str())?; } struct_ser.end() @@ -1084,7 +1081,6 @@ impl serde::Serialize for Ics20Withdrawal { } if self.timeout_time != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("timeoutTime", ToString::to_string(&self.timeout_time).as_str())?; } if !self.source_channel.is_empty() { diff --git a/crates/proto/src/gen/penumbra.core.component.sct.v1.rs b/crates/proto/src/gen/penumbra.core.component.sct.v1.rs index 3d111b51d0..24724a4047 100644 --- a/crates/proto/src/gen/penumbra.core.component.sct.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.sct.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// Configuration data for the SCT component. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct SctParameters { /// The default duration of each epoch, in number of blocks. #[prost(uint64, tag = "1")] @@ -10,14 +10,12 @@ impl ::prost::Name for SctParameters { const NAME: &'static str = "SctParameters"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.SctParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.SctParameters".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Sct-specific genesis content. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// The SctParameters present at genesis. #[prost(message, optional, tag = "1")] @@ -27,14 +25,12 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// An epoch is a sequentially numbered collection of contiguous blocks. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Epoch { /// The unique index of the epoch. #[prost(uint64, tag = "1")] @@ -47,16 +43,14 @@ impl ::prost::Name for Epoch { const NAME: &'static str = "Epoch"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.Epoch".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.Epoch".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Metadata describing the source of a commitment in the state commitment tree. /// /// This message allows clients to track provenance of state commitments, and to /// decide whether or not to download block data. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommitmentSource { #[prost(oneof = "commitment_source::Source", tags = "1, 2, 20, 30, 40")] @@ -65,16 +59,16 @@ pub struct CommitmentSource { /// Nested message and enum types in `CommitmentSource`. pub mod commitment_source { /// The state commitment was included in the genesis state. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Genesis {} impl ::prost::Name for Genesis { const NAME: &'static str = "Genesis"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.CommitmentSource.Genesis".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.CommitmentSource.Genesis".into() + ::prost::alloc::format!( + "penumbra.core.component.sct.v1.CommitmentSource.{}", Self::NAME + ) } } /// The commitment was created by a transaction. @@ -82,6 +76,7 @@ pub mod commitment_source { /// When included in a `CompactBlock` via a `StatePayload`, the transaction source is "dehydrated" /// by stripping the `id` field and putting empty bytes in its place. When clients perform extended /// transaction fetch, they should match up transaction hashes to "rehydrate" the source info. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Transaction { /// The transaction ID, if specified. 
@@ -95,14 +90,14 @@ pub mod commitment_source { const NAME: &'static str = "Transaction"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.CommitmentSource.Transaction".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.CommitmentSource.Transaction".into() + ::prost::alloc::format!( + "penumbra.core.component.sct.v1.CommitmentSource.{}", Self::NAME + ) } } /// The commitment was created through a validator's funding stream. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct FundingStreamReward { /// The epoch index the rewards were issued in. #[prost(uint64, tag = "1")] @@ -112,26 +107,26 @@ pub mod commitment_source { const NAME: &'static str = "FundingStreamReward"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.CommitmentSource.FundingStreamReward".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.CommitmentSource.FundingStreamReward".into() + ::prost::alloc::format!( + "penumbra.core.component.sct.v1.CommitmentSource.{}", Self::NAME + ) } } /// The commitment was created through a `CommunityPoolOutput` in a governance-initated transaction. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommunityPoolOutput {} impl ::prost::Name for CommunityPoolOutput { const NAME: &'static str = "CommunityPoolOutput"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.CommitmentSource.CommunityPoolOutput".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.CommitmentSource.CommunityPoolOutput".into() + ::prost::alloc::format!( + "penumbra.core.component.sct.v1.CommitmentSource.{}", Self::NAME + ) } } /// The commitment was created by an inbound ICS20 transfer. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Ics20Transfer { /// The sequence number of the packet that triggered the transfer @@ -148,12 +143,12 @@ pub mod commitment_source { const NAME: &'static str = "Ics20Transfer"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.CommitmentSource.Ics20Transfer".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.CommitmentSource.Ics20Transfer".into() + ::prost::alloc::format!( + "penumbra.core.component.sct.v1.CommitmentSource.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Source { #[prost(message, tag = "1")] @@ -172,12 +167,10 @@ impl ::prost::Name for CommitmentSource { const NAME: &'static str = "CommitmentSource"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.CommitmentSource".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.CommitmentSource".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Nullifier { #[prost(bytes = "vec", tag = "1")] @@ -187,13 +180,11 @@ impl ::prost::Name for Nullifier { const NAME: &'static str = "Nullifier"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.Nullifier".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.Nullifier".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Records information about what transaction spent a nullifier. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NullificationInfo { #[prost(bytes = "vec", tag = "1")] @@ -205,13 +196,11 @@ impl ::prost::Name for NullificationInfo { const NAME: &'static str = "NullificationInfo"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.NullificationInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.NullificationInfo".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Event recording a new commitment added to the SCT. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventCommitment { #[prost(message, optional, tag = "1")] @@ -227,13 +216,11 @@ impl ::prost::Name for EventCommitment { const NAME: &'static str = "EventCommitment"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.EventCommitment".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.EventCommitment".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Event recording an SCT anchor (global root). 
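The `CommitmentSource` oneof above is what lets a client track provenance and decide whether it needs full block or transaction data. The sketch below is a hypothetical, trimmed-down mirror of those variants (not the generated types), with an illustrative fetch policy for dehydrated transaction sources:

```rust
// Hypothetical mirror of the `CommitmentSource` variants, for illustration only.
#[allow(dead_code)]
enum CommitmentSource {
    Genesis,
    /// Dehydrated in a `CompactBlock`: the id is left empty until the client
    /// performs extended transaction fetch.
    Transaction { id: Vec<u8> },
    FundingStreamReward { epoch_index: u64 },
    CommunityPoolOutput,
    Ics20Transfer { packet_seq: u64, channel_id: String },
}

/// Illustrative policy: only dehydrated transaction sources need a follow-up fetch.
fn needs_transaction_fetch(source: &CommitmentSource) -> bool {
    matches!(source, CommitmentSource::Transaction { id } if id.is_empty())
}

fn main() {
    let dehydrated = CommitmentSource::Transaction { id: Vec::new() };
    assert!(needs_transaction_fetch(&dehydrated));
    assert!(!needs_transaction_fetch(&CommitmentSource::Genesis));
}
```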
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventAnchor { #[prost(message, optional, tag = "1")] @@ -249,13 +236,11 @@ impl ::prost::Name for EventAnchor { const NAME: &'static str = "EventAnchor"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.EventAnchor".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.EventAnchor".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Event recording an SCT epoch root. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventEpochRoot { #[prost(message, optional, tag = "1")] @@ -271,13 +256,11 @@ impl ::prost::Name for EventEpochRoot { const NAME: &'static str = "EventEpochRoot"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.EventEpochRoot".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.EventEpochRoot".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Event recording an SCT block root. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventBlockRoot { #[prost(message, optional, tag = "1")] @@ -293,13 +276,11 @@ impl ::prost::Name for EventBlockRoot { const NAME: &'static str = "EventBlockRoot"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.EventBlockRoot".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.EventBlockRoot".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct EpochByHeightRequest { #[prost(uint64, tag = "1")] pub height: u64, @@ -308,13 +289,11 @@ impl ::prost::Name for EpochByHeightRequest { const NAME: &'static str = "EpochByHeightRequest"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.EpochByHeightRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.EpochByHeightRequest".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct EpochByHeightResponse { #[prost(message, optional, tag = "1")] pub epoch: ::core::option::Option, @@ -323,13 +302,11 @@ impl ::prost::Name for EpochByHeightResponse { const NAME: &'static str = "EpochByHeightResponse"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.EpochByHeightResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.EpochByHeightResponse".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] 
+#[derive(Clone, PartialEq, ::prost::Message)] pub struct AnchorByHeightRequest { #[prost(uint64, tag = "1")] pub height: u64, @@ -338,12 +315,10 @@ impl ::prost::Name for AnchorByHeightRequest { const NAME: &'static str = "AnchorByHeightRequest"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.AnchorByHeightRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.AnchorByHeightRequest".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AnchorByHeightResponse { #[prost(message, optional, tag = "1")] @@ -355,13 +330,11 @@ impl ::prost::Name for AnchorByHeightResponse { const NAME: &'static str = "AnchorByHeightResponse"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.AnchorByHeightResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.AnchorByHeightResponse".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct TimestampByHeightRequest { #[prost(uint64, tag = "1")] pub height: u64, @@ -370,13 +343,11 @@ impl ::prost::Name for TimestampByHeightRequest { const NAME: &'static str = "TimestampByHeightRequest"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.TimestampByHeightRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.TimestampByHeightRequest".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct TimestampByHeightResponse { #[prost(message, optional, tag = "1")] pub timestamp: ::core::option::Option<::pbjson_types::Timestamp>, @@ -385,22 +356,13 @@ impl ::prost::Name for TimestampByHeightResponse { const NAME: &'static str = "TimestampByHeightResponse"; const PACKAGE: &'static str = "penumbra.core.component.sct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.sct.v1.TimestampByHeightResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.sct.v1.TimestampByHeightResponse".into() + ::prost::alloc::format!("penumbra.core.component.sct.v1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the SCT component. 
@@ -423,8 +385,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -449,7 +411,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -495,7 +457,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -524,7 +487,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -553,7 +517,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -576,17 +541,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { async fn anchor_by_height( &self, request: tonic::Request, @@ -611,18 +570,20 @@ pub mod query_service_server { } /// Query operations for the SCT component. 
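The client hunks above replace `tonic::Status::unknown(..)` with `tonic::Status::new(tonic::Code::Unknown, ..)`. Assuming the `tonic` dependency these stubs already require, the two constructions produce the same status, so the revert should be behavior-preserving here:

```rust
// Assumes the `tonic` crate is available, as it is for the generated rpc code.
use tonic::{Code, Status};

fn main() {
    let msg = format!("Service was not ready: {}", "transport error");
    // Restored by this revert:
    let old_style = Status::new(Code::Unknown, msg.clone());
    // Newer codegen shorthand:
    let new_style = Status::unknown(msg);
    assert_eq!(old_style.code(), new_style.code());
    assert_eq!(old_style.message(), new_style.message());
}
```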
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -672,8 +633,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -685,6 +646,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.sct.v1.QueryService/AnchorByHeight" => { #[allow(non_camel_case_types)] @@ -715,6 +677,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AnchorByHeightSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -760,6 +723,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = EpochByHeightSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -806,6 +770,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TimestampByHeightSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -824,25 +789,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -854,9 +814,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.sct.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.sct.v1.QueryService"; 
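The restored fallback arm above answers unknown RPC paths by hand-building an HTTP 200 response whose `grpc-status` header is `"12"`, i.e. gRPC `UNIMPLEMENTED`. A small sketch of that response, assuming the `http` crate the generated server code already uses:

```rust
// Sketch of the restored fallback response; gRPC code 12 is UNIMPLEMENTED.
fn unimplemented_response() -> http::Response<()> {
    http::Response::builder()
        .status(200)
        .header("grpc-status", "12") // tonic::Code::Unimplemented as i32 == 12
        .header("content-type", "application/grpc")
        .body(())
        .expect("statically valid response parts")
}

fn main() {
    let resp = unimplemented_response();
    assert_eq!(
        resp.headers().get("grpc-status").and_then(|v| v.to_str().ok()),
        Some("12"),
    );
}
```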
} } diff --git a/crates/proto/src/gen/penumbra.core.component.sct.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.sct.v1.serde.rs index 064ca3d29f..773dcedfea 100644 --- a/crates/proto/src/gen/penumbra.core.component.sct.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.sct.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for AnchorByHeightRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.AnchorByHeightRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -433,7 +432,6 @@ impl serde::Serialize for commitment_source::FundingStreamReward { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.CommitmentSource.FundingStreamReward", len)?; if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } struct_ser.end() @@ -611,7 +609,6 @@ impl serde::Serialize for commitment_source::Ics20Transfer { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.CommitmentSource.Ics20Transfer", len)?; if self.packet_seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("packetSeq", ToString::to_string(&self.packet_seq).as_str())?; } if !self.channel_id.is_empty() { @@ -740,7 +737,6 @@ impl serde::Serialize for commitment_source::Transaction { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.CommitmentSource.Transaction", len)?; if !self.id.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("id", pbjson::private::base64::encode(&self.id).as_str())?; } struct_ser.end() @@ -842,12 +838,10 @@ impl serde::Serialize for Epoch { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.Epoch", len)?; if self.index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("index", ToString::to_string(&self.index).as_str())?; } if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } struct_ser.end() @@ -960,7 +954,6 @@ impl serde::Serialize for EpochByHeightRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.EpochByHeightRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -1163,7 +1156,6 @@ impl serde::Serialize for EventAnchor { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.timestamp.as_ref() { @@ -1296,7 +1288,6 @@ impl serde::Serialize for EventBlockRoot { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = 
self.timestamp.as_ref() { @@ -1429,7 +1420,6 @@ impl serde::Serialize for EventCommitment { } if self.position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("position", ToString::to_string(&self.position).as_str())?; } if let Some(v) = self.source.as_ref() { @@ -1562,7 +1552,6 @@ impl serde::Serialize for EventEpochRoot { } if self.index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("index", ToString::to_string(&self.index).as_str())?; } if let Some(v) = self.timestamp.as_ref() { @@ -1785,12 +1774,10 @@ impl serde::Serialize for NullificationInfo { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.NullificationInfo", len)?; if !self.id.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("id", pbjson::private::base64::encode(&self.id).as_str())?; } if self.spend_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("spendHeight", ToString::to_string(&self.spend_height).as_str())?; } struct_ser.end() @@ -1903,7 +1890,6 @@ impl serde::Serialize for Nullifier { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.Nullifier", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -2002,7 +1988,6 @@ impl serde::Serialize for SctParameters { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.SctParameters", len)?; if self.epoch_duration != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochDuration", ToString::to_string(&self.epoch_duration).as_str())?; } struct_ser.end() @@ -2102,7 +2087,6 @@ impl serde::Serialize for TimestampByHeightRequest { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.sct.v1.TimestampByHeightRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.rs b/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.rs index a0170d3e98..e287e3e5e6 100644 --- a/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// Configuration data for the shielded pool component. 
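The serde hunks above only drop a duplicated clippy allow on fields that are serialized via `ToString`. The stringification itself is required by the proto3 JSON mapping, which renders 64-bit integers as decimal strings; a tiny illustration:

```rust
// Proto3 JSON renders 64-bit integers as decimal strings, which is why the
// generated serde impls call `ToString::to_string` on these fields.
fn height_as_json_value(height: u64) -> String {
    format!("\"{}\"", height)
}

fn main() {
    assert_eq!(height_as_json_value(123_456_789), "\"123456789\"");
}
```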
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ShieldedPoolParameters { #[deprecated] #[prost(message, optional, tag = "1")] @@ -12,13 +12,13 @@ impl ::prost::Name for ShieldedPoolParameters { const NAME: &'static str = "ShieldedPoolParameters"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.ShieldedPoolParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.ShieldedPoolParameters".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Genesis data for the shielded pool component. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { #[prost(message, optional, tag = "1")] @@ -29,6 +29,7 @@ pub struct GenesisContent { } /// Nested message and enum types in `GenesisContent`. pub mod genesis_content { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Allocation { #[prost(message, optional, tag = "1")] @@ -44,10 +45,9 @@ pub mod genesis_content { const NAME: &'static str = "Allocation"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.GenesisContent.Allocation".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.GenesisContent.Allocation".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.GenesisContent.{}", Self::NAME + ) } } } @@ -55,14 +55,14 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.GenesisContent".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// The parameters which control how the FMD parameters evolve over time. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FmdMetaParameters { /// How much time users have to transition to new parameters. #[prost(uint64, tag = "1")] @@ -74,7 +74,8 @@ pub struct FmdMetaParameters { /// Nested message and enum types in `FmdMetaParameters`. pub mod fmd_meta_parameters { /// A sliding window algorithm for updating the parameters. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct AlgorithmSlidingWindow { /// The window size, in terms of the number of update periods. 
/// @@ -89,16 +90,15 @@ pub mod fmd_meta_parameters { const NAME: &'static str = "AlgorithmSlidingWindow"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FmdMetaParameters.AlgorithmSlidingWindow" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FmdMetaParameters.AlgorithmSlidingWindow" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.FmdMetaParameters.{}", + Self::NAME + ) } } /// The algorithm governing how the parameters change. - #[derive(Clone, Copy, PartialEq, ::prost::Oneof)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Algorithm { #[prost(uint32, tag = "2")] FixedPrecisionBits(u32), @@ -110,14 +110,14 @@ impl ::prost::Name for FmdMetaParameters { const NAME: &'static str = "FmdMetaParameters"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FmdMetaParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FmdMetaParameters".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Used to potentially store state for the FMD Meta Parameters algorithm. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FmdMetaParametersAlgorithmState { #[prost(oneof = "fmd_meta_parameters_algorithm_state::State", tags = "1, 2")] pub state: ::core::option::Option, @@ -125,22 +125,22 @@ pub struct FmdMetaParametersAlgorithmState { /// Nested message and enum types in `FmdMetaParametersAlgorithmState`. pub mod fmd_meta_parameters_algorithm_state { /// The state used for the fixed algorithm. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct FixedState {} impl ::prost::Name for FixedState { const NAME: &'static str = "FixedState"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState.FixedState" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState.FixedState" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState.{}", + Self::NAME + ) } } /// The state used for the sliding window algorithm. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct SlidingWindowState { /// The number of clues previously observed, approximately. 
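The FMD meta-parameters above choose a number of precision bits, either fixed or driven by the sliding-window state. Illustratively, each precision bit halves the clue false-positive rate; the sketch below shows only that relationship and is not Penumbra's parameter-update algorithm:

```rust
// Illustration of the precision / false-positive trade-off only.
fn clue_false_positive_rate(precision_bits: u32) -> f64 {
    2f64.powi(-(precision_bits as i32))
}

fn main() {
    assert_eq!(clue_false_positive_rate(0), 1.0);
    assert_eq!(clue_false_positive_rate(3), 0.125);
}
```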
#[prost(uint32, tag = "1")] @@ -150,15 +150,14 @@ pub mod fmd_meta_parameters_algorithm_state { const NAME: &'static str = "SlidingWindowState"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState.SlidingWindowState" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState.SlidingWindowState" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState.{}", + Self::NAME + ) } } - #[derive(Clone, Copy, PartialEq, ::prost::Oneof)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum State { #[prost(message, tag = "1")] Fixed(FixedState), @@ -170,15 +169,14 @@ impl ::prost::Name for FmdMetaParametersAlgorithmState { const NAME: &'static str = "FmdMetaParametersAlgorithmState"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FmdMetaParametersAlgorithmState" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Parameters for Fuzzy Message Detection -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FmdParameters { #[prost(uint32, tag = "1")] pub precision_bits: u32, @@ -189,12 +187,12 @@ impl ::prost::Name for FmdParameters { const NAME: &'static str = "FmdParameters"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FmdParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FmdParameters".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Note { #[prost(message, optional, tag = "1")] @@ -208,12 +206,12 @@ impl ::prost::Name for Note { const NAME: &'static str = "Note"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.Note".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.Note".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NoteView { #[prost(message, optional, tag = "1")] @@ -227,14 +225,14 @@ impl ::prost::Name for NoteView { const NAME: &'static str = "NoteView"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.NoteView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.NoteView".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// An encrypted note. 
/// 176 = 80(address) + 16(amount) + 32(asset ID) + 32(rseed) + 16(MAC) bytes. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NoteCiphertext { #[prost(bytes = "vec", tag = "1")] @@ -244,14 +242,14 @@ impl ::prost::Name for NoteCiphertext { const NAME: &'static str = "NoteCiphertext"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.NoteCiphertext".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.NoteCiphertext".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// The body of an output description, including only the minimal /// data required to scan and process the output. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NotePayload { /// The note commitment for the output note. 32 bytes. @@ -271,13 +269,13 @@ impl ::prost::Name for NotePayload { const NAME: &'static str = "NotePayload"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.NotePayload".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.NotePayload".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// A Penumbra ZK output proof. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkOutputProof { #[prost(bytes = "vec", tag = "1")] @@ -287,13 +285,13 @@ impl ::prost::Name for ZkOutputProof { const NAME: &'static str = "ZKOutputProof"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.ZKOutputProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.ZKOutputProof".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// A Penumbra ZK spend proof. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkSpendProof { #[prost(bytes = "vec", tag = "1")] @@ -303,13 +301,13 @@ impl ::prost::Name for ZkSpendProof { const NAME: &'static str = "ZKSpendProof"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.ZKSpendProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.ZKSpendProof".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// A Penumbra ZK nullifier derivation proof. 
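The `NoteCiphertext` comment above spells out the ciphertext layout; the constants below just restate that arithmetic as a checked sum, with the component sizes taken verbatim from the comment:

```rust
// Restates the layout arithmetic from the `NoteCiphertext` doc comment.
const ADDRESS_BYTES: usize = 80;
const AMOUNT_BYTES: usize = 16;
const ASSET_ID_BYTES: usize = 32;
const RSEED_BYTES: usize = 32;
const MAC_BYTES: usize = 16;

const NOTE_CIPHERTEXT_BYTES: usize =
    ADDRESS_BYTES + AMOUNT_BYTES + ASSET_ID_BYTES + RSEED_BYTES + MAC_BYTES;

fn main() {
    assert_eq!(NOTE_CIPHERTEXT_BYTES, 176);
}
```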
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkNullifierDerivationProof { #[prost(bytes = "vec", tag = "1")] @@ -319,13 +317,13 @@ impl ::prost::Name for ZkNullifierDerivationProof { const NAME: &'static str = "ZKNullifierDerivationProof"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.ZKNullifierDerivationProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.ZKNullifierDerivationProof".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Spends a shielded note. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Spend { /// The effecting data of the spend. @@ -344,13 +342,13 @@ impl ::prost::Name for Spend { const NAME: &'static str = "Spend"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.Spend".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.Spend".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// ABCI Event recording a spend. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventSpend { #[prost(message, optional, tag = "1")] @@ -360,13 +358,13 @@ impl ::prost::Name for EventSpend { const NAME: &'static str = "EventSpend"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.EventSpend".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.EventSpend".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// ABCI Event recording an output. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventOutput { #[prost(message, optional, tag = "1")] @@ -378,13 +376,13 @@ impl ::prost::Name for EventOutput { const NAME: &'static str = "EventOutput"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.EventOutput".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.EventOutput".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// ABCI Event recording a clue. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventBroadcastClue { #[prost(message, optional, tag = "1")] @@ -398,15 +396,15 @@ impl ::prost::Name for EventBroadcastClue { const NAME: &'static str = "EventBroadcastClue"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.EventBroadcastClue".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.EventBroadcastClue".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// The body of a spend description, containing only the effecting data /// describing changes to the ledger, and not the authorizing data that allows /// those changes to be performed. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendBody { /// A commitment to the value of the input note. @@ -430,12 +428,12 @@ impl ::prost::Name for SpendBody { const NAME: &'static str = "SpendBody"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.SpendBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.SpendBody".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendView { #[prost(oneof = "spend_view::SpendView", tags = "1, 2")] @@ -443,6 +441,7 @@ pub struct SpendView { } /// Nested message and enum types in `SpendView`. 
pub mod spend_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Visible { #[prost(message, optional, tag = "1")] @@ -454,12 +453,12 @@ pub mod spend_view { const NAME: &'static str = "Visible"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.SpendView.Visible".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.SpendView.Visible".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.SpendView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -469,12 +468,12 @@ pub mod spend_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.SpendView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.SpendView.Opaque".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.SpendView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum SpendView { #[prost(message, tag = "1")] @@ -487,12 +486,12 @@ impl ::prost::Name for SpendView { const NAME: &'static str = "SpendView"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.SpendView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.SpendView".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendPlan { /// The plaintext note we plan to spend. @@ -518,13 +517,13 @@ impl ::prost::Name for SpendPlan { const NAME: &'static str = "SpendPlan"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.SpendPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.SpendPlan".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Creates a new shielded note. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Output { /// The effecting data for the output. @@ -538,15 +537,15 @@ impl ::prost::Name for Output { const NAME: &'static str = "Output"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.Output".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.Output".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// The body of an output description, containing only the effecting data /// describing changes to the ledger, and not the authorizing data that allows /// those changes to be performed. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct OutputBody { /// The minimal data required to scan and process the new output note. @@ -569,12 +568,12 @@ impl ::prost::Name for OutputBody { const NAME: &'static str = "OutputBody"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.OutputBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.OutputBody".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct OutputView { #[prost(oneof = "output_view::OutputView", tags = "1, 2")] @@ -582,6 +581,7 @@ pub struct OutputView { } /// Nested message and enum types in `OutputView`. pub mod output_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Visible { #[prost(message, optional, tag = "1")] @@ -597,12 +597,12 @@ pub mod output_view { const NAME: &'static str = "Visible"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.OutputView.Visible".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.OutputView.Visible".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.OutputView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -612,12 +612,12 @@ pub mod output_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.OutputView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.OutputView.Opaque".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.OutputView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum OutputView { #[prost(message, tag = "1")] @@ -630,12 +630,12 @@ impl ::prost::Name for OutputView { const NAME: &'static str = "OutputView"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.OutputView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.OutputView".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct OutputPlan { /// The value to send to this output. 
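`SpendView` and `OutputView` above follow the same visible/opaque pattern: a view either carries decrypted contents or only the on-chain data. A hypothetical, trimmed-down version of that shape, with stand-in field types rather than the generated ones:

```rust
// Hypothetical trimmed-down view type, illustration only.
#[allow(dead_code)]
enum OutputView {
    /// The viewing key could decrypt this output; decrypted contents are available.
    Visible { note: String },
    /// Only the on-chain ciphertext is known.
    Opaque,
}

fn describe(view: &OutputView) -> &'static str {
    match view {
        OutputView::Visible { .. } => "decrypted by this viewing key",
        OutputView::Opaque => "not visible to this viewing key",
    }
}

fn main() {
    assert_eq!(describe(&OutputView::Opaque), "not visible to this viewing key");
}
```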
@@ -661,13 +661,13 @@ impl ::prost::Name for OutputPlan { const NAME: &'static str = "OutputPlan"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.OutputPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.OutputPlan".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Requests information on an asset by asset id +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetMetadataByIdRequest { /// The asset id to request information on. @@ -678,12 +678,12 @@ impl ::prost::Name for AssetMetadataByIdRequest { const NAME: &'static str = "AssetMetadataByIdRequest"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.AssetMetadataByIdRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.AssetMetadataByIdRequest".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetMetadataByIdResponse { /// If present, information on the requested asset. @@ -696,13 +696,13 @@ impl ::prost::Name for AssetMetadataByIdResponse { const NAME: &'static str = "AssetMetadataByIdResponse"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.AssetMetadataByIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.AssetMetadataByIdResponse".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Requests information on an asset by multiple asset ids +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetMetadataByIdsRequest { /// The asset IDs to request information on. Note that node is neither required @@ -714,12 +714,12 @@ impl ::prost::Name for AssetMetadataByIdsRequest { const NAME: &'static str = "AssetMetadataByIdsRequest"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.AssetMetadataByIdsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.AssetMetadataByIdsRequest".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetMetadataByIdsResponse { /// A single asset metadata streamed from the node. 
@@ -730,16 +730,16 @@ impl ::prost::Name for AssetMetadataByIdsResponse { const NAME: &'static str = "AssetMetadataByIdsResponse"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.AssetMetadataByIdsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.AssetMetadataByIdsResponse".into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Metadata about the packet associated with the transfer. /// /// This allows identifying which specific packet is associated with the transfer. /// Implicitly, both ports are going to be "transfer". +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FungibleTokenTransferPacketMetadata { /// The identifier for the channel on *this* chain. @@ -753,14 +753,12 @@ impl ::prost::Name for FungibleTokenTransferPacketMetadata { const NAME: &'static str = "FungibleTokenTransferPacketMetadata"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.FungibleTokenTransferPacketMetadata" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.FungibleTokenTransferPacketMetadata" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventOutboundFungibleTokenTransfer { /// The value being transferred out of the chain. @@ -779,14 +777,12 @@ impl ::prost::Name for EventOutboundFungibleTokenTransfer { const NAME: &'static str = "EventOutboundFungibleTokenTransfer"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.EventOutboundFungibleTokenTransfer" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.EventOutboundFungibleTokenTransfer" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventOutboundFungibleTokenRefund { /// The value being refunded. @@ -836,9 +832,9 @@ pub mod event_outbound_fungible_token_refund { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "REASON_UNSPECIFIED", - Self::Timeout => "REASON_TIMEOUT", - Self::Error => "REASON_ERROR", + Reason::Unspecified => "REASON_UNSPECIFIED", + Reason::Timeout => "REASON_TIMEOUT", + Reason::Error => "REASON_ERROR", } } /// Creates an enum from field names used in the ProtoBuf definition. 
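The hunk above only changes how the `Reason` match arms are spelled (`Reason::`-prefixed arms restored in place of `Self::`); behavior is identical. A tiny stand-alone version of the `as_str_name` pattern:

```rust
// Stand-alone copy of the `as_str_name` pattern; either `Self::` or `Reason::`
// in the match arms compiles to the same thing.
enum Reason {
    Unspecified,
    Timeout,
    Error,
}

impl Reason {
    fn as_str_name(&self) -> &'static str {
        match self {
            Reason::Unspecified => "REASON_UNSPECIFIED",
            Reason::Timeout => "REASON_TIMEOUT",
            Reason::Error => "REASON_ERROR",
        }
    }
}

fn main() {
    assert_eq!(Reason::Unspecified.as_str_name(), "REASON_UNSPECIFIED");
    assert_eq!(Reason::Timeout.as_str_name(), "REASON_TIMEOUT");
    assert_eq!(Reason::Error.as_str_name(), "REASON_ERROR");
}
```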
@@ -856,14 +852,12 @@ impl ::prost::Name for EventOutboundFungibleTokenRefund { const NAME: &'static str = "EventOutboundFungibleTokenRefund"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.EventOutboundFungibleTokenRefund" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.EventOutboundFungibleTokenRefund" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventInboundFungibleTokenTransfer { /// The value being transferred in. @@ -882,24 +876,15 @@ impl ::prost::Name for EventInboundFungibleTokenTransfer { const NAME: &'static str = "EventInboundFungibleTokenTransfer"; const PACKAGE: &'static str = "penumbra.core.component.shielded_pool.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.shielded_pool.v1.EventInboundFungibleTokenTransfer" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.shielded_pool.v1.EventInboundFungibleTokenTransfer" - .into() + ::prost::alloc::format!( + "penumbra.core.component.shielded_pool.v1.{}", Self::NAME + ) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the shielded pool component. @@ -922,8 +907,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -948,7 +933,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -994,7 +979,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1028,7 +1014,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1051,17 +1038,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. 
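Both generated servers (the SCT one above and the shielded-pool one in the hunks below) toggle between storing the service as a private `_Inner(Arc<T>)` newtype and as `Arc<T>` directly. Either way, the per-request clone is just a reference-count bump; a minimal sketch:

```rust
use std::sync::Arc;

// The older codegen's private wrapper around the service implementation.
struct _Inner<T>(Arc<T>);

impl<T> Clone for _Inner<T> {
    fn clone(&self) -> Self {
        Self(Arc::clone(&self.0))
    }
}

struct MyService;

fn main() {
    // Old layout: Arc behind a newtype.
    let wrapped = _Inner(Arc::new(MyService));
    let _wrapped_clone = wrapped.clone();
    assert_eq!(Arc::strong_count(&wrapped.0), 2);

    // New layout: Arc stored directly. Cloning is the same atomic increment.
    let direct = Arc::new(MyService);
    let _direct_clone = Arc::clone(&direct);
    assert_eq!(Arc::strong_count(&direct), 2);
}
```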
#[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { async fn asset_metadata_by_id( &self, request: tonic::Request, @@ -1076,7 +1057,7 @@ pub mod query_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Requests a stream of asset metadata, given an array of asset IDs. Responses /// may be streamed in a different order from that of the asset IDs in the @@ -1093,18 +1074,20 @@ pub mod query_service_server { } /// Query operations for the shielded pool component. #[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -1154,8 +1137,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -1167,6 +1150,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.shielded_pool.v1.QueryService/AssetMetadataById" => { #[allow(non_camel_case_types)] @@ -1198,6 +1182,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AssetMetadataByIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1246,6 +1231,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AssetMetadataByIdsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1264,25 +1250,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1294,9 +1275,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.shielded_pool.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + 
Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.shielded_pool.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.serde.rs index 856dab92e1..c77243e1d0 100644 --- a/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.shielded_pool.v1.serde.rs @@ -1233,7 +1233,6 @@ impl serde::Serialize for FmdMetaParameters { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.shielded_pool.v1.FmdMetaParameters", len)?; if self.fmd_grace_period_blocks != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("fmdGracePeriodBlocks", ToString::to_string(&self.fmd_grace_period_blocks).as_str())?; } if let Some(v) = self.algorithm.as_ref() { @@ -1774,7 +1773,6 @@ impl serde::Serialize for FmdParameters { } if self.as_of_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("asOfBlockHeight", ToString::to_string(&self.as_of_block_height).as_str())?; } struct_ser.end() @@ -1894,7 +1892,6 @@ impl serde::Serialize for FungibleTokenTransferPacketMetadata { } if self.sequence != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sequence", ToString::to_string(&self.sequence).as_str())?; } struct_ser.end() @@ -2255,7 +2252,6 @@ impl serde::Serialize for Note { } if !self.rseed.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("rseed", pbjson::private::base64::encode(&self.rseed).as_str())?; } if let Some(v) = self.address.as_ref() { @@ -2379,7 +2375,6 @@ impl serde::Serialize for NoteCiphertext { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.shielded_pool.v1.NoteCiphertext", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -2487,7 +2482,6 @@ impl serde::Serialize for NotePayload { } if !self.ephemeral_key.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("ephemeralKey", pbjson::private::base64::encode(&self.ephemeral_key).as_str())?; } if let Some(v) = self.encrypted_note.as_ref() { @@ -2623,7 +2617,6 @@ impl serde::Serialize for NoteView { } if !self.rseed.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("rseed", pbjson::private::base64::encode(&self.rseed).as_str())?; } if let Some(v) = self.address.as_ref() { @@ -2874,12 +2867,10 @@ impl serde::Serialize for OutputBody { } if !self.wrapped_memo_key.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("wrappedMemoKey", pbjson::private::base64::encode(&self.wrapped_memo_key).as_str())?; } if !self.ovk_wrapped_key.is_empty() { #[allow(clippy::needless_borrow)] - 
#[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("ovkWrappedKey", pbjson::private::base64::encode(&self.ovk_wrapped_key).as_str())?; } struct_ser.end() @@ -3038,22 +3029,18 @@ impl serde::Serialize for OutputPlan { } if !self.rseed.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("rseed", pbjson::private::base64::encode(&self.rseed).as_str())?; } if !self.value_blinding.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("valueBlinding", pbjson::private::base64::encode(&self.value_blinding).as_str())?; } if !self.proof_blinding_r.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingR", pbjson::private::base64::encode(&self.proof_blinding_r).as_str())?; } if !self.proof_blinding_s.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingS", pbjson::private::base64::encode(&self.proof_blinding_s).as_str())?; } struct_ser.end() @@ -3817,7 +3804,6 @@ impl serde::Serialize for SpendBody { } if !self.encrypted_backref.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("encryptedBackref", pbjson::private::base64::encode(&self.encrypted_backref).as_str())?; } struct_ser.end() @@ -3969,27 +3955,22 @@ impl serde::Serialize for SpendPlan { } if self.position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("position", ToString::to_string(&self.position).as_str())?; } if !self.randomizer.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("randomizer", pbjson::private::base64::encode(&self.randomizer).as_str())?; } if !self.value_blinding.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("valueBlinding", pbjson::private::base64::encode(&self.value_blinding).as_str())?; } if !self.proof_blinding_r.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingR", pbjson::private::base64::encode(&self.proof_blinding_r).as_str())?; } if !self.proof_blinding_s.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingS", pbjson::private::base64::encode(&self.proof_blinding_s).as_str())?; } struct_ser.end() @@ -4474,7 +4455,6 @@ impl serde::Serialize for ZkNullifierDerivationProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.shielded_pool.v1.ZKNullifierDerivationProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -4573,7 +4553,6 @@ impl serde::Serialize for ZkOutputProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.shielded_pool.v1.ZKOutputProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", 
pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -4672,7 +4651,6 @@ impl serde::Serialize for ZkSpendProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.shielded_pool.v1.ZKSpendProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.component.stake.v1.rs b/crates/proto/src/gen/penumbra.core.component.stake.v1.rs index 46531b65a3..27b7ffe457 100644 --- a/crates/proto/src/gen/penumbra.core.component.stake.v1.rs +++ b/crates/proto/src/gen/penumbra.core.component.stake.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// A Penumbra ZK undelegate claim proof. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ZkUndelegateClaimProof { #[prost(bytes = "vec", tag = "1")] @@ -9,13 +9,11 @@ impl ::prost::Name for ZkUndelegateClaimProof { const NAME: &'static str = "ZKUndelegateClaimProof"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ZKUndelegateClaimProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ZKUndelegateClaimProof".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Describes a validator's configuration data. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Validator { /// The validator's identity verification key. @@ -56,13 +54,11 @@ impl ::prost::Name for Validator { const NAME: &'static str = "Validator"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.Validator".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.Validator".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// For storing the list of keys of known validators. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorList { #[prost(message, repeated, tag = "1")] @@ -74,13 +70,11 @@ impl ::prost::Name for ValidatorList { const NAME: &'static str = "ValidatorList"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorList".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorList".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// A portion of a validator's commission. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FundingStream { /// The recipient of the funding stream. @@ -89,6 +83,7 @@ pub struct FundingStream { } /// Nested message and enum types in `FundingStream`. pub mod funding_stream { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ToAddress { /// The destination address for the funding stream. 
@@ -103,13 +98,13 @@ pub mod funding_stream { const NAME: &'static str = "ToAddress"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.FundingStream.ToAddress".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.FundingStream.ToAddress".into() + ::prost::alloc::format!( + "penumbra.core.component.stake.v1.FundingStream.{}", Self::NAME + ) } } - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct ToCommunityPool { /// The portion of the staking reward for the entire delegation pool /// allocated to this funding stream, specified in basis points. @@ -120,13 +115,13 @@ pub mod funding_stream { const NAME: &'static str = "ToCommunityPool"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.FundingStream.ToCommunityPool".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.FundingStream.ToCommunityPool".into() + ::prost::alloc::format!( + "penumbra.core.component.stake.v1.FundingStream.{}", Self::NAME + ) } } /// The recipient of the funding stream. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Recipient { #[prost(message, tag = "1")] @@ -139,13 +134,11 @@ impl ::prost::Name for FundingStream { const NAME: &'static str = "FundingStream"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.FundingStream".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.FundingStream".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Describes the reward and exchange rates and voting power for a validator in some epoch. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RateData { #[prost(message, optional, tag = "1")] @@ -170,14 +163,12 @@ impl ::prost::Name for RateData { const NAME: &'static str = "RateData"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.RateData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.RateData".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Describes the base reward and exchange rates in some epoch. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct BaseRateData { #[prost(uint64, tag = "1")] pub epoch_index: u64, @@ -190,13 +181,11 @@ impl ::prost::Name for BaseRateData { const NAME: &'static str = "BaseRateData"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.BaseRateData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.BaseRateData".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Describes the current state of a validator on-chain +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorStatus { #[prost(message, optional, tag = "1")] @@ -212,14 +201,12 @@ impl ::prost::Name for ValidatorStatus { const NAME: &'static str = "ValidatorStatus"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorStatus".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorStatus".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Describes the unbonding state of a validator's stake pool. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct BondingState { #[prost(enumeration = "bonding_state::BondingStateEnum", tag = "1")] pub state: i32, @@ -256,10 +243,10 @@ pub mod bonding_state { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "BONDING_STATE_ENUM_UNSPECIFIED", - Self::Bonded => "BONDING_STATE_ENUM_BONDED", - Self::Unbonding => "BONDING_STATE_ENUM_UNBONDING", - Self::Unbonded => "BONDING_STATE_ENUM_UNBONDED", + BondingStateEnum::Unspecified => "BONDING_STATE_ENUM_UNSPECIFIED", + BondingStateEnum::Bonded => "BONDING_STATE_ENUM_BONDED", + BondingStateEnum::Unbonding => "BONDING_STATE_ENUM_UNBONDING", + BondingStateEnum::Unbonded => "BONDING_STATE_ENUM_UNBONDED", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -278,14 +265,12 @@ impl ::prost::Name for BondingState { const NAME: &'static str = "BondingState"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.BondingState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.BondingState".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Describes the state of a validator -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorState { #[prost(enumeration = "validator_state::ValidatorStateEnum", tag = "1")] pub state: i32, @@ -320,13 +305,13 @@ pub mod validator_state { /// (if the ProtoBuf definition does not change) and safe for programmatic use. 
pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "VALIDATOR_STATE_ENUM_UNSPECIFIED", - Self::Defined => "VALIDATOR_STATE_ENUM_DEFINED", - Self::Inactive => "VALIDATOR_STATE_ENUM_INACTIVE", - Self::Active => "VALIDATOR_STATE_ENUM_ACTIVE", - Self::Jailed => "VALIDATOR_STATE_ENUM_JAILED", - Self::Tombstoned => "VALIDATOR_STATE_ENUM_TOMBSTONED", - Self::Disabled => "VALIDATOR_STATE_ENUM_DISABLED", + ValidatorStateEnum::Unspecified => "VALIDATOR_STATE_ENUM_UNSPECIFIED", + ValidatorStateEnum::Defined => "VALIDATOR_STATE_ENUM_DEFINED", + ValidatorStateEnum::Inactive => "VALIDATOR_STATE_ENUM_INACTIVE", + ValidatorStateEnum::Active => "VALIDATOR_STATE_ENUM_ACTIVE", + ValidatorStateEnum::Jailed => "VALIDATOR_STATE_ENUM_JAILED", + ValidatorStateEnum::Tombstoned => "VALIDATOR_STATE_ENUM_TOMBSTONED", + ValidatorStateEnum::Disabled => "VALIDATOR_STATE_ENUM_DISABLED", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -348,13 +333,11 @@ impl ::prost::Name for ValidatorState { const NAME: &'static str = "ValidatorState"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorState".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorState".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Combines all validator info into a single packet. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorInfo { #[prost(message, optional, tag = "1")] @@ -368,13 +351,11 @@ impl ::prost::Name for ValidatorInfo { const NAME: &'static str = "ValidatorInfo"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorInfo".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// A transaction action (re)defining a validator. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorDefinition { /// The configuration data for the validator. @@ -388,13 +369,11 @@ impl ::prost::Name for ValidatorDefinition { const NAME: &'static str = "ValidatorDefinition"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorDefinition".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorDefinition".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// A transaction action adding stake to a validator's delegation pool. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Delegate { /// The identity key of the validator to delegate to. 
@@ -422,13 +401,11 @@ impl ::prost::Name for Delegate { const NAME: &'static str = "Delegate"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.Delegate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.Delegate".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// A transaction action withdrawing stake from a validator's delegation pool. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Undelegate { /// The identity key of the validator to undelegate from. @@ -458,14 +435,12 @@ impl ::prost::Name for Undelegate { const NAME: &'static str = "Undelegate"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.Undelegate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.Undelegate".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// A transaction action finishing an undelegation, converting (slashable) /// "unbonding tokens" to (unslashable) staking tokens. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UndelegateClaim { #[prost(message, optional, tag = "1")] @@ -477,12 +452,10 @@ impl ::prost::Name for UndelegateClaim { const NAME: &'static str = "UndelegateClaim"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.UndelegateClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.UndelegateClaim".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UndelegateClaimBody { /// The identity key of the validator to finish undelegating from. @@ -511,12 +484,10 @@ impl ::prost::Name for UndelegateClaimBody { const NAME: &'static str = "UndelegateClaimBody"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.UndelegateClaimBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.UndelegateClaimBody".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UndelegateClaimPlan { /// The identity key of the validator to finish undelegating from. @@ -553,13 +524,11 @@ impl ::prost::Name for UndelegateClaimPlan { const NAME: &'static str = "UndelegateClaimPlan"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.UndelegateClaimPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.UndelegateClaimPlan".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// A list of pending delegations and undelegations. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegationChanges { #[prost(message, repeated, tag = "1")] @@ -571,13 +540,11 @@ impl ::prost::Name for DelegationChanges { const NAME: &'static str = "DelegationChanges"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.DelegationChanges".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.DelegationChanges".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Track's a validator's uptime. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Uptime { #[prost(uint64, tag = "1")] @@ -591,14 +558,12 @@ impl ::prost::Name for Uptime { const NAME: &'static str = "Uptime"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.Uptime".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.Uptime".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Tracks our view of Tendermint's view of the validator set, so we can keep it /// from getting confused. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CurrentConsensusKeys { #[prost(message, repeated, tag = "1")] @@ -610,13 +575,11 @@ impl ::prost::Name for CurrentConsensusKeys { const NAME: &'static str = "CurrentConsensusKeys"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.CurrentConsensusKeys".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.CurrentConsensusKeys".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Tracks slashing penalties applied to a validator in some epoch. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Penalty { #[prost(bytes = "vec", tag = "1")] @@ -626,13 +589,11 @@ impl ::prost::Name for Penalty { const NAME: &'static str = "Penalty"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.Penalty".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.Penalty".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Requests information about a specific validator. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetValidatorInfoRequest { /// The identity key of the validator. 
@@ -643,12 +604,10 @@ impl ::prost::Name for GetValidatorInfoRequest { const NAME: &'static str = "GetValidatorInfoRequest"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.GetValidatorInfoRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.GetValidatorInfoRequest".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetValidatorInfoResponse { #[prost(message, optional, tag = "1")] @@ -658,14 +617,12 @@ impl ::prost::Name for GetValidatorInfoResponse { const NAME: &'static str = "GetValidatorInfoResponse"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.GetValidatorInfoResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.GetValidatorInfoResponse".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Requests information on the chain's validators. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorInfoRequest { /// Whether or not to return inactive validators #[prost(bool, tag = "2")] @@ -675,12 +632,10 @@ impl ::prost::Name for ValidatorInfoRequest { const NAME: &'static str = "ValidatorInfoRequest"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorInfoRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorInfoRequest".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorInfoResponse { #[prost(message, optional, tag = "1")] @@ -690,12 +645,10 @@ impl ::prost::Name for ValidatorInfoResponse { const NAME: &'static str = "ValidatorInfoResponse"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorInfoResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorInfoResponse".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorStatusRequest { #[prost(message, optional, tag = "2")] @@ -705,12 +658,10 @@ impl ::prost::Name for ValidatorStatusRequest { const NAME: &'static str = "ValidatorStatusRequest"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorStatusRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorStatusRequest".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorStatusResponse { #[prost(message, optional, tag = "1")] @@ 
-720,13 +671,11 @@ impl ::prost::Name for ValidatorStatusResponse { const NAME: &'static str = "ValidatorStatusResponse"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorStatusResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorStatusResponse".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Requests the compounded penalty for a validator over a range of epochs. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorPenaltyRequest { #[prost(message, optional, tag = "2")] @@ -740,12 +689,10 @@ impl ::prost::Name for ValidatorPenaltyRequest { const NAME: &'static str = "ValidatorPenaltyRequest"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorPenaltyRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorPenaltyRequest".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorPenaltyResponse { #[prost(message, optional, tag = "1")] @@ -755,12 +702,10 @@ impl ::prost::Name for ValidatorPenaltyResponse { const NAME: &'static str = "ValidatorPenaltyResponse"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorPenaltyResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorPenaltyResponse".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CurrentValidatorRateRequest { #[prost(message, optional, tag = "2")] @@ -770,12 +715,10 @@ impl ::prost::Name for CurrentValidatorRateRequest { const NAME: &'static str = "CurrentValidatorRateRequest"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.CurrentValidatorRateRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.CurrentValidatorRateRequest".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CurrentValidatorRateResponse { #[prost(message, optional, tag = "1")] @@ -785,12 +728,10 @@ impl ::prost::Name for CurrentValidatorRateResponse { const NAME: &'static str = "CurrentValidatorRateResponse"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.CurrentValidatorRateResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.CurrentValidatorRateResponse".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorUptimeRequest { #[prost(message, optional, tag = "2")] @@ 
-800,12 +741,10 @@ impl ::prost::Name for ValidatorUptimeRequest { const NAME: &'static str = "ValidatorUptimeRequest"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorUptimeRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorUptimeRequest".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorUptimeResponse { #[prost(message, optional, tag = "1")] @@ -815,14 +754,12 @@ impl ::prost::Name for ValidatorUptimeResponse { const NAME: &'static str = "ValidatorUptimeResponse"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.ValidatorUptimeResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.ValidatorUptimeResponse".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Staking configuration data. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct StakeParameters { /// The number of epochs an unbonding note for before being released. #[deprecated] @@ -860,13 +797,11 @@ impl ::prost::Name for StakeParameters { const NAME: &'static str = "StakeParameters"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.StakeParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.StakeParameters".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Genesis data for the staking component. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GenesisContent { /// The configuration parameters for the staking component present at genesis @@ -880,12 +815,10 @@ impl ::prost::Name for GenesisContent { const NAME: &'static str = "GenesisContent"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.GenesisContent".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.GenesisContent".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventTombstoneValidator { /// The height at which the offense occurred. 
@@ -908,12 +841,10 @@ impl ::prost::Name for EventTombstoneValidator { const NAME: &'static str = "EventTombstoneValidator"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventTombstoneValidator".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventTombstoneValidator".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValidatorStateChange { /// The validator's identity key. @@ -927,12 +858,10 @@ impl ::prost::Name for EventValidatorStateChange { const NAME: &'static str = "EventValidatorStateChange"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventValidatorStateChange".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventValidatorStateChange".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValidatorVotingPowerChange { /// The validator's identity key. @@ -946,12 +875,10 @@ impl ::prost::Name for EventValidatorVotingPowerChange { const NAME: &'static str = "EventValidatorVotingPowerChange"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventValidatorVotingPowerChange".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventValidatorVotingPowerChange".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValidatorBondingStateChange { /// The validator's identity key. @@ -965,12 +892,10 @@ impl ::prost::Name for EventValidatorBondingStateChange { const NAME: &'static str = "EventValidatorBondingStateChange"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventValidatorBondingStateChange".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventValidatorBondingStateChange".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventRateDataChange { /// The validator's identity key. @@ -984,12 +909,10 @@ impl ::prost::Name for EventRateDataChange { const NAME: &'static str = "EventRateDataChange"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventRateDataChange".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventRateDataChange".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValidatorDefinitionUpload { /// The validator definition. 
@@ -1000,12 +923,10 @@ impl ::prost::Name for EventValidatorDefinitionUpload { const NAME: &'static str = "EventValidatorDefinitionUpload"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventValidatorDefinitionUpload".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventValidatorDefinitionUpload".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventValidatorMissedBlock { /// The validator's identity key. @@ -1016,12 +937,10 @@ impl ::prost::Name for EventValidatorMissedBlock { const NAME: &'static str = "EventValidatorMissedBlock"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventValidatorMissedBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventValidatorMissedBlock".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventDelegate { /// The validator's identity key. @@ -1035,12 +954,10 @@ impl ::prost::Name for EventDelegate { const NAME: &'static str = "EventDelegate"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventDelegate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventDelegate".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventUndelegate { /// The validator's identity key. @@ -1054,13 +971,11 @@ impl ::prost::Name for EventUndelegate { const NAME: &'static str = "EventUndelegate"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventUndelegate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventUndelegate".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Indicates a slashing penalty was applied to a validator's reward rates. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventSlashingPenaltyApplied { /// The validator's identity key. @@ -1077,22 +992,13 @@ impl ::prost::Name for EventSlashingPenaltyApplied { const NAME: &'static str = "EventSlashingPenaltyApplied"; const PACKAGE: &'static str = "penumbra.core.component.stake.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.component.stake.v1.EventSlashingPenaltyApplied".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.component.stake.v1.EventSlashingPenaltyApplied".into() + ::prost::alloc::format!("penumbra.core.component.stake.v1.{}", Self::NAME) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod query_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Query operations for the staking component. @@ -1115,8 +1021,8 @@ pub mod query_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -1141,7 +1047,7 @@ pub mod query_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { QueryServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -1188,7 +1094,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1218,7 +1125,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1247,7 +1155,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1276,7 +1185,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1305,7 +1215,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1334,7 +1245,8 @@ pub mod query_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1357,17 +1269,11 @@ pub mod query_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod query_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with QueryServiceServer. #[async_trait] - pub trait QueryService: std::marker::Send + std::marker::Sync + 'static { + pub trait QueryService: Send + Sync + 'static { /// Queries for information about a specific validator. async fn get_validator_info( &self, @@ -1380,7 +1286,7 @@ pub mod query_service_server { type ValidatorInfoStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Queries the current validator set, with filtering. async fn validator_info( @@ -1421,18 +1327,20 @@ pub mod query_service_server { } /// Query operations for the staking component. 
#[derive(Debug)] - pub struct QueryServiceServer { - inner: Arc, + pub struct QueryServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl QueryServiceServer { + struct _Inner(Arc); + impl QueryServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -1482,8 +1390,8 @@ pub mod query_service_server { impl tonic::codegen::Service> for QueryServiceServer where T: QueryService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -1495,6 +1403,7 @@ pub mod query_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.core.component.stake.v1.QueryService/GetValidatorInfo" => { #[allow(non_camel_case_types)] @@ -1526,6 +1435,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetValidatorInfoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1572,6 +1482,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ValidatorInfoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1617,6 +1528,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ValidatorStatusSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1663,6 +1575,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ValidatorPenaltySvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1709,6 +1622,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CurrentValidatorRateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1754,6 +1668,7 @@ pub mod query_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ValidatorUptimeSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1772,25 +1687,20 @@ pub mod query_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - 
tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for QueryServiceServer { + impl Clone for QueryServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1802,9 +1712,17 @@ pub mod query_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.core.component.stake.v1.QueryService"; - impl tonic::server::NamedService for QueryServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for QueryServiceServer { + const NAME: &'static str = "penumbra.core.component.stake.v1.QueryService"; } } diff --git a/crates/proto/src/gen/penumbra.core.component.stake.v1.serde.rs b/crates/proto/src/gen/penumbra.core.component.stake.v1.serde.rs index 64e228715c..e3f0f42615 100644 --- a/crates/proto/src/gen/penumbra.core.component.stake.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.component.stake.v1.serde.rs @@ -18,7 +18,6 @@ impl serde::Serialize for BaseRateData { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.stake.v1.BaseRateData", len)?; if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } if let Some(v) = self.base_reward_rate.as_ref() { @@ -159,12 +158,10 @@ impl serde::Serialize for BondingState { } if self.unbonds_at_epoch != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondsAtEpoch", ToString::to_string(&self.unbonds_at_epoch).as_str())?; } if self.unbonds_at_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondsAtHeight", ToString::to_string(&self.unbonds_at_height).as_str())?; } struct_ser.end() @@ -665,7 +662,6 @@ impl serde::Serialize for Delegate { } if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } if let Some(v) = self.unbonded_amount.as_ref() { @@ -1155,7 +1151,6 @@ impl serde::Serialize for EventSlashingPenaltyApplied { } if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } if let Some(v) = self.new_penalty.as_ref() { @@ -1294,12 +1289,10 @@ impl serde::Serialize for EventTombstoneValidator { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.stake.v1.EventTombstoneValidator", len)?; if self.evidence_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("evidenceHeight", ToString::to_string(&self.evidence_height).as_str())?; } if self.current_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("currentHeight", 
ToString::to_string(&self.current_height).as_str())?; } if let Some(v) = self.identity_key.as_ref() { @@ -1307,12 +1300,10 @@ impl serde::Serialize for EventTombstoneValidator { } if !self.address.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("address", pbjson::private::base64::encode(&self.address).as_str())?; } if self.voting_power != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("votingPower", ToString::to_string(&self.voting_power).as_str())?; } struct_ser.end() @@ -2743,7 +2734,6 @@ impl serde::Serialize for Penalty { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.stake.v1.Penalty", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -2854,7 +2844,6 @@ impl serde::Serialize for RateData { } if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } if let Some(v) = self.validator_reward_rate.as_ref() { @@ -3020,37 +3009,30 @@ impl serde::Serialize for StakeParameters { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.stake.v1.StakeParameters", len)?; if self.unbonding_epochs != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondingEpochs", ToString::to_string(&self.unbonding_epochs).as_str())?; } if self.active_validator_limit != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("activeValidatorLimit", ToString::to_string(&self.active_validator_limit).as_str())?; } if self.base_reward_rate != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("baseRewardRate", ToString::to_string(&self.base_reward_rate).as_str())?; } if self.slashing_penalty_misbehavior != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("slashingPenaltyMisbehavior", ToString::to_string(&self.slashing_penalty_misbehavior).as_str())?; } if self.slashing_penalty_downtime != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("slashingPenaltyDowntime", ToString::to_string(&self.slashing_penalty_downtime).as_str())?; } if self.signed_blocks_window_len != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("signedBlocksWindowLen", ToString::to_string(&self.signed_blocks_window_len).as_str())?; } if self.missed_blocks_maximum != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("missedBlocksMaximum", ToString::to_string(&self.missed_blocks_maximum).as_str())?; } if let Some(v) = self.min_validator_stake.as_ref() { @@ -3058,7 +3040,6 @@ impl serde::Serialize for StakeParameters { } if self.unbonding_delay != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondingDelay", ToString::to_string(&self.unbonding_delay).as_str())?; } struct_ser.end() 
@@ -3283,7 +3264,6 @@ impl serde::Serialize for Undelegate { } if self.start_epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startEpochIndex", ToString::to_string(&self.start_epoch_index).as_str())?; } if let Some(v) = self.unbonded_amount.as_ref() { @@ -3446,7 +3426,6 @@ impl serde::Serialize for UndelegateClaim { } if !self.proof.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proof", pbjson::private::base64::encode(&self.proof).as_str())?; } struct_ser.end() @@ -3571,7 +3550,6 @@ impl serde::Serialize for UndelegateClaimBody { } if self.start_epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startEpochIndex", ToString::to_string(&self.start_epoch_index).as_str())?; } if let Some(v) = self.penalty.as_ref() { @@ -3582,7 +3560,6 @@ impl serde::Serialize for UndelegateClaimBody { } if self.unbonding_start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondingStartHeight", ToString::to_string(&self.unbonding_start_height).as_str())?; } struct_ser.end() @@ -3755,7 +3732,6 @@ impl serde::Serialize for UndelegateClaimPlan { } if self.start_epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startEpochIndex", ToString::to_string(&self.start_epoch_index).as_str())?; } if let Some(v) = self.penalty.as_ref() { @@ -3766,22 +3742,18 @@ impl serde::Serialize for UndelegateClaimPlan { } if !self.balance_blinding.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("balanceBlinding", pbjson::private::base64::encode(&self.balance_blinding).as_str())?; } if !self.proof_blinding_r.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingR", pbjson::private::base64::encode(&self.proof_blinding_r).as_str())?; } if !self.proof_blinding_s.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofBlindingS", pbjson::private::base64::encode(&self.proof_blinding_s).as_str())?; } if self.unbonding_start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondingStartHeight", ToString::to_string(&self.unbonding_start_height).as_str())?; } struct_ser.end() @@ -3978,7 +3950,6 @@ impl serde::Serialize for Uptime { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.stake.v1.Uptime", len)?; if self.as_of_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("asOfBlockHeight", ToString::to_string(&self.as_of_block_height).as_str())?; } if self.window_len != 0 { @@ -3986,7 +3957,6 @@ impl serde::Serialize for Uptime { } if !self.bitvec.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("bitvec", pbjson::private::base64::encode(&self.bitvec).as_str())?; } struct_ser.end() @@ -4140,7 +4110,6 @@ impl serde::Serialize for Validator { } if !self.consensus_key.is_empty() { #[allow(clippy::needless_borrow)] - 
#[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("consensusKey", pbjson::private::base64::encode(&self.consensus_key).as_str())?; } if !self.name.is_empty() { @@ -4361,7 +4330,6 @@ impl serde::Serialize for ValidatorDefinition { } if !self.auth_sig.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("authSig", pbjson::private::base64::encode(&self.auth_sig).as_str())?; } struct_ser.end() @@ -4899,12 +4867,10 @@ impl serde::Serialize for ValidatorPenaltyRequest { } if self.start_epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startEpochIndex", ToString::to_string(&self.start_epoch_index).as_str())?; } if self.end_epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endEpochIndex", ToString::to_string(&self.end_epoch_index).as_str())?; } struct_ser.end() @@ -5839,7 +5805,6 @@ impl serde::Serialize for ZkUndelegateClaimProof { let mut struct_ser = serializer.serialize_struct("penumbra.core.component.stake.v1.ZKUndelegateClaimProof", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.keys.v1.rs b/crates/proto/src/gen/penumbra.core.keys.v1.rs index c59d5084e7..6ba8e5c292 100644 --- a/crates/proto/src/gen/penumbra.core.keys.v1.rs +++ b/crates/proto/src/gen/penumbra.core.keys.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// A Penumbra address. An address in Penumbra is a Bech32m-encoded /// string, with the human-readable prefix (HRP) `penumbrav2t`. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Address { /// The bytes of the address. Must be represented as a series of @@ -21,12 +21,10 @@ impl ::prost::Name for Address { const NAME: &'static str = "Address"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.Address".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.Address".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AddressView { #[prost(oneof = "address_view::AddressView", tags = "1, 2")] @@ -35,6 +33,7 @@ pub struct AddressView { /// Nested message and enum types in `AddressView`. pub mod address_view { /// A decoded address, with information about the address index and wallet ID visible. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Decoded { #[prost(message, optional, tag = "1")] @@ -48,13 +47,11 @@ pub mod address_view { const NAME: &'static str = "Decoded"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.AddressView.Decoded".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.AddressView.Decoded".into() + ::prost::alloc::format!("penumbra.core.keys.v1.AddressView.{}", Self::NAME) } } /// An opaque address, with no information about the address index or wallet ID visible. 
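The repeated `::prost::Name` edits in this file (and throughout the diff) swap between two codegen styles: the newer one, removed by this revert, returns a baked-in string literal and emits an explicit `type_url()`, while the restored one composes the full name at call time with `format!`. Both produce the same protobuf full name; a small sketch of the equivalence, with illustrative names (`NameSketch` is not prost's actual trait), and of the `/`-prefixed type URL intended for `google.protobuf.Any` packing:

```rust
/// Minimal stand-in for the two codegen styles seen in this diff. Both yield the
/// same full name, e.g. "penumbra.core.keys.v1.Address"; the newer style also
/// emits a `type_url()` of "/" + full name.
trait NameSketch {
    const NAME: &'static str;
    const PACKAGE: &'static str;

    // Older codegen (restored by this revert): compose the name at call time.
    fn full_name_via_format() -> String {
        format!("{}.{}", Self::PACKAGE, Self::NAME)
    }

    // Newer codegen (backed out here): bake the literal in and add a type URL.
    fn full_name_via_literal() -> String;
    fn type_url() -> String {
        format!("/{}", Self::full_name_via_literal())
    }
}

struct Address;

impl NameSketch for Address {
    const NAME: &'static str = "Address";
    const PACKAGE: &'static str = "penumbra.core.keys.v1";
    fn full_name_via_literal() -> String {
        "penumbra.core.keys.v1.Address".into()
    }
}

fn main() {
    assert_eq!(Address::full_name_via_format(), Address::full_name_via_literal());
    assert_eq!(Address::type_url(), "/penumbra.core.keys.v1.Address");
}
```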
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -64,12 +61,10 @@ pub mod address_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.AddressView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.AddressView.Opaque".into() + ::prost::alloc::format!("penumbra.core.keys.v1.AddressView.{}", Self::NAME) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum AddressView { #[prost(message, tag = "1")] @@ -82,12 +77,10 @@ impl ::prost::Name for AddressView { const NAME: &'static str = "AddressView"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.AddressView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.AddressView".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PayloadKey { #[prost(bytes = "vec", tag = "1")] @@ -97,12 +90,10 @@ impl ::prost::Name for PayloadKey { const NAME: &'static str = "PayloadKey"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.PayloadKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.PayloadKey".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendKey { #[prost(bytes = "vec", tag = "1")] @@ -112,12 +103,10 @@ impl ::prost::Name for SpendKey { const NAME: &'static str = "SpendKey"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.SpendKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.SpendKey".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FullViewingKey { #[prost(bytes = "vec", tag = "1")] @@ -127,12 +116,10 @@ impl ::prost::Name for FullViewingKey { const NAME: &'static str = "FullViewingKey"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.FullViewingKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.FullViewingKey".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WalletId { #[prost(bytes = "vec", tag = "1")] @@ -142,12 +129,10 @@ impl ::prost::Name for WalletId { const NAME: &'static str = "WalletId"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.WalletId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.WalletId".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct 
Diversifier { #[prost(bytes = "vec", tag = "1")] @@ -157,12 +142,10 @@ impl ::prost::Name for Diversifier { const NAME: &'static str = "Diversifier"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.Diversifier".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.Diversifier".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AddressIndex { #[prost(uint32, tag = "2")] @@ -174,13 +157,11 @@ impl ::prost::Name for AddressIndex { const NAME: &'static str = "AddressIndex"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.AddressIndex".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.AddressIndex".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } /// A validator's identity key (decaf377-rdsa spendauth verification key). +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IdentityKey { #[prost(bytes = "vec", tag = "1")] @@ -190,13 +171,11 @@ impl ::prost::Name for IdentityKey { const NAME: &'static str = "IdentityKey"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.IdentityKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.IdentityKey".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } /// A validator's governance key (decaf377-rdsa spendauth verification key). 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GovernanceKey { #[prost(bytes = "vec", tag = "1")] @@ -206,12 +185,10 @@ impl ::prost::Name for GovernanceKey { const NAME: &'static str = "GovernanceKey"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.GovernanceKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.GovernanceKey".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ConsensusKey { #[prost(bytes = "vec", tag = "1")] @@ -221,9 +198,6 @@ impl ::prost::Name for ConsensusKey { const NAME: &'static str = "ConsensusKey"; const PACKAGE: &'static str = "penumbra.core.keys.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.keys.v1.ConsensusKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.keys.v1.ConsensusKey".into() + ::prost::alloc::format!("penumbra.core.keys.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.keys.v1.serde.rs b/crates/proto/src/gen/penumbra.core.keys.v1.serde.rs index 273b9fa030..3413390f9f 100644 --- a/crates/proto/src/gen/penumbra.core.keys.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.keys.v1.serde.rs @@ -15,7 +15,6 @@ impl serde::Serialize for Address { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.Address", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } if !self.alt_bech32m.is_empty() { @@ -135,7 +134,6 @@ impl serde::Serialize for AddressIndex { } if !self.randomizer.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("randomizer", pbjson::private::base64::encode(&self.randomizer).as_str())?; } struct_ser.end() @@ -585,7 +583,6 @@ impl serde::Serialize for ConsensusKey { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.ConsensusKey", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -684,7 +681,6 @@ impl serde::Serialize for Diversifier { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.Diversifier", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -783,7 +779,6 @@ impl serde::Serialize for FullViewingKey { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.FullViewingKey", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -882,7 +877,6 @@ impl serde::Serialize for GovernanceKey { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.GovernanceKey", len)?; if !self.gk.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] 
struct_ser.serialize_field("gk", pbjson::private::base64::encode(&self.gk).as_str())?; } struct_ser.end() @@ -981,7 +975,6 @@ impl serde::Serialize for IdentityKey { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.IdentityKey", len)?; if !self.ik.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("ik", pbjson::private::base64::encode(&self.ik).as_str())?; } struct_ser.end() @@ -1080,7 +1073,6 @@ impl serde::Serialize for PayloadKey { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.PayloadKey", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -1179,7 +1171,6 @@ impl serde::Serialize for SpendKey { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.SpendKey", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -1278,7 +1269,6 @@ impl serde::Serialize for WalletId { let mut struct_ser = serializer.serialize_struct("penumbra.core.keys.v1.WalletId", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.num.v1.rs b/crates/proto/src/gen/penumbra.core.num.v1.rs index b984486038..fa7b875da5 100644 --- a/crates/proto/src/gen/penumbra.core.num.v1.rs +++ b/crates/proto/src/gen/penumbra.core.num.v1.rs @@ -1,10 +1,10 @@ -// This file is @generated by prost-build. /// The quantity of a particular Asset. Represented as a 128-bit unsigned integer, /// split over two fields, `lo` and `hi`, representing the low- and high-order bytes /// of the 128-bit value, respectively. Clients must assemble these bits in their /// implementation into a `uint128` or comparable data structure, in order to model /// the Amount accurately. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Amount { #[prost(uint64, tag = "1")] pub lo: u64, @@ -15,9 +15,6 @@ impl ::prost::Name for Amount { const NAME: &'static str = "Amount"; const PACKAGE: &'static str = "penumbra.core.num.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.num.v1.Amount".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.num.v1.Amount".into() + ::prost::alloc::format!("penumbra.core.num.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.num.v1.serde.rs b/crates/proto/src/gen/penumbra.core.num.v1.serde.rs index fa6c06fdb4..f87f4a5f9e 100644 --- a/crates/proto/src/gen/penumbra.core.num.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.num.v1.serde.rs @@ -15,12 +15,10 @@ impl serde::Serialize for Amount { let mut struct_ser = serializer.serialize_struct("penumbra.core.num.v1.Amount", len)?; if self.lo != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("lo", ToString::to_string(&self.lo).as_str())?; } if self.hi != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("hi", ToString::to_string(&self.hi).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.core.transaction.v1.rs b/crates/proto/src/gen/penumbra.core.transaction.v1.rs index 2295d26332..0cf2173a8b 100644 --- a/crates/proto/src/gen/penumbra.core.transaction.v1.rs +++ b/crates/proto/src/gen/penumbra.core.transaction.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// A Penumbra transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Transaction { #[prost(message, optional, tag = "1")] @@ -18,13 +18,11 @@ impl ::prost::Name for Transaction { const NAME: &'static str = "Transaction"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.Transaction".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.Transaction".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// The body of a transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionBody { /// A list of actions (state changes) performed by this transaction. @@ -46,13 +44,11 @@ impl ::prost::Name for TransactionBody { const NAME: &'static str = "TransactionBody"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionBody".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionBody".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// The parameters determining if a transaction should be accepted by the chain. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionParameters { /// The maximum height that this transaction can be included in the chain. 
@@ -72,13 +68,11 @@ impl ::prost::Name for TransactionParameters { const NAME: &'static str = "TransactionParameters"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionParameters".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionParameters".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// Represents a transaction summary containing multiple effects. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionSummary { #[prost(message, repeated, tag = "1")] @@ -87,6 +81,7 @@ pub struct TransactionSummary { /// Nested message and enum types in `TransactionSummary`. pub mod transaction_summary { /// Represents an individual effect of a transaction. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Effects { #[prost(message, optional, tag = "1")] @@ -98,10 +93,9 @@ pub mod transaction_summary { const NAME: &'static str = "Effects"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionSummary.Effects".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionSummary.Effects".into() + ::prost::alloc::format!( + "penumbra.core.transaction.v1.TransactionSummary.{}", Self::NAME + ) } } } @@ -109,13 +103,11 @@ impl ::prost::Name for TransactionSummary { const NAME: &'static str = "TransactionSummary"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionSummary".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionSummary".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// Detection data used by a detection server performing Fuzzy Message Detection. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DetectionData { /// A list of clues for use with Fuzzy Message Detection. @@ -128,13 +120,11 @@ impl ::prost::Name for DetectionData { const NAME: &'static str = "DetectionData"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.DetectionData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.DetectionData".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// A state change performed by a transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Action { #[prost( @@ -145,6 +135,7 @@ pub struct Action { } /// Nested message and enum types in `Action`. pub mod action { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Action { /// Common actions have numbers \< 15, to save space. 
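The "numbers \< 15, to save space" remark in the `Action` oneof refers to the protobuf wire format: every field is prefixed by a varint key `(tag << 3) | wire_type`, which fits in a single byte only while the tag is at most 15. A small sketch of that arithmetic; the helper is illustrative:

```rust
/// Size in bytes of the varint field key `(tag << 3) | wire_type` that prefixes
/// every field on the protobuf wire. Tags 1..=15 fit in one byte, which is why
/// the common actions get the small numbers.
fn field_key_len(tag: u32) -> usize {
    let key = (tag << 3) | 2; // wire type 2 (length-delimited), used for messages
    // Varint encoding packs 7 payload bits per byte.
    ((32 - key.leading_zeros()).max(1) as usize + 6) / 7
}

fn main() {
    assert_eq!(field_key_len(15), 1); // key = 122, one varint byte
    assert_eq!(field_key_len(16), 2); // key = 130, needs two varint bytes
}
```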
@@ -229,14 +220,12 @@ impl ::prost::Name for Action { const NAME: &'static str = "Action"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.Action".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.Action".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// A transaction perspective is a bundle of key material and commitment openings /// that allow generating a view of a transaction from that perspective. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionPerspective { #[prost(message, repeated, tag = "1")] @@ -284,6 +273,7 @@ pub struct TransactionPerspective { } /// Nested message and enum types in `TransactionPerspective`. pub mod transaction_perspective { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExtendedMetadataById { #[prost(message, optional, tag = "1")] @@ -295,12 +285,9 @@ pub mod transaction_perspective { const NAME: &'static str = "ExtendedMetadataById"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionPerspective.ExtendedMetadataById" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionPerspective.ExtendedMetadataById" - .into() + ::prost::alloc::format!( + "penumbra.core.transaction.v1.TransactionPerspective.{}", Self::NAME + ) } } /// Associates a nullifier with the transaction ID that created the nullified state commitment. @@ -308,6 +295,7 @@ pub mod transaction_perspective { /// Note: this is *not* the transaction ID that revealed the nullifier. /// /// Allows walking backwards from a spend to the transaction that created the note. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CreationTransactionIdByNullifier { #[prost(message, optional, tag = "1")] @@ -323,17 +311,15 @@ pub mod transaction_perspective { const NAME: &'static str = "CreationTransactionIdByNullifier"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionPerspective.CreationTransactionIdByNullifier" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionPerspective.CreationTransactionIdByNullifier" - .into() + ::prost::alloc::format!( + "penumbra.core.transaction.v1.TransactionPerspective.{}", Self::NAME + ) } } /// Associates a commitment with the transaction ID that eventually nullified it. /// /// Allows walking forwards from an output to the transaction that spent the note. 
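The two perspective sub-messages documented above are essentially lookup tables: one maps a nullifier back to the transaction that created the nullified note, the other maps a commitment forward to the transaction that eventually spent it. A toy sketch of how a viewer might index them; the `PerspectiveIndex` type and the 32-byte aliases are illustrative stand-ins, not the generated types:

```rust
use std::collections::HashMap;

// Illustrative 32-byte stand-ins; the generated messages wrap these in protobuf structs.
type Nullifier = [u8; 32];
type StateCommitment = [u8; 32];
type TransactionId = [u8; 32];

/// Index over the two association lists in a transaction perspective:
/// nullifier -> creating transaction, and commitment -> nullifying transaction.
struct PerspectiveIndex {
    creation_by_nullifier: HashMap<Nullifier, TransactionId>,
    nullification_by_commitment: HashMap<StateCommitment, TransactionId>,
}

impl PerspectiveIndex {
    /// Walk backwards: which transaction created the note this spend consumes?
    fn created_in(&self, nullifier: &Nullifier) -> Option<&TransactionId> {
        self.creation_by_nullifier.get(nullifier)
    }

    /// Walk forwards: which transaction eventually spent the note behind this commitment?
    fn spent_in(&self, commitment: &StateCommitment) -> Option<&TransactionId> {
        self.nullification_by_commitment.get(commitment)
    }
}

fn main() {
    let creating_tx = [1u8; 32];
    let spending_tx = [2u8; 32];
    let index = PerspectiveIndex {
        creation_by_nullifier: HashMap::from([([9u8; 32], creating_tx)]),
        nullification_by_commitment: HashMap::from([([7u8; 32], spending_tx)]),
    };
    assert_eq!(index.created_in(&[9u8; 32]), Some(&creating_tx));
    assert_eq!(index.spent_in(&[7u8; 32]), Some(&spending_tx));
}
```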
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NullificationTransactionIdByCommitment { #[prost(message, optional, tag = "1")] @@ -349,12 +335,9 @@ pub mod transaction_perspective { const NAME: &'static str = "NullificationTransactionIdByCommitment"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionPerspective.NullificationTransactionIdByCommitment" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionPerspective.NullificationTransactionIdByCommitment" - .into() + ::prost::alloc::format!( + "penumbra.core.transaction.v1.TransactionPerspective.{}", Self::NAME + ) } } } @@ -362,12 +345,10 @@ impl ::prost::Name for TransactionPerspective { const NAME: &'static str = "TransactionPerspective"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionPerspective".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionPerspective".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PayloadKeyWithCommitment { #[prost(message, optional, tag = "1")] @@ -381,12 +362,10 @@ impl ::prost::Name for PayloadKeyWithCommitment { const NAME: &'static str = "PayloadKeyWithCommitment"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.PayloadKeyWithCommitment".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.PayloadKeyWithCommitment".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NullifierWithNote { #[prost(message, optional, tag = "1")] @@ -398,13 +377,11 @@ impl ::prost::Name for NullifierWithNote { const NAME: &'static str = "NullifierWithNote"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.NullifierWithNote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.NullifierWithNote".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// View of a Penumbra transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionView { /// View of the transaction body @@ -424,12 +401,10 @@ impl ::prost::Name for TransactionView { const NAME: &'static str = "TransactionView"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionView".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionBodyView { /// A list views into of actions (state changes) performed by this transaction. 
@@ -451,13 +426,11 @@ impl ::prost::Name for TransactionBodyView { const NAME: &'static str = "TransactionBodyView"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionBodyView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionBodyView".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// A view of a specific state change action performed by a transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionView { #[prost( @@ -468,6 +441,7 @@ pub struct ActionView { } /// Nested message and enum types in `ActionView`. pub mod action_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum ActionView { /// Action types with visible/opaque variants @@ -554,13 +528,11 @@ impl ::prost::Name for ActionView { const NAME: &'static str = "ActionView"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.ActionView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.ActionView".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// The data required to authorize a transaction plan. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizationData { /// The computed auth hash for the approved transaction plan. @@ -583,13 +555,11 @@ impl ::prost::Name for AuthorizationData { const NAME: &'static str = "AuthorizationData"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.AuthorizationData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.AuthorizationData".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// The data required for proving when building a transaction from a plan. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WitnessData { /// The anchor for the state transition proofs. @@ -606,10 +576,7 @@ impl ::prost::Name for WitnessData { const NAME: &'static str = "WitnessData"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.WitnessData".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.WitnessData".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// Describes a planned transaction. Permits clients to prepare a transaction @@ -617,6 +584,7 @@ impl ::prost::Name for WitnessData { /// /// The `TransactionPlan` is a fully determined bundle binding all of a transaction's effects. /// The only thing it does not include is the witness data used for proving. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionPlan { /// The sequence of actions planned for this transaction. 
@@ -636,12 +604,10 @@ impl ::prost::Name for TransactionPlan { const NAME: &'static str = "TransactionPlan"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.TransactionPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.TransactionPlan".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DetectionDataPlan { #[prost(message, repeated, tag = "5")] @@ -651,16 +617,14 @@ impl ::prost::Name for DetectionDataPlan { const NAME: &'static str = "DetectionDataPlan"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.DetectionDataPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.DetectionDataPlan".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// Describes a planned transaction action. /// /// Some transaction Actions don't have any private data and are treated as being plans /// themselves. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionPlan { #[prost( @@ -671,6 +635,7 @@ pub struct ActionPlan { } /// Nested message and enum types in `ActionPlan`. pub mod action_plan { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Action { #[prost(message, tag = "1")] @@ -759,13 +724,11 @@ impl ::prost::Name for ActionPlan { const NAME: &'static str = "ActionPlan"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.ActionPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.ActionPlan".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// Describes a plan for forming a `Clue`. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CluePlan { /// The address. @@ -782,13 +745,11 @@ impl ::prost::Name for CluePlan { const NAME: &'static str = "CluePlan"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.CluePlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.CluePlan".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// Describes a plan for forming the transaction memo. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MemoPlan { /// The plaintext. @@ -802,13 +763,11 @@ impl ::prost::Name for MemoPlan { const NAME: &'static str = "MemoPlan"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoPlan".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoPlan".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// The encrypted memo data describing information about the purpose of a transaction. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MemoCiphertext { /// The encrypted data. 528 bytes. @@ -819,13 +778,11 @@ impl ::prost::Name for MemoCiphertext { const NAME: &'static str = "MemoCiphertext"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoCiphertext".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoCiphertext".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } /// The plaintext describing information about the purpose of a transaction. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MemoPlaintext { /// The sender's return address. @@ -842,12 +799,10 @@ impl ::prost::Name for MemoPlaintext { const NAME: &'static str = "MemoPlaintext"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoPlaintext".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoPlaintext".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MemoPlaintextView { #[prost(message, optional, tag = "1")] @@ -859,12 +814,10 @@ impl ::prost::Name for MemoPlaintextView { const NAME: &'static str = "MemoPlaintextView"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoPlaintextView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoPlaintextView".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MemoView { #[prost(oneof = "memo_view::MemoView", tags = "1, 2")] @@ -872,6 +825,7 @@ pub struct MemoView { } /// Nested message and enum types in `MemoView`. 
pub mod memo_view { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Visible { #[prost(message, optional, tag = "1")] @@ -883,12 +837,12 @@ pub mod memo_view { const NAME: &'static str = "Visible"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoView.Visible".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoView.Visible".into() + ::prost::alloc::format!( + "penumbra.core.transaction.v1.MemoView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Opaque { #[prost(message, optional, tag = "1")] @@ -898,12 +852,12 @@ pub mod memo_view { const NAME: &'static str = "Opaque"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoView.Opaque".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoView.Opaque".into() + ::prost::alloc::format!( + "penumbra.core.transaction.v1.MemoView.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum MemoView { #[prost(message, tag = "1")] @@ -916,9 +870,6 @@ impl ::prost::Name for MemoView { const NAME: &'static str = "MemoView"; const PACKAGE: &'static str = "penumbra.core.transaction.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.transaction.v1.MemoView".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.transaction.v1.MemoView".into() + ::prost::alloc::format!("penumbra.core.transaction.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.transaction.v1.serde.rs b/crates/proto/src/gen/penumbra.core.transaction.v1.serde.rs index ba9d7347a8..bd6767749d 100644 --- a/crates/proto/src/gen/penumbra.core.transaction.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.transaction.v1.serde.rs @@ -1449,12 +1449,10 @@ impl serde::Serialize for CluePlan { } if !self.rseed.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("rseed", pbjson::private::base64::encode(&self.rseed).as_str())?; } if self.precision_bits != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("precisionBits", ToString::to_string(&self.precision_bits).as_str())?; } struct_ser.end() @@ -1770,7 +1768,6 @@ impl serde::Serialize for MemoCiphertext { let mut struct_ser = serializer.serialize_struct("penumbra.core.transaction.v1.MemoCiphertext", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -2101,7 +2098,6 @@ impl serde::Serialize for MemoPlan { } if !self.key.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("key", pbjson::private::base64::encode(&self.key).as_str())?; } struct_ser.end() @@ -3190,7 +3186,6 @@ impl serde::Serialize for TransactionParameters { let mut struct_ser = serializer.serialize_struct("penumbra.core.transaction.v1.TransactionParameters", len)?; if self.expiry_height != 0 { #[allow(clippy::needless_borrow)] - 
#[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("expiryHeight", ToString::to_string(&self.expiry_height).as_str())?; } if !self.chain_id.is_empty() { diff --git a/crates/proto/src/gen/penumbra.core.txhash.v1.rs b/crates/proto/src/gen/penumbra.core.txhash.v1.rs index f595a962b5..35d8ae86e0 100644 --- a/crates/proto/src/gen/penumbra.core.txhash.v1.rs +++ b/crates/proto/src/gen/penumbra.core.txhash.v1.rs @@ -1,6 +1,6 @@ -// This file is @generated by prost-build. /// The hash of a Penumbra transaction's *effecting data*, describing the effects /// of the transaction on the chain state. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EffectHash { #[prost(bytes = "vec", tag = "1")] @@ -10,15 +10,13 @@ impl ::prost::Name for EffectHash { const NAME: &'static str = "EffectHash"; const PACKAGE: &'static str = "penumbra.core.txhash.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.txhash.v1.EffectHash".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.txhash.v1.EffectHash".into() + ::prost::alloc::format!("penumbra.core.txhash.v1.{}", Self::NAME) } } /// A transaction ID, the Sha256 hash of a transaction. /// /// This is the hash of the plain byte encoding, used by Tendermint. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionId { #[prost(bytes = "vec", tag = "1")] @@ -28,9 +26,6 @@ impl ::prost::Name for TransactionId { const NAME: &'static str = "TransactionId"; const PACKAGE: &'static str = "penumbra.core.txhash.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.core.txhash.v1.TransactionId".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.core.txhash.v1.TransactionId".into() + ::prost::alloc::format!("penumbra.core.txhash.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.core.txhash.v1.serde.rs b/crates/proto/src/gen/penumbra.core.txhash.v1.serde.rs index d2a884b62c..afd53bee03 100644 --- a/crates/proto/src/gen/penumbra.core.txhash.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.core.txhash.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for EffectHash { let mut struct_ser = serializer.serialize_struct("penumbra.core.txhash.v1.EffectHash", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -111,7 +110,6 @@ impl serde::Serialize for TransactionId { let mut struct_ser = serializer.serialize_struct("penumbra.core.txhash.v1.TransactionId", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.rs b/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.rs index ba9d3eb3ed..129a5e4b65 100644 --- a/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.rs +++ b/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// A clue for use with Fuzzy Message Detection. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Clue { #[prost(bytes = "vec", tag = "1")] @@ -9,9 +9,6 @@ impl ::prost::Name for Clue { const NAME: &'static str = "Clue"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_fmd.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_fmd.v1.Clue".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_fmd.v1.Clue".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_fmd.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.serde.rs b/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.serde.rs index 15c6d1fdfb..bbd9bf91bb 100644 --- a/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.crypto.decaf377_fmd.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for Clue { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_fmd.v1.Clue", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.rs b/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.rs index e544c354ea..6c87752e14 100644 --- a/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.rs +++ b/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// A commitment to a polynomial, as a list of group elements. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct VerifiableSecretSharingCommitment { /// Each of these bytes should be the serialization of a group element. @@ -10,13 +10,11 @@ impl ::prost::Name for VerifiableSecretSharingCommitment { const NAME: &'static str = "VerifiableSecretSharingCommitment"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.VerifiableSecretSharingCommitment".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.VerifiableSecretSharingCommitment".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } /// The public package sent in round 1 of the DKG protocol. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DkgRound1Package { /// A commitment to the polynomial for secret sharing. @@ -30,13 +28,11 @@ impl ::prost::Name for DkgRound1Package { const NAME: &'static str = "DKGRound1Package"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.DKGRound1Package".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.DKGRound1Package".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } /// A share of the final signing key. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SigningShare { /// These bytes should be a valid scalar. 
@@ -47,13 +43,11 @@ impl ::prost::Name for SigningShare { const NAME: &'static str = "SigningShare"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.SigningShare".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.SigningShare".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } /// The per-participant package sent in round 2 of the DKG protocol. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DkgRound2Package { /// This is the share we're sending to that participant. @@ -64,13 +58,11 @@ impl ::prost::Name for DkgRound2Package { const NAME: &'static str = "DKGRound2Package"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.DKGRound2Package".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.DKGRound2Package".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } /// Represents a commitment to a nonce value. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NonceCommitment { /// These bytes should be a valid group element. @@ -81,13 +73,11 @@ impl ::prost::Name for NonceCommitment { const NAME: &'static str = "NonceCommitment"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.NonceCommitment".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.NonceCommitment".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } /// Represents the commitments to nonces needed for signing. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SigningCommitments { /// One nonce to hide them. @@ -101,13 +91,11 @@ impl ::prost::Name for SigningCommitments { const NAME: &'static str = "SigningCommitments"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.SigningCommitments".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.SigningCommitments".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } /// A share of the final signature. These get aggregated to make the actual thing. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignatureShare { /// These bytes should be a valid scalar. 
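"These get aggregated to make the actual thing" refers to the usual FROST flow: each participant's `SignatureShare` is a scalar `z_i`, the coordinator sums the shares in the scalar field, and the pair `(R, z)` is the final signature. A toy sketch of the summation step only, using a small Mersenne prime as a stand-in for the decaf377 scalar field modulus:

```rust
/// Toy illustration of FROST share aggregation: shares are combined by summing
/// in the scalar field. A small prime stands in for the real modulus here; this
/// is not the decaf377 arithmetic.
const TOY_MODULUS: u128 = 2_305_843_009_213_693_951; // 2^61 - 1

fn aggregate_shares(shares: &[u128]) -> u128 {
    shares.iter().fold(0u128, |acc, z_i| (acc + z_i) % TOY_MODULUS)
}

fn main() {
    // Three participants' signature shares (already reduced mod the toy modulus).
    let shares = [123_456_789_u128, 987_654_321, 555_555_555];
    let z = aggregate_shares(&shares);
    assert_eq!(z, (123_456_789 + 987_654_321 + 555_555_555) % TOY_MODULUS);
}
```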
@@ -118,9 +106,6 @@ impl ::prost::Name for SignatureShare { const NAME: &'static str = "SignatureShare"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_frost.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_frost.v1.SignatureShare".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_frost.v1.SignatureShare".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_frost.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.serde.rs b/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.serde.rs index 4e98b9248e..b84daf70b8 100644 --- a/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.crypto.decaf377_frost.v1.serde.rs @@ -18,7 +18,6 @@ impl serde::Serialize for DkgRound1Package { } if !self.proof_of_knowledge.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proofOfKnowledge", pbjson::private::base64::encode(&self.proof_of_knowledge).as_str())?; } struct_ser.end() @@ -225,7 +224,6 @@ impl serde::Serialize for NonceCommitment { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_frost.v1.NonceCommitment", len)?; if !self.element.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("element", pbjson::private::base64::encode(&self.element).as_str())?; } struct_ser.end() @@ -324,7 +322,6 @@ impl serde::Serialize for SignatureShare { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_frost.v1.SignatureShare", len)?; if !self.scalar.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("scalar", pbjson::private::base64::encode(&self.scalar).as_str())?; } struct_ser.end() @@ -535,7 +532,6 @@ impl serde::Serialize for SigningShare { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_frost.v1.SigningShare", len)?; if !self.scalar.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("scalar", pbjson::private::base64::encode(&self.scalar).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.rs b/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.rs index 5402c059a0..b2686fa74f 100644 --- a/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.rs +++ b/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendAuthSignature { #[prost(bytes = "vec", tag = "1")] @@ -8,12 +8,10 @@ impl ::prost::Name for SpendAuthSignature { const NAME: &'static str = "SpendAuthSignature"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_rdsa.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_rdsa.v1.SpendAuthSignature".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_rdsa.v1.SpendAuthSignature".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_rdsa.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BindingSignature { #[prost(bytes = "vec", tag = "1")] @@ -23,12 +21,10 @@ impl ::prost::Name for BindingSignature { const NAME: &'static str = "BindingSignature"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_rdsa.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_rdsa.v1.BindingSignature".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_rdsa.v1.BindingSignature".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_rdsa.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendVerificationKey { #[prost(bytes = "vec", tag = "1")] @@ -38,9 +34,6 @@ impl ::prost::Name for SpendVerificationKey { const NAME: &'static str = "SpendVerificationKey"; const PACKAGE: &'static str = "penumbra.crypto.decaf377_rdsa.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.decaf377_rdsa.v1.SpendVerificationKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.decaf377_rdsa.v1.SpendVerificationKey".into() + ::prost::alloc::format!("penumbra.crypto.decaf377_rdsa.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.serde.rs b/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.serde.rs index 1f976b4ded..5ffa592a93 100644 --- a/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.crypto.decaf377_rdsa.v1.serde.rs @@ -12,7 +12,6 @@ impl serde::Serialize for BindingSignature { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_rdsa.v1.BindingSignature", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -111,7 +110,6 @@ impl serde::Serialize for SpendAuthSignature { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_rdsa.v1.SpendAuthSignature", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -210,7 +208,6 @@ impl serde::Serialize for SpendVerificationKey { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.decaf377_rdsa.v1.SpendVerificationKey", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.crypto.tct.v1.rs 
b/crates/proto/src/gen/penumbra.crypto.tct.v1.rs index 9f7d8bf365..b3979ce556 100644 --- a/crates/proto/src/gen/penumbra.crypto.tct.v1.rs +++ b/crates/proto/src/gen/penumbra.crypto.tct.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct StateCommitment { #[prost(bytes = "vec", tag = "1")] @@ -8,12 +8,10 @@ impl ::prost::Name for StateCommitment { const NAME: &'static str = "StateCommitment"; const PACKAGE: &'static str = "penumbra.crypto.tct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.tct.v1.StateCommitment".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.tct.v1.StateCommitment".into() + ::prost::alloc::format!("penumbra.crypto.tct.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MerkleRoot { #[prost(bytes = "vec", tag = "1")] @@ -23,13 +21,11 @@ impl ::prost::Name for MerkleRoot { const NAME: &'static str = "MerkleRoot"; const PACKAGE: &'static str = "penumbra.crypto.tct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.tct.v1.MerkleRoot".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.tct.v1.MerkleRoot".into() + ::prost::alloc::format!("penumbra.crypto.tct.v1.{}", Self::NAME) } } /// An authentication path from a state commitment to the root of the state commitment tree. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct StateCommitmentProof { #[prost(message, optional, tag = "1")] @@ -44,13 +40,11 @@ impl ::prost::Name for StateCommitmentProof { const NAME: &'static str = "StateCommitmentProof"; const PACKAGE: &'static str = "penumbra.crypto.tct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.tct.v1.StateCommitmentProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.tct.v1.StateCommitmentProof".into() + ::prost::alloc::format!("penumbra.crypto.tct.v1.{}", Self::NAME) } } /// A set of 3 sibling hashes in the auth path for some note commitment. 
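A `MerklePathChunk` carries the 3 sibling hashes at one level of the auth path, which implies an arity-4 tree: two bits of the leaf `position` pick the occupied slot at each level. A sketch of consuming such a path under those assumptions; the sibling ordering, the `combine` hash, and the leaf-level-first layout are illustrative, not the real TCT hashing:

```rust
/// Which of the 4 child slots the running hash occupies at a given level,
/// assuming the position packs 2 bits per level, leaf level first.
fn child_index(position: u64, level: u32) -> usize {
    ((position >> (2 * level)) & 0b11) as usize
}

/// Fold a leaf up an arity-4 auth path: at each level, re-insert the running
/// hash into its slot among the 3 siblings and hash the 4 children together.
fn fold_path(
    leaf: [u8; 32],
    position: u64,
    path: &[[[u8; 32]; 3]], // one 3-sibling chunk per level
    combine: impl Fn(&[[u8; 32]; 4]) -> [u8; 32],
) -> [u8; 32] {
    let mut node = leaf;
    for (level, siblings) in path.iter().enumerate() {
        let mut children = [[0u8; 32]; 4];
        let idx = child_index(position, level as u32);
        let mut s = siblings.iter();
        for (slot, child) in children.iter_mut().enumerate() {
            *child = if slot == idx { node } else { *s.next().expect("3 siblings") };
        }
        node = combine(&children);
    }
    node // equals the Merkle root / anchor when the proof is valid
}

fn main() {
    // Toy combine: XOR the four children byte-wise (illustration only).
    let combine = |children: &[[u8; 32]; 4]| {
        let mut out = [0u8; 32];
        for child in children {
            for (o, b) in out.iter_mut().zip(child) {
                *o ^= *b;
            }
        }
        out
    };
    let leaf = [1u8; 32];
    let path = vec![[[0u8; 32]; 3]; 2]; // two levels of all-zero siblings
    let root = fold_path(leaf, 0b0110, &path, combine);
    assert_eq!(root, leaf); // XOR with zero siblings is the identity here
}
```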
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MerklePathChunk { #[prost(bytes = "vec", tag = "1")] @@ -64,9 +58,6 @@ impl ::prost::Name for MerklePathChunk { const NAME: &'static str = "MerklePathChunk"; const PACKAGE: &'static str = "penumbra.crypto.tct.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.crypto.tct.v1.MerklePathChunk".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.crypto.tct.v1.MerklePathChunk".into() + ::prost::alloc::format!("penumbra.crypto.tct.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.crypto.tct.v1.serde.rs b/crates/proto/src/gen/penumbra.crypto.tct.v1.serde.rs index 6a55bc4ec7..e55d9ef249 100644 --- a/crates/proto/src/gen/penumbra.crypto.tct.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.crypto.tct.v1.serde.rs @@ -18,17 +18,14 @@ impl serde::Serialize for MerklePathChunk { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.tct.v1.MerklePathChunk", len)?; if !self.sibling_1.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sibling1", pbjson::private::base64::encode(&self.sibling_1).as_str())?; } if !self.sibling_2.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sibling2", pbjson::private::base64::encode(&self.sibling_2).as_str())?; } if !self.sibling_3.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sibling3", pbjson::private::base64::encode(&self.sibling_3).as_str())?; } struct_ser.end() @@ -156,7 +153,6 @@ impl serde::Serialize for MerkleRoot { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.tct.v1.MerkleRoot", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -255,7 +251,6 @@ impl serde::Serialize for StateCommitment { let mut struct_ser = serializer.serialize_struct("penumbra.crypto.tct.v1.StateCommitment", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -363,7 +358,6 @@ impl serde::Serialize for StateCommitmentProof { } if self.position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("position", ToString::to_string(&self.position).as_str())?; } if !self.auth_path.is_empty() { diff --git a/crates/proto/src/gen/penumbra.custody.threshold.v1.rs b/crates/proto/src/gen/penumbra.custody.threshold.v1.rs index 9aa9410cb6..2bf788a6b6 100644 --- a/crates/proto/src/gen/penumbra.custody.threshold.v1.rs +++ b/crates/proto/src/gen/penumbra.custody.threshold.v1.rs @@ -1,7 +1,7 @@ -// This file is @generated by prost-build. /// A key one can use to verify signatures. /// /// This key can also serve as a unique identifier for users. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct VerificationKey { #[prost(bytes = "vec", tag = "1")] @@ -11,13 +11,11 @@ impl ::prost::Name for VerificationKey { const NAME: &'static str = "VerificationKey"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.VerificationKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.VerificationKey".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// A signature proving that a message was created by the owner of a verification key. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Signature { #[prost(bytes = "vec", tag = "1")] @@ -27,13 +25,11 @@ impl ::prost::Name for Signature { const NAME: &'static str = "Signature"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.Signature".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.Signature".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// The message the coordinator sends in round 1 of the signing protocol. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CoordinatorRound1 { #[prost(oneof = "coordinator_round1::Request", tags = "1, 2, 3")] @@ -41,6 +37,7 @@ pub struct CoordinatorRound1 { } /// Nested message and enum types in `CoordinatorRound1`. pub mod coordinator_round1 { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Request { /// The plan that the coordinator would like the followers to sign. @@ -62,13 +59,11 @@ impl ::prost::Name for CoordinatorRound1 { const NAME: &'static str = "CoordinatorRound1"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.CoordinatorRound1".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.CoordinatorRound1".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// The message the coordinator sends in round 2 of the signing protocol. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CoordinatorRound2 { /// The underlying signing packages being sent to the followers, for each signature. @@ -80,6 +75,7 @@ pub struct CoordinatorRound2 { /// Nested message and enum types in `CoordinatorRound2`. pub mod coordinator_round2 { /// A commitment along with a FROST identifier. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IdentifiedCommitments { /// The serialization of a FROST identifier. 
@@ -95,12 +91,9 @@ pub mod coordinator_round2 { const NAME: &'static str = "IdentifiedCommitments"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.CoordinatorRound2.IdentifiedCommitments" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.CoordinatorRound2.IdentifiedCommitments" - .into() + ::prost::alloc::format!( + "penumbra.custody.threshold.v1.CoordinatorRound2.{}", Self::NAME + ) } } /// A FROST signing package without a message. @@ -108,6 +101,7 @@ pub mod coordinator_round2 { /// We structure things this way because the message is derived from the transaction plan. /// FROST expects the signing package to include the identified commitments *and* /// the message, but we have no need to include the message. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PartialSigningPackage { #[prost(message, repeated, tag = "1")] @@ -117,12 +111,9 @@ pub mod coordinator_round2 { const NAME: &'static str = "PartialSigningPackage"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.CoordinatorRound2.PartialSigningPackage" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.CoordinatorRound2.PartialSigningPackage" - .into() + ::prost::alloc::format!( + "penumbra.custody.threshold.v1.CoordinatorRound2.{}", Self::NAME + ) } } } @@ -130,13 +121,11 @@ impl ::prost::Name for CoordinatorRound2 { const NAME: &'static str = "CoordinatorRound2"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.CoordinatorRound2".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.CoordinatorRound2".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// The first message the followers send back to the coordinator when signing. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FollowerRound1 { #[prost(message, optional, tag = "1")] @@ -151,6 +140,7 @@ pub struct FollowerRound1 { /// Nested message and enum types in `FollowerRound1`. pub mod follower_round1 { /// The inner message that will be signed by the follower. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Inner { /// One signing commitment pair for each signature requested by the plan, in order. 
@@ -163,10 +153,9 @@ pub mod follower_round1 { const NAME: &'static str = "Inner"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.FollowerRound1.Inner".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.FollowerRound1.Inner".into() + ::prost::alloc::format!( + "penumbra.custody.threshold.v1.FollowerRound1.{}", Self::NAME + ) } } } @@ -174,13 +163,11 @@ impl ::prost::Name for FollowerRound1 { const NAME: &'static str = "FollowerRound1"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.FollowerRound1".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.FollowerRound1".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// The second message the followers send back to the coordinator when signing. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct FollowerRound2 { #[prost(message, optional, tag = "1")] @@ -195,6 +182,7 @@ pub struct FollowerRound2 { /// Nested message and enum types in `FollowerRound2`. pub mod follower_round2 { /// The inner message that will be signed by the follower. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Inner { /// One share for each signature requested by the plan, in order. @@ -207,10 +195,9 @@ pub mod follower_round2 { const NAME: &'static str = "Inner"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.FollowerRound2.Inner".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.FollowerRound2.Inner".into() + ::prost::alloc::format!( + "penumbra.custody.threshold.v1.FollowerRound2.{}", Self::NAME + ) } } } @@ -218,13 +205,11 @@ impl ::prost::Name for FollowerRound2 { const NAME: &'static str = "FollowerRound2"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.FollowerRound2".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.FollowerRound2".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// The first message we broadcast in the DKG protocol. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DkgRound1 { /// The package we're sending to other people @@ -246,13 +231,11 @@ impl ::prost::Name for DkgRound1 { const NAME: &'static str = "DKGRound1"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.DKGRound1".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.DKGRound1".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } /// The second message we broadcast in the DKG protocol. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DkgRound2 { #[prost(message, optional, tag = "1")] @@ -267,6 +250,7 @@ pub struct DkgRound2 { /// Nested message and enum types in `DKGRound2`. 
pub mod dkg_round2 { /// A round2 package, encrypted, along with an identifier for the recipient. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TargetedPackage { /// A verification key identifying the recipient. @@ -280,13 +264,13 @@ pub mod dkg_round2 { const NAME: &'static str = "TargetedPackage"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.DKGRound2.TargetedPackage".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.DKGRound2.TargetedPackage".into() + ::prost::alloc::format!( + "penumbra.custody.threshold.v1.DKGRound2.{}", Self::NAME + ) } } /// An inner message that will be signed. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Inner { /// Encrypted packages for each recipient. @@ -300,10 +284,9 @@ pub mod dkg_round2 { const NAME: &'static str = "Inner"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.DKGRound2.Inner".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.DKGRound2.Inner".into() + ::prost::alloc::format!( + "penumbra.custody.threshold.v1.DKGRound2.{}", Self::NAME + ) } } } @@ -311,9 +294,6 @@ impl ::prost::Name for DkgRound2 { const NAME: &'static str = "DKGRound2"; const PACKAGE: &'static str = "penumbra.custody.threshold.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.threshold.v1.DKGRound2".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.threshold.v1.DKGRound2".into() + ::prost::alloc::format!("penumbra.custody.threshold.v1.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/penumbra.custody.threshold.v1.serde.rs b/crates/proto/src/gen/penumbra.custody.threshold.v1.serde.rs index 224579889e..86ecbc1f91 100644 --- a/crates/proto/src/gen/penumbra.custody.threshold.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.custody.threshold.v1.serde.rs @@ -239,7 +239,6 @@ impl serde::Serialize for coordinator_round2::IdentifiedCommitments { let mut struct_ser = serializer.serialize_struct("penumbra.custody.threshold.v1.CoordinatorRound2.IdentifiedCommitments", len)?; if !self.identifier.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("identifier", pbjson::private::base64::encode(&self.identifier).as_str())?; } if let Some(v) = self.commitments.as_ref() { @@ -460,17 +459,14 @@ impl serde::Serialize for DkgRound1 { } if !self.nullifier_commitment.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nullifierCommitment", pbjson::private::base64::encode(&self.nullifier_commitment).as_str())?; } if !self.epk.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epk", pbjson::private::base64::encode(&self.epk).as_str())?; } if !self.vk.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("vk", pbjson::private::base64::encode(&self.vk).as_str())?; } struct_ser.end() @@ -616,12 +612,10 @@ impl serde::Serialize for DkgRound2 { } if !self.vk.is_empty() { #[allow(clippy::needless_borrow)] - 
#[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("vk", pbjson::private::base64::encode(&self.vk).as_str())?; } if !self.sig.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sig", pbjson::private::base64::encode(&self.sig).as_str())?; } struct_ser.end() @@ -750,7 +744,6 @@ impl serde::Serialize for dkg_round2::Inner { } if !self.nullifier.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nullifier", pbjson::private::base64::encode(&self.nullifier).as_str())?; } struct_ser.end() @@ -864,12 +857,10 @@ impl serde::Serialize for dkg_round2::TargetedPackage { let mut struct_ser = serializer.serialize_struct("penumbra.custody.threshold.v1.DKGRound2.TargetedPackage", len)?; if !self.vk.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("vk", pbjson::private::base64::encode(&self.vk).as_str())?; } if !self.encrypted_package.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("encryptedPackage", pbjson::private::base64::encode(&self.encrypted_package).as_str())?; } struct_ser.end() @@ -1430,7 +1421,6 @@ impl serde::Serialize for Signature { let mut struct_ser = serializer.serialize_struct("penumbra.custody.threshold.v1.Signature", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() @@ -1529,7 +1519,6 @@ impl serde::Serialize for VerificationKey { let mut struct_ser = serializer.serialize_struct("penumbra.custody.threshold.v1.VerificationKey", len)?; if !self.inner.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("inner", pbjson::private::base64::encode(&self.inner).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.custody.v1.rs b/crates/proto/src/gen/penumbra.custody.v1.rs index 0f4d04f470..82fe5c426b 100644 --- a/crates/proto/src/gen/penumbra.custody.v1.rs +++ b/crates/proto/src/gen/penumbra.custody.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeRequest { /// The transaction plan to authorize. 
@@ -19,12 +19,10 @@ impl ::prost::Name for AuthorizeRequest { const NAME: &'static str = "AuthorizeRequest"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.AuthorizeRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.AuthorizeRequest".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeResponse { #[prost(message, optional, tag = "1")] @@ -36,12 +34,10 @@ impl ::prost::Name for AuthorizeResponse { const NAME: &'static str = "AuthorizeResponse"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.AuthorizeResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.AuthorizeResponse".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeValidatorDefinitionRequest { /// The validator definition to authorize. @@ -62,12 +58,10 @@ impl ::prost::Name for AuthorizeValidatorDefinitionRequest { const NAME: &'static str = "AuthorizeValidatorDefinitionRequest"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.AuthorizeValidatorDefinitionRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.AuthorizeValidatorDefinitionRequest".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeValidatorDefinitionResponse { /// The authorization signature for the validator definition. @@ -80,12 +74,10 @@ impl ::prost::Name for AuthorizeValidatorDefinitionResponse { const NAME: &'static str = "AuthorizeValidatorDefinitionResponse"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.AuthorizeValidatorDefinitionResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.AuthorizeValidatorDefinitionResponse".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeValidatorVoteRequest { /// The validator vote to authorize. @@ -106,12 +98,10 @@ impl ::prost::Name for AuthorizeValidatorVoteRequest { const NAME: &'static str = "AuthorizeValidatorVoteRequest"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.AuthorizeValidatorVoteRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.AuthorizeValidatorVoteRequest".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeValidatorVoteResponse { /// The authorization signature for the validator vote. 
@@ -124,16 +114,14 @@ impl ::prost::Name for AuthorizeValidatorVoteResponse { const NAME: &'static str = "AuthorizeValidatorVoteResponse"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.AuthorizeValidatorVoteResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.AuthorizeValidatorVoteResponse".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } /// A pre-authorization packet. This allows a custodian to delegate (partial) /// signing authority to other authorization mechanisms. Details of how a /// custodian manages those keys are out-of-scope for the custody protocol and /// are custodian-specific. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PreAuthorization { #[prost(oneof = "pre_authorization::PreAuthorization", tags = "1")] @@ -143,6 +131,7 @@ pub struct PreAuthorization { pub mod pre_authorization { /// An Ed25519-based preauthorization, containing an Ed25519 signature over the /// `TransactionPlan`. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Ed25519 { /// The Ed25519 verification key used to verify the signature. @@ -156,12 +145,12 @@ pub mod pre_authorization { const NAME: &'static str = "Ed25519"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.PreAuthorization.Ed25519".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.PreAuthorization.Ed25519".into() + ::prost::alloc::format!( + "penumbra.custody.v1.PreAuthorization.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum PreAuthorization { #[prost(message, tag = "1")] @@ -172,24 +161,20 @@ impl ::prost::Name for PreAuthorization { const NAME: &'static str = "PreAuthorization"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.PreAuthorization".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.PreAuthorization".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ExportFullViewingKeyRequest {} impl ::prost::Name for ExportFullViewingKeyRequest { const NAME: &'static str = "ExportFullViewingKeyRequest"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.ExportFullViewingKeyRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.ExportFullViewingKeyRequest".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExportFullViewingKeyResponse { /// The full viewing key. 
@@ -202,12 +187,10 @@ impl ::prost::Name for ExportFullViewingKeyResponse { const NAME: &'static str = "ExportFullViewingKeyResponse"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.ExportFullViewingKeyResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.ExportFullViewingKeyResponse".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ConfirmAddressRequest { #[prost(message, optional, tag = "1")] @@ -219,12 +202,10 @@ impl ::prost::Name for ConfirmAddressRequest { const NAME: &'static str = "ConfirmAddressRequest"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.ConfirmAddressRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.ConfirmAddressRequest".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ConfirmAddressResponse { #[prost(message, optional, tag = "1")] @@ -234,22 +215,13 @@ impl ::prost::Name for ConfirmAddressResponse { const NAME: &'static str = "ConfirmAddressResponse"; const PACKAGE: &'static str = "penumbra.custody.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.custody.v1.ConfirmAddressResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.custody.v1.ConfirmAddressResponse".into() + ::prost::alloc::format!("penumbra.custody.v1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod custody_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// The custody protocol is used by a wallet client to request authorization for @@ -282,8 +254,8 @@ pub mod custody_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -308,7 +280,7 @@ pub mod custody_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { CustodyServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -355,7 +327,8 @@ pub mod custody_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -382,7 +355,8 @@ pub mod custody_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -412,7 +386,8 @@ pub mod custody_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -445,7 +420,8 @@ pub mod custody_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: 
{}", e.into()), ) })?; @@ -480,7 +456,8 @@ pub mod custody_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -503,17 +480,11 @@ pub mod custody_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod custody_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with CustodyServiceServer. #[async_trait] - pub trait CustodyService: std::marker::Send + std::marker::Sync + 'static { + pub trait CustodyService: Send + Sync + 'static { /// Requests authorization of the transaction with the given description. async fn authorize( &self, @@ -575,18 +546,20 @@ pub mod custody_service_server { /// understand the transaction and determine whether or not it should be /// authorized. #[derive(Debug)] - pub struct CustodyServiceServer { - inner: Arc, + pub struct CustodyServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl CustodyServiceServer { + struct _Inner(Arc); + impl CustodyServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -636,8 +609,8 @@ pub mod custody_service_server { impl tonic::codegen::Service> for CustodyServiceServer where T: CustodyService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -649,6 +622,7 @@ pub mod custody_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.custody.v1.CustodyService/Authorize" => { #[allow(non_camel_case_types)] @@ -679,6 +653,7 @@ pub mod custody_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuthorizeSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -733,6 +708,7 @@ pub mod custody_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuthorizeValidatorDefinitionSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -782,6 +758,7 @@ pub mod custody_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuthorizeValidatorVoteSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -831,6 +808,7 @@ pub mod custody_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = 
ExportFullViewingKeySvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -877,6 +855,7 @@ pub mod custody_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ConfirmAddressSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -895,25 +874,20 @@ pub mod custody_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for CustodyServiceServer { + impl Clone for CustodyServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -925,9 +899,17 @@ pub mod custody_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.custody.v1.CustodyService"; - impl tonic::server::NamedService for CustodyServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for CustodyServiceServer { + const NAME: &'static str = "penumbra.custody.v1.CustodyService"; } } diff --git a/crates/proto/src/gen/penumbra.custody.v1.serde.rs b/crates/proto/src/gen/penumbra.custody.v1.serde.rs index 86de4fb40e..2b510aa434 100644 --- a/crates/proto/src/gen/penumbra.custody.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.custody.v1.serde.rs @@ -1102,12 +1102,10 @@ impl serde::Serialize for pre_authorization::Ed25519 { let mut struct_ser = serializer.serialize_struct("penumbra.custody.v1.PreAuthorization.Ed25519", len)?; if !self.vk.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("vk", pbjson::private::base64::encode(&self.vk).as_str())?; } if !self.sig.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("sig", pbjson::private::base64::encode(&self.sig).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.tools.summoning.v1.rs b/crates/proto/src/gen/penumbra.tools.summoning.v1.rs index 8a41149ef6..b9ab44a091 100644 --- a/crates/proto/src/gen/penumbra.tools.summoning.v1.rs +++ b/crates/proto/src/gen/penumbra.tools.summoning.v1.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ParticipateRequest { #[prost(oneof = "participate_request::Msg", tags = "1, 2")] @@ -7,6 +7,7 @@ pub struct ParticipateRequest { /// Nested message and enum types in `ParticipateRequest`. pub mod participate_request { /// Sent at the beginning of the connection to identify the participant. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Identify { #[prost(message, optional, tag = "1")] @@ -18,13 +19,13 @@ pub mod participate_request { const NAME: &'static str = "Identify"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateRequest.Identify".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateRequest.Identify".into() + ::prost::alloc::format!( + "penumbra.tools.summoning.v1.ParticipateRequest.{}", Self::NAME + ) } } /// Sent by the participant after getting a `ContributeNow` message. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Contribution { #[prost(message, optional, tag = "1")] @@ -38,12 +39,12 @@ pub mod participate_request { const NAME: &'static str = "Contribution"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateRequest.Contribution".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateRequest.Contribution".into() + ::prost::alloc::format!( + "penumbra.tools.summoning.v1.ParticipateRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Msg { #[prost(message, tag = "1")] @@ -56,12 +57,10 @@ impl ::prost::Name for ParticipateRequest { const NAME: &'static str = "ParticipateRequest"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateRequest".into() + ::prost::alloc::format!("penumbra.tools.summoning.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CeremonyCrs { #[prost(bytes = "vec", tag = "100")] @@ -83,12 +82,10 @@ impl ::prost::Name for CeremonyCrs { const NAME: &'static str = "CeremonyCrs"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.CeremonyCrs".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.CeremonyCrs".into() + ::prost::alloc::format!("penumbra.tools.summoning.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CeremonyLinkingProof { #[prost(bytes = "vec", tag = "100")] @@ -110,12 +107,10 @@ impl ::prost::Name for CeremonyLinkingProof { const NAME: &'static str = "CeremonyLinkingProof"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.CeremonyLinkingProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.CeremonyLinkingProof".into() + ::prost::alloc::format!("penumbra.tools.summoning.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CeremonyParentHashes { #[prost(bytes = "vec", tag = "100")] @@ -137,12 +132,10 @@ impl ::prost::Name for CeremonyParentHashes { const NAME: &'static str = "CeremonyParentHashes"; const PACKAGE: &'static 
str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.CeremonyParentHashes".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.CeremonyParentHashes".into() + ::prost::alloc::format!("penumbra.tools.summoning.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ParticipateResponse { #[prost(oneof = "participate_response::Msg", tags = "1, 2, 3")] @@ -151,7 +144,8 @@ pub struct ParticipateResponse { /// Nested message and enum types in `ParticipateResponse`. pub mod participate_response { /// Streamed to the participant to inform them of their position in the queue. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Position { /// The position of the participant in the queue. #[prost(uint32, tag = "1")] @@ -174,13 +168,13 @@ pub mod participate_response { const NAME: &'static str = "Position"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateResponse.Position".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateResponse.Position".into() + ::prost::alloc::format!( + "penumbra.tools.summoning.v1.ParticipateResponse.{}", Self::NAME + ) } } /// Sent to the participant to inform them that they should contribute now. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ContributeNow { /// The most recent CRS, which the participant should update. @@ -191,14 +185,14 @@ pub mod participate_response { const NAME: &'static str = "ContributeNow"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateResponse.ContributeNow".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateResponse.ContributeNow".into() + ::prost::alloc::format!( + "penumbra.tools.summoning.v1.ParticipateResponse.{}", Self::NAME + ) } } /// Sent to the participant to confim their contribution was accepted. 
- #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct Confirm { #[prost(uint64, tag = "1")] pub slot: u64, @@ -207,12 +201,12 @@ pub mod participate_response { const NAME: &'static str = "Confirm"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateResponse.Confirm".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateResponse.Confirm".into() + ::prost::alloc::format!( + "penumbra.tools.summoning.v1.ParticipateResponse.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Msg { #[prost(message, tag = "1")] @@ -227,22 +221,13 @@ impl ::prost::Name for ParticipateResponse { const NAME: &'static str = "ParticipateResponse"; const PACKAGE: &'static str = "penumbra.tools.summoning.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.tools.summoning.v1.ParticipateResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.tools.summoning.v1.ParticipateResponse".into() + ::prost::alloc::format!("penumbra.tools.summoning.v1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod ceremony_coordinator_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Runs a Phase 2 MPC ceremony with dynamic slot allocation. @@ -265,8 +250,8 @@ pub mod ceremony_coordinator_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -291,7 +276,7 @@ pub mod ceremony_coordinator_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { CeremonyCoordinatorServiceClient::new( InterceptedService::new(inner, interceptor), @@ -353,7 +338,8 @@ pub mod ceremony_coordinator_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -376,22 +362,16 @@ pub mod ceremony_coordinator_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod ceremony_coordinator_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with CeremonyCoordinatorServiceServer. #[async_trait] - pub trait CeremonyCoordinatorService: std::marker::Send + std::marker::Sync + 'static { + pub trait CeremonyCoordinatorService: Send + Sync + 'static { /// Server streaming response type for the Participate method. type ParticipateStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// The protocol used to participate in the ceremony. 
/// @@ -415,18 +395,20 @@ pub mod ceremony_coordinator_service_server { } /// Runs a Phase 2 MPC ceremony with dynamic slot allocation. #[derive(Debug)] - pub struct CeremonyCoordinatorServiceServer { - inner: Arc, + pub struct CeremonyCoordinatorServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl CeremonyCoordinatorServiceServer { + struct _Inner(Arc); + impl CeremonyCoordinatorServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -477,8 +459,8 @@ pub mod ceremony_coordinator_service_server { for CeremonyCoordinatorServiceServer where T: CeremonyCoordinatorService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -490,6 +472,7 @@ pub mod ceremony_coordinator_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.tools.summoning.v1.CeremonyCoordinatorService/Participate" => { #[allow(non_camel_case_types)] @@ -527,6 +510,7 @@ pub mod ceremony_coordinator_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ParticipateSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -545,25 +529,20 @@ pub mod ceremony_coordinator_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for CeremonyCoordinatorServiceServer { + impl Clone for CeremonyCoordinatorServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -575,9 +554,18 @@ pub mod ceremony_coordinator_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.tools.summoning.v1.CeremonyCoordinatorService"; - impl tonic::server::NamedService for CeremonyCoordinatorServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService + for CeremonyCoordinatorServiceServer { + const NAME: &'static str = "penumbra.tools.summoning.v1.CeremonyCoordinatorService"; } } diff --git a/crates/proto/src/gen/penumbra.tools.summoning.v1.serde.rs b/crates/proto/src/gen/penumbra.tools.summoning.v1.serde.rs index 8b47ad338f..5864da3de6 100644 --- a/crates/proto/src/gen/penumbra.tools.summoning.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.tools.summoning.v1.serde.rs @@ 
-30,37 +30,30 @@ impl serde::Serialize for CeremonyCrs { let mut struct_ser = serializer.serialize_struct("penumbra.tools.summoning.v1.CeremonyCrs", len)?; if !self.spend.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("spend", pbjson::private::base64::encode(&self.spend).as_str())?; } if !self.output.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("output", pbjson::private::base64::encode(&self.output).as_str())?; } if !self.delegator_vote.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("delegatorVote", pbjson::private::base64::encode(&self.delegator_vote).as_str())?; } if !self.undelegate_claim.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("undelegateClaim", pbjson::private::base64::encode(&self.undelegate_claim).as_str())?; } if !self.swap.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("swap", pbjson::private::base64::encode(&self.swap).as_str())?; } if !self.swap_claim.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("swapClaim", pbjson::private::base64::encode(&self.swap_claim).as_str())?; } if !self.nullifer_derivation_crs.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nulliferDerivationCrs", pbjson::private::base64::encode(&self.nullifer_derivation_crs).as_str())?; } struct_ser.end() @@ -259,37 +252,30 @@ impl serde::Serialize for CeremonyLinkingProof { let mut struct_ser = serializer.serialize_struct("penumbra.tools.summoning.v1.CeremonyLinkingProof", len)?; if !self.spend.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("spend", pbjson::private::base64::encode(&self.spend).as_str())?; } if !self.output.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("output", pbjson::private::base64::encode(&self.output).as_str())?; } if !self.delegator_vote.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("delegatorVote", pbjson::private::base64::encode(&self.delegator_vote).as_str())?; } if !self.undelegate_claim.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("undelegateClaim", pbjson::private::base64::encode(&self.undelegate_claim).as_str())?; } if !self.swap.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("swap", pbjson::private::base64::encode(&self.swap).as_str())?; } if !self.swap_claim.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("swapClaim", pbjson::private::base64::encode(&self.swap_claim).as_str())?; } if !self.nullifer_derivation_crs.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nulliferDerivationCrs", pbjson::private::base64::encode(&self.nullifer_derivation_crs).as_str())?; } 
struct_ser.end() @@ -488,37 +474,30 @@ impl serde::Serialize for CeremonyParentHashes { let mut struct_ser = serializer.serialize_struct("penumbra.tools.summoning.v1.CeremonyParentHashes", len)?; if !self.spend.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("spend", pbjson::private::base64::encode(&self.spend).as_str())?; } if !self.output.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("output", pbjson::private::base64::encode(&self.output).as_str())?; } if !self.delegator_vote.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("delegatorVote", pbjson::private::base64::encode(&self.delegator_vote).as_str())?; } if !self.undelegate_claim.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("undelegateClaim", pbjson::private::base64::encode(&self.undelegate_claim).as_str())?; } if !self.swap.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("swap", pbjson::private::base64::encode(&self.swap).as_str())?; } if !self.swap_claim.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("swapClaim", pbjson::private::base64::encode(&self.swap_claim).as_str())?; } if !self.nullifer_derivation_crs.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("nulliferDerivationCrs", pbjson::private::base64::encode(&self.nullifer_derivation_crs).as_str())?; } struct_ser.end() @@ -1165,7 +1144,6 @@ impl serde::Serialize for participate_response::Confirm { let mut struct_ser = serializer.serialize_struct("penumbra.tools.summoning.v1.ParticipateResponse.Confirm", len)?; if self.slot != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("slot", ToString::to_string(&self.slot).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.rs b/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.rs index 97a472b1ed..21e813cb1f 100644 --- a/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.rs +++ b/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.rs @@ -1,5 +1,5 @@ -// This file is @generated by prost-build. /// GetTxRequest is the request type for the GetTx RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTxRequest { /// Hash of transaction to retrieve @@ -13,13 +13,11 @@ impl ::prost::Name for GetTxRequest { const NAME: &'static str = "GetTxRequest"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.GetTxRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.GetTxRequest".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// GetTxResponse is the response type for the GetTx RPC method. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetTxResponse { /// Hash of transaction @@ -38,12 +36,10 @@ impl ::prost::Name for GetTxResponse { const NAME: &'static str = "GetTxResponse"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.GetTxResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.GetTxResponse".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxResult { #[prost(string, tag = "1")] @@ -59,12 +55,10 @@ impl ::prost::Name for TxResult { const NAME: &'static str = "TxResult"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.TxResult".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.TxResult".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Tag { #[prost(bytes = "vec", tag = "1")] @@ -78,13 +72,11 @@ impl ::prost::Name for Tag { const NAME: &'static str = "Tag"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.Tag".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.Tag".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// BroadcastTxAsyncRequest is the request type for the BroadcastTxAsync RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTxAsyncRequest { #[prost(bytes = "vec", tag = "1")] @@ -96,13 +88,11 @@ impl ::prost::Name for BroadcastTxAsyncRequest { const NAME: &'static str = "BroadcastTxAsyncRequest"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.BroadcastTxAsyncRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.BroadcastTxAsyncRequest".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// BroadcastTxAsyncResponse is the response type for the BroadcastTxAsync RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTxAsyncResponse { #[prost(uint64, tag = "1")] @@ -118,13 +108,11 @@ impl ::prost::Name for BroadcastTxAsyncResponse { const NAME: &'static str = "BroadcastTxAsyncResponse"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.BroadcastTxAsyncResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.BroadcastTxAsyncResponse".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// BroadcastTxSyncRequest is the request type for the BroadcastTxSync RPC method. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTxSyncRequest { #[prost(bytes = "vec", tag = "1")] @@ -136,13 +124,11 @@ impl ::prost::Name for BroadcastTxSyncRequest { const NAME: &'static str = "BroadcastTxSyncRequest"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.BroadcastTxSyncRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.BroadcastTxSyncRequest".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// BroadcastTxSyncResponse is the response type for the BroadcastTxSync RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTxSyncResponse { #[prost(uint64, tag = "1")] @@ -158,26 +144,22 @@ impl ::prost::Name for BroadcastTxSyncResponse { const NAME: &'static str = "BroadcastTxSyncResponse"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.BroadcastTxSyncResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.BroadcastTxSyncResponse".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// GetStatusRequest is the request type for the Query/GetStatus RPC method. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GetStatusRequest {} impl ::prost::Name for GetStatusRequest { const NAME: &'static str = "GetStatusRequest"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.GetStatusRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.GetStatusRequest".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// GetStatusResponse is the response type for the Query/GetStatus RPC method. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetStatusResponse { #[prost(message, optional, tag = "1")] @@ -195,12 +177,10 @@ impl ::prost::Name for GetStatusResponse { const NAME: &'static str = "GetStatusResponse"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.GetStatusResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.GetStatusResponse".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SyncInfo { #[prost(bytes = "vec", tag = "1")] @@ -224,13 +204,11 @@ impl ::prost::Name for SyncInfo { const NAME: &'static str = "SyncInfo"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.SyncInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.SyncInfo".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// ABCIQueryRequest defines the request structure for the ABCIQuery gRPC query. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AbciQueryRequest { #[prost(bytes = "vec", tag = "1")] @@ -246,16 +224,14 @@ impl ::prost::Name for AbciQueryRequest { const NAME: &'static str = "ABCIQueryRequest"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.ABCIQueryRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.ABCIQueryRequest".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// ABCIQueryResponse defines the response structure for the ABCIQuery gRPC query. /// /// Note: This type is a duplicate of the ResponseQuery proto type defined in /// Tendermint. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AbciQueryResponse { #[prost(uint32, tag = "1")] @@ -285,14 +261,12 @@ impl ::prost::Name for AbciQueryResponse { const NAME: &'static str = "ABCIQueryResponse"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.ABCIQueryResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.ABCIQueryResponse".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// GetBlockByHeightRequest is the request type for the Query/GetBlockByHeight RPC method. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockByHeightRequest { #[prost(int64, tag = "1")] pub height: i64, @@ -301,13 +275,11 @@ impl ::prost::Name for GetBlockByHeightRequest { const NAME: &'static str = "GetBlockByHeightRequest"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.GetBlockByHeightRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.GetBlockByHeightRequest".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// GetBlockByHeightResponse is the response type for the Query/GetBlockByHeight RPC method. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GetBlockByHeightResponse { #[prost(message, optional, tag = "1")] @@ -323,22 +295,13 @@ impl ::prost::Name for GetBlockByHeightResponse { const NAME: &'static str = "GetBlockByHeightResponse"; const PACKAGE: &'static str = "penumbra.util.tendermint_proxy.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.util.tendermint_proxy.v1.GetBlockByHeightResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.util.tendermint_proxy.v1.GetBlockByHeightResponse".into() + ::prost::alloc::format!("penumbra.util.tendermint_proxy.v1.{}", Self::NAME) } } /// Generated client implementations. #[cfg(feature = "rpc")] pub mod tendermint_proxy_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// Defines the gRPC query service for proxying requests to an upstream Tendermint RPC. 
@@ -361,8 +324,8 @@ pub mod tendermint_proxy_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -387,7 +350,7 @@ pub mod tendermint_proxy_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { TendermintProxyServiceClient::new( InterceptedService::new(inner, interceptor), @@ -436,7 +399,8 @@ pub mod tendermint_proxy_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -466,7 +430,8 @@ pub mod tendermint_proxy_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -496,7 +461,8 @@ pub mod tendermint_proxy_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -523,7 +489,8 @@ pub mod tendermint_proxy_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -555,7 +522,8 @@ pub mod tendermint_proxy_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -585,7 +553,8 @@ pub mod tendermint_proxy_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -608,17 +577,11 @@ pub mod tendermint_proxy_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod tendermint_proxy_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with TendermintProxyServiceServer. #[async_trait] - pub trait TendermintProxyService: std::marker::Send + std::marker::Sync + 'static { + pub trait TendermintProxyService: Send + Sync + 'static { /// Status queries the current status. async fn get_status( &self, @@ -669,18 +632,20 @@ pub mod tendermint_proxy_service_server { } /// Defines the gRPC query service for proxying requests to an upstream Tendermint RPC. 
#[derive(Debug)] - pub struct TendermintProxyServiceServer { - inner: Arc, + pub struct TendermintProxyServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl TendermintProxyServiceServer { + struct _Inner(Arc); + impl TendermintProxyServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -731,8 +696,8 @@ pub mod tendermint_proxy_service_server { for TendermintProxyServiceServer where T: TendermintProxyService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -744,6 +709,7 @@ pub mod tendermint_proxy_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.util.tendermint_proxy.v1.TendermintProxyService/GetStatus" => { #[allow(non_camel_case_types)] @@ -775,6 +741,7 @@ pub mod tendermint_proxy_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetStatusSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -824,6 +791,7 @@ pub mod tendermint_proxy_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BroadcastTxAsyncSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -873,6 +841,7 @@ pub mod tendermint_proxy_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BroadcastTxSyncSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -917,6 +886,7 @@ pub mod tendermint_proxy_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetTxSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -963,6 +933,7 @@ pub mod tendermint_proxy_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ABCIQuerySvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1012,6 +983,7 @@ pub mod tendermint_proxy_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GetBlockByHeightSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1030,25 +1002,20 @@ pub mod tendermint_proxy_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - 
(tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for TendermintProxyServiceServer { + impl Clone for TendermintProxyServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -1060,9 +1027,18 @@ pub mod tendermint_proxy_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.util.tendermint_proxy.v1.TendermintProxyService"; - impl tonic::server::NamedService for TendermintProxyServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService + for TendermintProxyServiceServer { + const NAME: &'static str = "penumbra.util.tendermint_proxy.v1.TendermintProxyService"; } } diff --git a/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.serde.rs b/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.serde.rs index e2e0d3373e..592140e6c7 100644 --- a/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.util.tendermint_proxy.v1.serde.rs @@ -21,7 +21,6 @@ impl serde::Serialize for AbciQueryRequest { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.ABCIQueryRequest", len)?; if !self.data.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("data", pbjson::private::base64::encode(&self.data).as_str())?; } if !self.path.is_empty() { @@ -29,7 +28,6 @@ impl serde::Serialize for AbciQueryRequest { } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if self.prove { @@ -169,12 +167,10 @@ impl serde::Serialize for BroadcastTxAsyncRequest { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.BroadcastTxAsyncRequest", len)?; if !self.params.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("params", pbjson::private::base64::encode(&self.params).as_str())?; } if self.req_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("reqId", ToString::to_string(&self.req_id).as_str())?; } struct_ser.end() @@ -296,12 +292,10 @@ impl serde::Serialize for BroadcastTxAsyncResponse { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.BroadcastTxAsyncResponse", len)?; if self.code != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("code", ToString::to_string(&self.code).as_str())?; } if !self.data.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("data", pbjson::private::base64::encode(&self.data).as_str())?; } if !self.log.is_empty() { @@ -309,7 +303,6 @@ impl serde::Serialize for BroadcastTxAsyncResponse { } if !self.hash.is_empty() { 
#[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("hash", pbjson::private::base64::encode(&self.hash).as_str())?; } struct_ser.end() @@ -448,12 +441,10 @@ impl serde::Serialize for BroadcastTxSyncRequest { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.BroadcastTxSyncRequest", len)?; if !self.params.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("params", pbjson::private::base64::encode(&self.params).as_str())?; } if self.req_id != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("reqId", ToString::to_string(&self.req_id).as_str())?; } struct_ser.end() @@ -575,12 +566,10 @@ impl serde::Serialize for BroadcastTxSyncResponse { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.BroadcastTxSyncResponse", len)?; if self.code != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("code", ToString::to_string(&self.code).as_str())?; } if !self.data.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("data", pbjson::private::base64::encode(&self.data).as_str())?; } if !self.log.is_empty() { @@ -588,7 +577,6 @@ impl serde::Serialize for BroadcastTxSyncResponse { } if !self.hash.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("hash", pbjson::private::base64::encode(&self.hash).as_str())?; } struct_ser.end() @@ -724,7 +712,6 @@ impl serde::Serialize for GetBlockByHeightRequest { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.GetBlockByHeightRequest", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } struct_ser.end() @@ -898,7 +885,6 @@ impl serde::Serialize for GetTxRequest { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.GetTxRequest", len)?; if !self.hash.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("hash", pbjson::private::base64::encode(&self.hash).as_str())?; } if self.prove { @@ -1023,17 +1009,14 @@ impl serde::Serialize for GetTxResponse { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.GetTxResponse", len)?; if !self.hash.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("hash", pbjson::private::base64::encode(&self.hash).as_str())?; } if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if self.index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("index", ToString::to_string(&self.index).as_str())?; } if let Some(v) = self.tx_result.as_ref() { @@ -1041,7 +1024,6 @@ impl serde::Serialize for GetTxResponse { } if !self.tx.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("tx", 
pbjson::private::base64::encode(&self.tx).as_str())?; } struct_ser.end() @@ -1203,17 +1185,14 @@ impl serde::Serialize for SyncInfo { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.SyncInfo", len)?; if !self.latest_block_hash.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("latestBlockHash", pbjson::private::base64::encode(&self.latest_block_hash).as_str())?; } if !self.latest_app_hash.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("latestAppHash", pbjson::private::base64::encode(&self.latest_app_hash).as_str())?; } if self.latest_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("latestBlockHeight", ToString::to_string(&self.latest_block_height).as_str())?; } if let Some(v) = self.latest_block_time.as_ref() { @@ -1377,12 +1356,10 @@ impl serde::Serialize for Tag { let mut struct_ser = serializer.serialize_struct("penumbra.util.tendermint_proxy.v1.Tag", len)?; if !self.key.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("key", pbjson::private::base64::encode(&self.key).as_str())?; } if !self.value.is_empty() { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("value", pbjson::private::base64::encode(&self.value).as_str())?; } if self.index { @@ -1520,12 +1497,10 @@ impl serde::Serialize for TxResult { } if self.gas_wanted != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("gasWanted", ToString::to_string(&self.gas_wanted).as_str())?; } if self.gas_used != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("gasUsed", ToString::to_string(&self.gas_used).as_str())?; } if !self.tags.is_empty() { diff --git a/crates/proto/src/gen/penumbra.view.v1.rs b/crates/proto/src/gen/penumbra.view.v1.rs index 8dd6ff3157..5ec54bf491 100644 --- a/crates/proto/src/gen/penumbra.view.v1.rs +++ b/crates/proto/src/gen/penumbra.view.v1.rs @@ -1,18 +1,16 @@ -// This file is @generated by prost-build. /// There's only one transparent address per wallet, so this request has no parameters; /// the message exists to satisfy forward-compatibility properties. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct TransparentAddressRequest {} impl ::prost::Name for TransparentAddressRequest { const NAME: &'static str = "TransparentAddressRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransparentAddressRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransparentAddressRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransparentAddressResponse { /// The raw (binary) transparent address @@ -26,14 +24,12 @@ impl ::prost::Name for TransparentAddressResponse { const NAME: &'static str = "TransparentAddressResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransparentAddressResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransparentAddressResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Filters in an `AuctionsRequest` will be combined using `AND` logic -- that /// is, the more filters you add, the fewer responses you're likely to get. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionsRequest { /// If present, filter balances to only include the account specified by the `AddressIndex`. @@ -57,12 +53,10 @@ impl ::prost::Name for AuctionsRequest { const NAME: &'static str = "AuctionsRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AuctionsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AuctionsRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuctionsResponse { #[prost(message, optional, tag = "1")] @@ -101,12 +95,10 @@ impl ::prost::Name for AuctionsResponse { const NAME: &'static str = "AuctionsResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AuctionsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AuctionsResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeAndBuildRequest { /// The transaction plan to authorize and build. 
@@ -119,12 +111,10 @@ impl ::prost::Name for AuthorizeAndBuildRequest { const NAME: &'static str = "AuthorizeAndBuildRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AuthorizeAndBuildRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AuthorizeAndBuildRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AuthorizeAndBuildResponse { #[prost(oneof = "authorize_and_build_response::Status", tags = "1, 2")] @@ -133,7 +123,8 @@ pub struct AuthorizeAndBuildResponse { /// Nested message and enum types in `AuthorizeAndBuildResponse`. pub mod authorize_and_build_response { /// Signals that building is in progress. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct BuildProgress { /// An approximate progress of the build, from 0 to 1. #[prost(float, tag = "1")] @@ -143,13 +134,13 @@ pub mod authorize_and_build_response { const NAME: &'static str = "BuildProgress"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AuthorizeAndBuildResponse.BuildProgress".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AuthorizeAndBuildResponse.BuildProgress".into() + ::prost::alloc::format!( + "penumbra.view.v1.AuthorizeAndBuildResponse.{}", Self::NAME + ) } } /// Signals that the transaction is complete. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Complete { /// The finished transaction. @@ -162,12 +153,12 @@ pub mod authorize_and_build_response { const NAME: &'static str = "Complete"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AuthorizeAndBuildResponse.Complete".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AuthorizeAndBuildResponse.Complete".into() + ::prost::alloc::format!( + "penumbra.view.v1.AuthorizeAndBuildResponse.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Status { #[prost(message, tag = "1")] @@ -180,12 +171,10 @@ impl ::prost::Name for AuthorizeAndBuildResponse { const NAME: &'static str = "AuthorizeAndBuildResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AuthorizeAndBuildResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AuthorizeAndBuildResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTransactionRequest { /// The transaction to broadcast. 
@@ -201,12 +190,10 @@ impl ::prost::Name for BroadcastTransactionRequest { const NAME: &'static str = "BroadcastTransactionRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.BroadcastTransactionRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.BroadcastTransactionRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastTransactionResponse { #[prost(oneof = "broadcast_transaction_response::Status", tags = "1, 2")] @@ -215,6 +202,7 @@ pub struct BroadcastTransactionResponse { /// Nested message and enum types in `BroadcastTransactionResponse`. pub mod broadcast_transaction_response { /// Signals that the transaction was broadcast successfully (but has not been confirmed). + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BroadcastSuccess { /// The hash of the transaction that was broadcast. @@ -227,15 +215,15 @@ pub mod broadcast_transaction_response { const NAME: &'static str = "BroadcastSuccess"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.BroadcastTransactionResponse.BroadcastSuccess".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.BroadcastTransactionResponse.BroadcastSuccess".into() + ::prost::alloc::format!( + "penumbra.view.v1.BroadcastTransactionResponse.{}", Self::NAME + ) } } /// Signals that the transaction has been confirmed on-chain and detected by the view server. /// /// Will not be sent unless await_detection was true. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Confirmed { /// The hash of the transaction that was broadcast. @@ -251,12 +239,12 @@ pub mod broadcast_transaction_response { const NAME: &'static str = "Confirmed"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.BroadcastTransactionResponse.Confirmed".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.BroadcastTransactionResponse.Confirmed".into() + ::prost::alloc::format!( + "penumbra.view.v1.BroadcastTransactionResponse.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Status { #[prost(message, tag = "1")] @@ -269,12 +257,10 @@ impl ::prost::Name for BroadcastTransactionResponse { const NAME: &'static str = "BroadcastTransactionResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.BroadcastTransactionResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.BroadcastTransactionResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionPlannerRequest { /// The expiry height for the requested TransactionPlan @@ -354,6 +340,7 @@ pub struct TransactionPlannerRequest { /// Nested message and enum types in `TransactionPlannerRequest`. 
pub mod transaction_planner_request { /// Request message subtypes + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Output { /// The amount and denomination in which the Output is issued. @@ -369,12 +356,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "Output"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.Output".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.Output".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Spend { /// The input amount and denomination in which the Spend is issued. @@ -390,12 +377,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "Spend"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.Spend".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.Spend".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Swap { /// The input amount and denomination to be traded in the Swap. @@ -421,12 +408,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "Swap"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.Swap".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.Swap".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapClaim { /// SwapCommitment to identify the Swap to be claimed. 
@@ -441,12 +428,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "SwapClaim"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.SwapClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.SwapClaim".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Delegate { #[prost(message, optional, tag = "1")] @@ -460,12 +447,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "Delegate"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.Delegate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.Delegate".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Undelegate { #[prost(message, optional, tag = "1")] @@ -479,12 +466,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "Undelegate"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.Undelegate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.Undelegate".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UndelegateClaim { /// The identity key of the validator to finish undelegating from. @@ -516,12 +503,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "UndelegateClaim"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.UndelegateClaim".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.UndelegateClaim".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionOpen { /// Contains the data defining the position, sufficient to compute its `PositionId`. @@ -537,12 +524,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "PositionOpen"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.PositionOpen".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.PositionOpen".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionClose { /// The position to close. 
@@ -555,12 +542,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "PositionClose"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.PositionClose".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.PositionClose".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PositionWithdraw { /// The position to withdraw. @@ -583,12 +570,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "PositionWithdraw"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.PositionWithdraw".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.PositionWithdraw".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionSchedule { /// The description of the auction to schedule. @@ -601,14 +588,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "ActionDutchAuctionSchedule"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.ActionDutchAuctionSchedule" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.ActionDutchAuctionSchedule" - .into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionEnd { /// The unique id of the auction to close. @@ -621,12 +606,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "ActionDutchAuctionEnd"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.ActionDutchAuctionEnd".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.ActionDutchAuctionEnd".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ActionDutchAuctionWithdraw { /// The auction to withdraw funds from. @@ -642,14 +627,12 @@ pub mod transaction_planner_request { const NAME: &'static str = "ActionDutchAuctionWithdraw"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.ActionDutchAuctionWithdraw" - .into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.ActionDutchAuctionWithdraw" - .into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegatorVote { /// The proposal being voted on. 
@@ -676,13 +659,13 @@ pub mod transaction_planner_request { const NAME: &'static str = "DelegatorVote"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest.DelegatorVote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest.DelegatorVote".into() + ::prost::alloc::format!( + "penumbra.view.v1.TransactionPlannerRequest.{}", Self::NAME + ) } } /// Specifies either that the planner should compute fees automatically or that it should use a fixed fee amount. + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum FeeMode { /// Automatically compute a fee based on gas use. @@ -697,12 +680,10 @@ impl ::prost::Name for TransactionPlannerRequest { const NAME: &'static str = "TransactionPlannerRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionPlannerResponse { #[prost(message, optional, tag = "1")] @@ -714,12 +695,10 @@ impl ::prost::Name for TransactionPlannerResponse { const NAME: &'static str = "TransactionPlannerResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionPlannerResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionPlannerResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AddressByIndexRequest { #[prost(message, optional, tag = "1")] @@ -731,12 +710,10 @@ impl ::prost::Name for AddressByIndexRequest { const NAME: &'static str = "AddressByIndexRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AddressByIndexRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AddressByIndexRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AddressByIndexResponse { #[prost(message, optional, tag = "1")] @@ -746,24 +723,20 @@ impl ::prost::Name for AddressByIndexResponse { const NAME: &'static str = "AddressByIndexResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AddressByIndexResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AddressByIndexResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct WalletIdRequest {} impl ::prost::Name for WalletIdRequest { const NAME: &'static str = "WalletIdRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WalletIdRequest".into() - } - fn type_url() -> 
::prost::alloc::string::String { - "/penumbra.view.v1.WalletIdRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WalletIdResponse { #[prost(message, optional, tag = "1")] @@ -773,12 +746,10 @@ impl ::prost::Name for WalletIdResponse { const NAME: &'static str = "WalletIdResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WalletIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WalletIdResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IndexByAddressRequest { #[prost(message, optional, tag = "1")] @@ -788,12 +759,10 @@ impl ::prost::Name for IndexByAddressRequest { const NAME: &'static str = "IndexByAddressRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.IndexByAddressRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.IndexByAddressRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct IndexByAddressResponse { /// Will be absent if given an address not viewable by this viewing service @@ -806,12 +775,10 @@ impl ::prost::Name for IndexByAddressResponse { const NAME: &'static str = "IndexByAddressResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.IndexByAddressResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.IndexByAddressResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EphemeralAddressRequest { #[prost(message, optional, tag = "1")] @@ -823,12 +790,10 @@ impl ::prost::Name for EphemeralAddressRequest { const NAME: &'static str = "EphemeralAddressRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.EphemeralAddressRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.EphemeralAddressRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EphemeralAddressResponse { #[prost(message, optional, tag = "1")] @@ -838,12 +803,10 @@ impl ::prost::Name for EphemeralAddressResponse { const NAME: &'static str = "EphemeralAddressResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.EphemeralAddressResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.EphemeralAddressResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BalancesRequest { /// If present, filter balances to only include the account specified by the `AddressIndex`. 
@@ -859,12 +822,10 @@ impl ::prost::Name for BalancesRequest { const NAME: &'static str = "BalancesRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.BalancesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.BalancesRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BalancesResponse { /// Deprecated: use `account_address` instead. @@ -891,27 +852,23 @@ impl ::prost::Name for BalancesResponse { const NAME: &'static str = "BalancesResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.BalancesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.BalancesResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests sync status of the view service. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct StatusRequest {} impl ::prost::Name for StatusRequest { const NAME: &'static str = "StatusRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.StatusRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.StatusRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Returns the status of the view service and whether it is synchronized with the chain state. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct StatusResponse { /// The height the view service has synchronized to so far when doing a full linear sync #[prost(uint64, tag = "1")] @@ -927,27 +884,23 @@ impl ::prost::Name for StatusResponse { const NAME: &'static str = "StatusResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.StatusResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.StatusResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests streaming updates on the sync height until the view service is synchronized. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct StatusStreamRequest {} impl ::prost::Name for StatusStreamRequest { const NAME: &'static str = "StatusStreamRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.StatusStreamRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.StatusStreamRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// A streaming sync status update -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct StatusStreamResponse { /// The latest known block height #[prost(uint64, tag = "1")] @@ -963,16 +916,14 @@ impl ::prost::Name for StatusStreamResponse { const NAME: &'static str = "StatusStreamResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.StatusStreamResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.StatusStreamResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// A query for notes known by the view service. /// /// This message uses the fact that all proto fields are optional /// to allow various filtering on the returned notes. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NotesRequest { /// If set, return spent notes as well as unspent notes. @@ -996,13 +947,11 @@ impl ::prost::Name for NotesRequest { const NAME: &'static str = "NotesRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NotesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NotesRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// A query for notes to be used for voting on a proposal. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NotesForVotingRequest { /// The starting height of the proposal. 
@@ -1018,12 +967,10 @@ impl ::prost::Name for NotesForVotingRequest { const NAME: &'static str = "NotesForVotingRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NotesForVotingRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NotesForVotingRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WitnessRequest { /// The transaction plan to witness @@ -1036,12 +983,10 @@ impl ::prost::Name for WitnessRequest { const NAME: &'static str = "WitnessRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WitnessRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WitnessRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WitnessResponse { #[prost(message, optional, tag = "1")] @@ -1053,12 +998,10 @@ impl ::prost::Name for WitnessResponse { const NAME: &'static str = "WitnessResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WitnessResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WitnessResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WitnessAndBuildRequest { #[prost(message, optional, tag = "1")] @@ -1074,12 +1017,10 @@ impl ::prost::Name for WitnessAndBuildRequest { const NAME: &'static str = "WitnessAndBuildRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WitnessAndBuildRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WitnessAndBuildRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct WitnessAndBuildResponse { #[prost(oneof = "witness_and_build_response::Status", tags = "1, 2")] @@ -1088,7 +1029,8 @@ pub struct WitnessAndBuildResponse { /// Nested message and enum types in `WitnessAndBuildResponse`. pub mod witness_and_build_response { /// Signals that building is in progress. - #[derive(Clone, Copy, PartialEq, ::prost::Message)] + #[allow(clippy::derive_partial_eq_without_eq)] + #[derive(Clone, PartialEq, ::prost::Message)] pub struct BuildProgress { /// An approximate progress of the build, from 0 to 1. #[prost(float, tag = "1")] @@ -1098,13 +1040,13 @@ pub mod witness_and_build_response { const NAME: &'static str = "BuildProgress"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WitnessAndBuildResponse.BuildProgress".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WitnessAndBuildResponse.BuildProgress".into() + ::prost::alloc::format!( + "penumbra.view.v1.WitnessAndBuildResponse.{}", Self::NAME + ) } } /// Signals that the transaction is complete. 
+ #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Complete { /// The finished transaction. @@ -1117,12 +1059,12 @@ pub mod witness_and_build_response { const NAME: &'static str = "Complete"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WitnessAndBuildResponse.Complete".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WitnessAndBuildResponse.Complete".into() + ::prost::alloc::format!( + "penumbra.view.v1.WitnessAndBuildResponse.{}", Self::NAME + ) } } + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Status { #[prost(message, tag = "1")] @@ -1135,13 +1077,11 @@ impl ::prost::Name for WitnessAndBuildResponse { const NAME: &'static str = "WitnessAndBuildResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.WitnessAndBuildResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.WitnessAndBuildResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests all assets known to the view service. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetsRequest { /// If set to false (default), returns all assets, regardless of whether the rest of the fields of @@ -1173,13 +1113,11 @@ impl ::prost::Name for AssetsRequest { const NAME: &'static str = "AssetsRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AssetsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AssetsRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests all assets known to the view service. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetsResponse { #[prost(message, optional, tag = "2")] @@ -1189,25 +1127,21 @@ impl ::prost::Name for AssetsResponse { const NAME: &'static str = "AssetsResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AssetsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AssetsResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests the current app parameters from the view service. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct AppParametersRequest {} impl ::prost::Name for AppParametersRequest { const NAME: &'static str = "AppParametersRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AppParametersRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AppParametersRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AppParametersResponse { #[prost(message, optional, tag = "1")] @@ -1217,25 +1151,21 @@ impl ::prost::Name for AppParametersResponse { const NAME: &'static str = "AppParametersResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AppParametersResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AppParametersResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests the current gas prices from the view service. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct GasPricesRequest {} impl ::prost::Name for GasPricesRequest { const NAME: &'static str = "GasPricesRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.GasPricesRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.GasPricesRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct GasPricesResponse { /// The current gas prices, in the preferred (native) token. @@ -1253,26 +1183,22 @@ impl ::prost::Name for GasPricesResponse { const NAME: &'static str = "GasPricesResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.GasPricesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.GasPricesResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests the current FMD parameters from the view service. 
-#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FmdParametersRequest {} impl ::prost::Name for FmdParametersRequest { const NAME: &'static str = "FMDParametersRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.FMDParametersRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.FMDParametersRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct FmdParametersResponse { #[prost(message, optional, tag = "1")] pub parameters: ::core::option::Option< @@ -1283,12 +1209,10 @@ impl ::prost::Name for FmdParametersResponse { const NAME: &'static str = "FMDParametersResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.FMDParametersResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.FMDParametersResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NoteByCommitmentRequest { #[prost(message, optional, tag = "2")] @@ -1303,12 +1227,10 @@ impl ::prost::Name for NoteByCommitmentRequest { const NAME: &'static str = "NoteByCommitmentRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NoteByCommitmentRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NoteByCommitmentRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NoteByCommitmentResponse { #[prost(message, optional, tag = "1")] @@ -1318,12 +1240,10 @@ impl ::prost::Name for NoteByCommitmentResponse { const NAME: &'static str = "NoteByCommitmentResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NoteByCommitmentResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NoteByCommitmentResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapByCommitmentRequest { #[prost(message, optional, tag = "2")] @@ -1338,12 +1258,10 @@ impl ::prost::Name for SwapByCommitmentRequest { const NAME: &'static str = "SwapByCommitmentRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.SwapByCommitmentRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.SwapByCommitmentRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapByCommitmentResponse { #[prost(message, optional, tag = "1")] @@ -1353,24 +1271,20 @@ impl ::prost::Name for SwapByCommitmentResponse { const NAME: &'static str = "SwapByCommitmentResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> 
::prost::alloc::string::String { - "penumbra.view.v1.SwapByCommitmentResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.SwapByCommitmentResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct UnclaimedSwapsRequest {} impl ::prost::Name for UnclaimedSwapsRequest { const NAME: &'static str = "UnclaimedSwapsRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.UnclaimedSwapsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.UnclaimedSwapsRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UnclaimedSwapsResponse { #[prost(message, optional, tag = "1")] @@ -1380,12 +1294,10 @@ impl ::prost::Name for UnclaimedSwapsResponse { const NAME: &'static str = "UnclaimedSwapsResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.UnclaimedSwapsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.UnclaimedSwapsResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NullifierStatusRequest { #[prost(message, optional, tag = "2")] @@ -1399,13 +1311,11 @@ impl ::prost::Name for NullifierStatusRequest { const NAME: &'static str = "NullifierStatusRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NullifierStatusRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NullifierStatusRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct NullifierStatusResponse { #[prost(bool, tag = "1")] pub spent: bool, @@ -1414,12 +1324,10 @@ impl ::prost::Name for NullifierStatusResponse { const NAME: &'static str = "NullifierStatusResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NullifierStatusResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NullifierStatusResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionInfoByHashRequest { /// The transaction hash to query for. 
@@ -1430,13 +1338,11 @@ impl ::prost::Name for TransactionInfoByHashRequest { const NAME: &'static str = "TransactionInfoByHashRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionInfoByHashRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionInfoByHashRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionInfoRequest { /// If present, return only transactions after this height. #[prost(uint64, tag = "1")] @@ -1449,12 +1355,10 @@ impl ::prost::Name for TransactionInfoRequest { const NAME: &'static str = "TransactionInfoRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionInfoRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionInfoRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionInfo { /// The height the transaction was included in a block, if known. @@ -1483,12 +1387,10 @@ impl ::prost::Name for TransactionInfo { const NAME: &'static str = "TransactionInfo"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionInfo".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionInfoResponse { #[prost(message, optional, tag = "1")] @@ -1498,12 +1400,10 @@ impl ::prost::Name for TransactionInfoResponse { const NAME: &'static str = "TransactionInfoResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionInfoResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionInfoResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransactionInfoByHashResponse { #[prost(message, optional, tag = "1")] @@ -1513,12 +1413,10 @@ impl ::prost::Name for TransactionInfoByHashResponse { const NAME: &'static str = "TransactionInfoByHashResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.TransactionInfoByHashResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.TransactionInfoByHashResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NotesResponse { #[prost(message, optional, tag = "1")] @@ -1528,12 +1426,10 @@ impl ::prost::Name for NotesResponse { const NAME: &'static str = "NotesResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NotesResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - 
"/penumbra.view.v1.NotesResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NotesForVotingResponse { #[prost(message, optional, tag = "1")] @@ -1545,13 +1441,11 @@ impl ::prost::Name for NotesForVotingResponse { const NAME: &'static str = "NotesForVotingResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.NotesForVotingResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.NotesForVotingResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// A note plaintext with associated metadata about its status. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SpendableNoteRecord { /// The note commitment, identifying the note. @@ -1598,12 +1492,10 @@ impl ::prost::Name for SpendableNoteRecord { const NAME: &'static str = "SpendableNoteRecord"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.SpendableNoteRecord".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.SpendableNoteRecord".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SwapRecord { #[prost(message, optional, tag = "1")] @@ -1635,12 +1527,10 @@ impl ::prost::Name for SwapRecord { const NAME: &'static str = "SwapRecord"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.SwapRecord".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.SwapRecord".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct OwnedPositionIdsRequest { /// If present, return only positions with this position state. @@ -1658,12 +1548,10 @@ impl ::prost::Name for OwnedPositionIdsRequest { const NAME: &'static str = "OwnedPositionIdsRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.OwnedPositionIdsRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.OwnedPositionIdsRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct OwnedPositionIdsResponse { #[prost(message, optional, tag = "1")] @@ -1675,13 +1563,11 @@ impl ::prost::Name for OwnedPositionIdsResponse { const NAME: &'static str = "OwnedPositionIdsResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.OwnedPositionIdsResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.OwnedPositionIdsResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests information on an asset by asset id +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetMetadataByIdRequest { /// The asset id to request information on. 
@@ -1692,12 +1578,10 @@ impl ::prost::Name for AssetMetadataByIdRequest { const NAME: &'static str = "AssetMetadataByIdRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AssetMetadataByIdRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AssetMetadataByIdRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct AssetMetadataByIdResponse { /// If present, information on the requested asset. @@ -1710,15 +1594,13 @@ impl ::prost::Name for AssetMetadataByIdResponse { const NAME: &'static str = "AssetMetadataByIdResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.AssetMetadataByIdResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.AssetMetadataByIdResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests `ValueView`s of delegation tokens for the given address index. The /// returned `ValueView`s will include the `ValidatorInfo` for the delegated /// validator in their `extended_metadata` fields. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegationsByAddressIndexRequest { /// The address index to fetch delegation balances for. @@ -1763,11 +1645,11 @@ pub mod delegations_by_address_index_request { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "FILTER_UNSPECIFIED", - Self::AllActiveWithNonzeroBalances => { + Filter::Unspecified => "FILTER_UNSPECIFIED", + Filter::AllActiveWithNonzeroBalances => { "FILTER_ALL_ACTIVE_WITH_NONZERO_BALANCES" } - Self::All => "FILTER_ALL", + Filter::All => "FILTER_ALL", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -1787,15 +1669,13 @@ impl ::prost::Name for DelegationsByAddressIndexRequest { const NAME: &'static str = "DelegationsByAddressIndexRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.DelegationsByAddressIndexRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.DelegationsByAddressIndexRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Contains a `ValueView` of delegation tokens for the requested address index. /// The `ValueView` includes the `ValidatorInfo` for the delegated validator in /// its `extended_metadata` field. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DelegationsByAddressIndexResponse { #[prost(message, optional, tag = "1")] @@ -1805,14 +1685,12 @@ impl ::prost::Name for DelegationsByAddressIndexResponse { const NAME: &'static str = "DelegationsByAddressIndexResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.DelegationsByAddressIndexResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.DelegationsByAddressIndexResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Requests unbonding tokens for a given address index, with optional filtering /// for whether the tokens are currently claimable. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UnbondingTokensByAddressIndexRequest { #[prost( @@ -1860,9 +1738,9 @@ pub mod unbonding_tokens_by_address_index_request { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unspecified => "FILTER_UNSPECIFIED", - Self::Claimable => "FILTER_CLAIMABLE", - Self::NotYetClaimable => "FILTER_NOT_YET_CLAIMABLE", + Filter::Unspecified => "FILTER_UNSPECIFIED", + Filter::Claimable => "FILTER_CLAIMABLE", + Filter::NotYetClaimable => "FILTER_NOT_YET_CLAIMABLE", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -1880,14 +1758,12 @@ impl ::prost::Name for UnbondingTokensByAddressIndexRequest { const NAME: &'static str = "UnbondingTokensByAddressIndexRequest"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.UnbondingTokensByAddressIndexRequest".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.UnbondingTokensByAddressIndexRequest".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Returns unbonding tokens for the given address index, optionally filtered by /// whether the tokens are currently claimable. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct UnbondingTokensByAddressIndexResponse { /// A `ValueView` representing the amount of the given unbonding token. @@ -1903,22 +1779,13 @@ impl ::prost::Name for UnbondingTokensByAddressIndexResponse { const NAME: &'static str = "UnbondingTokensByAddressIndexResponse"; const PACKAGE: &'static str = "penumbra.view.v1"; fn full_name() -> ::prost::alloc::string::String { - "penumbra.view.v1.UnbondingTokensByAddressIndexResponse".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/penumbra.view.v1.UnbondingTokensByAddressIndexResponse".into() + ::prost::alloc::format!("penumbra.view.v1.{}", Self::NAME) } } /// Generated client implementations. 
#[cfg(feature = "rpc")] pub mod view_service_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; /// The view RPC is used by a view client, who wants to do some @@ -1944,8 +1811,8 @@ pub mod view_service_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -1970,7 +1837,7 @@ pub mod view_service_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { ViewServiceClient::new(InterceptedService::new(inner, interceptor)) } @@ -2014,7 +1881,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2040,7 +1908,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2066,7 +1935,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2091,7 +1961,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2119,7 +1990,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2148,7 +2020,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2175,7 +2048,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2202,7 +2076,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2227,7 +2102,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2254,7 +2130,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2281,7 +2158,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2308,7 +2186,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2333,7 +2212,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - 
tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2360,7 +2240,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2388,7 +2269,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2413,7 +2295,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2440,7 +2323,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2467,7 +2351,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2494,7 +2379,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2521,7 +2407,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2552,7 +2439,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2579,7 +2467,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2606,7 +2495,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2638,7 +2528,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2669,7 +2560,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2706,7 +2598,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2737,7 +2630,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2772,7 +2666,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2805,7 +2700,8 @@ pub mod view_service_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2835,7 +2731,8 @@ pub mod view_service_client { .ready() .await 
.map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -2853,17 +2750,11 @@ pub mod view_service_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod view_service_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with ViewServiceServer. #[async_trait] - pub trait ViewService: std::marker::Send + std::marker::Sync + 'static { + pub trait ViewService: Send + Sync + 'static { /// Get current status of chain sync async fn status( &self, @@ -2873,7 +2764,7 @@ pub mod view_service_server { type StatusStreamStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Stream sync status updates until the view service has caught up with the chain. /// Returns a stream of `StatusStreamResponse`s. @@ -2888,7 +2779,7 @@ pub mod view_service_server { type NotesStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Queries for notes that have been accepted by the chain. /// Returns a stream of `NotesResponse`s. @@ -2900,7 +2791,7 @@ pub mod view_service_server { type NotesForVotingStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Returns a stream of `NotesForVotingResponse`s. async fn notes_for_voting( @@ -2914,7 +2805,7 @@ pub mod view_service_server { type AssetsStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Queries for metadata about known assets. /// Returns a stream of `AssetsResponse`s. @@ -3002,7 +2893,7 @@ pub mod view_service_server { type BalancesStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Query for balance of a given address. /// Returns a stream of `BalancesResponses`. @@ -3030,7 +2921,7 @@ pub mod view_service_server { type UnclaimedSwapsStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Query for all unclaimed swaps. async fn unclaimed_swaps( @@ -3060,7 +2951,7 @@ pub mod view_service_server { type TransactionInfoStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Query for the full transactions in the given range of blocks. /// Returns a stream of `TransactionInfoResponse`s. @@ -3078,7 +2969,7 @@ pub mod view_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Query for owned position IDs for the given trading pair and in the given position state. async fn owned_position_ids( @@ -3110,7 +3001,7 @@ pub mod view_service_server { type WitnessAndBuildStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Like `Witness`, but immediately uses the witness data to build (prove) the transaction. /// @@ -3133,7 +3024,7 @@ pub mod view_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Authorize a transaction plan and build the transaction. 
/// @@ -3160,7 +3051,7 @@ pub mod view_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Broadcast a transaction to the network, optionally waiting for full confirmation. /// @@ -3179,7 +3070,7 @@ pub mod view_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Get delegation tokens for a given address index. Each delegation token will /// be represented by a `ValueView` with the given address index's balance of @@ -3199,7 +3090,7 @@ pub mod view_service_server { tonic::Status, >, > - + std::marker::Send + + Send + 'static; /// Get unbonding tokens for the given address index, optionally filtered by /// whether the tokens are currently claimable. @@ -3214,7 +3105,7 @@ pub mod view_service_server { type AuctionsStream: tonic::codegen::tokio_stream::Stream< Item = std::result::Result, > - + std::marker::Send + + Send + 'static; /// Gets the auctions controlled by the user's wallet. async fn auctions( @@ -3227,18 +3118,20 @@ pub mod view_service_server { /// responsible for synchronizing and scanning the public chain state with one or /// more full viewing keys. #[derive(Debug)] - pub struct ViewServiceServer { - inner: Arc, + pub struct ViewServiceServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl ViewServiceServer { + struct _Inner(Arc); + impl ViewServiceServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -3288,8 +3181,8 @@ pub mod view_service_server { impl tonic::codegen::Service> for ViewServiceServer where T: ViewService, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -3301,6 +3194,7 @@ pub mod view_service_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/penumbra.view.v1.ViewService/Status" => { #[allow(non_camel_case_types)] @@ -3331,6 +3225,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = StatusSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3377,6 +3272,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = StatusStreamSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3423,6 +3319,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NotesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3469,6 +3366,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NotesForVotingSvc(inner); let codec = 
tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3515,6 +3413,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AssetsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3561,6 +3460,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AssetMetadataByIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3606,6 +3506,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AppParametersSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3651,6 +3552,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = GasPricesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3696,6 +3598,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = FMDParametersSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3741,6 +3644,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AddressByIndexSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3787,6 +3691,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TransparentAddressSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3832,6 +3737,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = WalletIdSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3877,6 +3783,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = IndexByAddressSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3922,6 +3829,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = EphemeralAddressSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -3968,6 +3876,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = 
inner.0; let method = BalancesSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4014,6 +3923,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NoteByCommitmentSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4060,6 +3970,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = SwapByCommitmentSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4106,6 +4017,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = UnclaimedSwapsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4151,6 +4063,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = NullifierStatusSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4200,6 +4113,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TransactionInfoByHashSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4247,6 +4161,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TransactionInfoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4295,6 +4210,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = OwnedPositionIdsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4341,6 +4257,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = TransactionPlannerSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4386,6 +4303,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = WitnessSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4433,6 +4351,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = WitnessAndBuildSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4481,6 +4400,7 @@ pub mod view_service_server { let max_encoding_message_size = 
self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuthorizeAndBuildSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4529,6 +4449,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BroadcastTransactionSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4582,6 +4503,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DelegationsByAddressIndexSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4635,6 +4557,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = UnbondingTokensByAddressIndexSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4681,6 +4604,7 @@ pub mod view_service_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = AuctionsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -4699,25 +4623,20 @@ pub mod view_service_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for ViewServiceServer { + impl Clone for ViewServiceServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -4729,9 +4648,17 @@ pub mod view_service_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "penumbra.view.v1.ViewService"; - impl tonic::server::NamedService for ViewServiceServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for ViewServiceServer { + const NAME: &'static str = "penumbra.view.v1.ViewService"; } } diff --git a/crates/proto/src/gen/penumbra.view.v1.serde.rs b/crates/proto/src/gen/penumbra.view.v1.serde.rs index 06c180beee..d5b11ff8db 100644 --- a/crates/proto/src/gen/penumbra.view.v1.serde.rs +++ b/crates/proto/src/gen/penumbra.view.v1.serde.rs @@ -1035,7 +1035,6 @@ impl serde::Serialize for AuctionsResponse { } if self.local_seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("localSeq", ToString::to_string(&self.local_seq).as_str())?; } struct_ser.end() @@ -2172,7 +2171,6 @@ impl serde::Serialize for broadcast_transaction_response::Confirmed { } if 
self.detection_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("detectionHeight", ToString::to_string(&self.detection_height).as_str())?; } struct_ser.end() @@ -3516,7 +3514,6 @@ impl serde::Serialize for NotesForVotingRequest { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.NotesForVotingRequest", len)?; if self.votable_at_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("votableAtHeight", ToString::to_string(&self.votable_at_height).as_str())?; } if let Some(v) = self.address_index.as_ref() { @@ -4445,17 +4442,14 @@ impl serde::Serialize for SpendableNoteRecord { } if self.height_created != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("heightCreated", ToString::to_string(&self.height_created).as_str())?; } if self.height_spent != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("heightSpent", ToString::to_string(&self.height_spent).as_str())?; } if self.position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("position", ToString::to_string(&self.position).as_str())?; } if let Some(v) = self.source.as_ref() { @@ -4735,12 +4729,10 @@ impl serde::Serialize for StatusResponse { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.StatusResponse", len)?; if self.full_sync_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("fullSyncHeight", ToString::to_string(&self.full_sync_height).as_str())?; } if self.partial_sync_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("partialSyncHeight", ToString::to_string(&self.partial_sync_height).as_str())?; } if self.catching_up { @@ -4947,17 +4939,14 @@ impl serde::Serialize for StatusStreamResponse { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.StatusStreamResponse", len)?; if self.latest_known_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("latestKnownBlockHeight", ToString::to_string(&self.latest_known_block_height).as_str())?; } if self.full_sync_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("fullSyncHeight", ToString::to_string(&self.full_sync_height).as_str())?; } if self.partial_sync_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("partialSyncHeight", ToString::to_string(&self.partial_sync_height).as_str())?; } struct_ser.end() @@ -5318,7 +5307,6 @@ impl serde::Serialize for SwapRecord { } if self.position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("position", ToString::to_string(&self.position).as_str())?; } if let Some(v) = self.nullifier.as_ref() { @@ -5329,7 +5317,6 @@ impl serde::Serialize for SwapRecord { } if self.height_claimed != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("heightClaimed", ToString::to_string(&self.height_claimed).as_str())?; } if let Some(v) 
= self.source.as_ref() { @@ -5514,7 +5501,6 @@ impl serde::Serialize for TransactionInfo { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.TransactionInfo", len)?; if self.height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("height", ToString::to_string(&self.height).as_str())?; } if let Some(v) = self.id.as_ref() { @@ -5863,12 +5849,10 @@ impl serde::Serialize for TransactionInfoRequest { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.TransactionInfoRequest", len)?; if self.start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startHeight", ToString::to_string(&self.start_height).as_str())?; } if self.end_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("endHeight", ToString::to_string(&self.end_height).as_str())?; } struct_ser.end() @@ -6141,7 +6125,6 @@ impl serde::Serialize for TransactionPlannerRequest { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.TransactionPlannerRequest", len)?; if self.expiry_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("expiryHeight", ToString::to_string(&self.expiry_height).as_str())?; } if let Some(v) = self.memo.as_ref() { @@ -6200,7 +6183,6 @@ impl serde::Serialize for TransactionPlannerRequest { } if self.epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("epochIndex", ToString::to_string(&self.epoch_index).as_str())?; } if let Some(v) = self.epoch.as_ref() { @@ -6768,7 +6750,6 @@ impl serde::Serialize for transaction_planner_request::ActionDutchAuctionWithdra } if self.seq != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("seq", ToString::to_string(&self.seq).as_str())?; } struct_ser.end() @@ -7004,7 +6985,6 @@ impl serde::Serialize for transaction_planner_request::DelegatorVote { let mut struct_ser = serializer.serialize_struct("penumbra.view.v1.TransactionPlannerRequest.DelegatorVote", len)?; if self.proposal != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("proposal", ToString::to_string(&self.proposal).as_str())?; } if let Some(v) = self.vote.as_ref() { @@ -7012,12 +6992,10 @@ impl serde::Serialize for transaction_planner_request::DelegatorVote { } if self.start_block_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startBlockHeight", ToString::to_string(&self.start_block_height).as_str())?; } if self.start_position != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startPosition", ToString::to_string(&self.start_position).as_str())?; } if !self.rate_data.is_empty() { @@ -8088,7 +8066,6 @@ impl serde::Serialize for transaction_planner_request::UndelegateClaim { } if self.start_epoch_index != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("startEpochIndex", ToString::to_string(&self.start_epoch_index).as_str())?; } if let Some(v) = self.penalty.as_ref() { @@ -8099,7 +8076,6 @@ impl serde::Serialize for 
transaction_planner_request::UndelegateClaim { } if self.unbonding_start_height != 0 { #[allow(clippy::needless_borrow)] - #[allow(clippy::needless_borrows_for_generic_args)] struct_ser.serialize_field("unbondingStartHeight", ToString::to_string(&self.unbonding_start_height).as_str())?; } struct_ser.end() diff --git a/crates/proto/src/gen/proto_descriptor.bin.no_lfs b/crates/proto/src/gen/proto_descriptor.bin.no_lfs index 9bf7426e3a803c4b30f2451b722a70dd6486ecdd..895f0f725959cf9de6b287980a762aa4d11be6ff 100644 GIT binary patch delta 5555 zcmcIoOK;rP6&`Xpud6pD*)rugH^gH)n&Ws`{Rnc@IU_SEP z^Zd@ahx+=rCw}wwiD%D^=jQY5bL$VePp4jd{$^=@e(^6eY^ovr=K5;ro?i34iW_>( z_0z309Z+tBK^)klsaUx!R&JefwiCZ}GMV^=!H(Vx{Cgtws-C-A7w?I!dv3icmc`Si zh+=z*WuJ(xF@qZ?d(K`g@2RZ(eHzNpxJ){nahAnzdxYgY-xnctZh!7)dy+2<_R>BY zqDJ6H!oJONEpke`arwVyAtes!W*3L`eoj zks!xGbu5c##bpI1es8eB&RHG7)twQ{c13VGtscrk$vNZINep3 z{dL?VZ)FPK$=J&loNqbrvts&iXNF2A;D1HbMJy_sC@FMqGS<}hM%UcungN|)`dY=f|&MkhE8&0}MK|3YOLMQy^1{>Y8A?6T&&a>rm2MC$jHzN_$adJzey(Pwmp8lyu3Y24DS8qH?||GjoQ0hvg(k?8$x3 zb<^89{{2F!)^|VUEN@$xOtd}pPirJO#!Af0X6&q$Il~w;^BLR9XGRVD&gL_Qoy`|V z*apKh6GA2Pb?2x2ZGkcA)@K{dun|OpKM`BAEt-QoirrXnOz3cnFWz37;c?B4`I_tV zRl%E)sPH&YuNim4RWD9L7ot!QanFae;P2kJ12sYDJ{D!hdRdMv!;ezFKSd_cz_Ao% zflggwAO|@K$v$({X&%tY$k6U186q zzln*LL*{$V#x94|>_f`_hxAIvwW2*UoTMqH#0}+#w3E^+`*@1-cz1d}MF?Q%!Vu|jvS;DbNWMqg%_37Hy<;hYPa~73QbcaNdXZI`X@S5;GA|p>Pxx&SR)cUof;XKU zDssAyUC)nVo)G0w&E3VEVIVAdK(ND#-4^$$=ZTZV1Dk~tw8*&;%|Qk;^m8szGPc^uQ49B=7z*DIS+1(5jnDSdSN#?9vp}q zI1O?vXmS9-)Pg1l5DYDV98{7l)#tJ~hmtIAfAwu~IH6M0$QGMk6r+*#%|epUdD*1K z$PuE0)32onIxm+`vuntgWm|7#77b^in{azfXgOVGh%T6aG(AWr@isMc(3IIXiVjyRdDuF7k%8t&o_P9pDDG?ME49ctw%lw0Ou=Np4B%1Z@z3JNZ>rx54n|9AqIWX*%^RD~aCN?sx&}I|E|2#0yt`{U&fjE;68hxXm7_gxLD>4?vy>bxO+QeUh@^MhSt61qcT1x>g~ZakV@G6< z7i{Z(CZZl+>>>rRQ&ktY!~wWjfPwqPQE0TR0!`Bgrkiyx@auK4j_;%h8&M)ly_1tf zNiONH2Zy4IbQ5Zz^Z~QwIsrtCe`u*~K|+N1&|>mRmKfp>d-^*VQ9c}MA5W-@AL%hp z@|2=SdW=g<@AVj$80Q~hr2N{zIztbzimh0q&XF9$oYLLzDPl z^Agz*!3G{<9%D*2y{M*EEiL!m{L3NOy~QcTvQO2Q2O3m%9tcZ|iF6WCtysklMpP@M zLA7*A0feHvAa~VQhRoWDi*DVy0dkuh+u1MM3+h~L>0@{P5{EG zq|(l{T5>9oR$i+$r-Hdjn`0qsP^m<_RDd`yx{i}M(JmFYemr;OHj$PmHWrXQH?H*SSvI zxU4>Utb!~?U5^GJ*sQ}Q?E*lsSck<~B1aK@9GGF&d0Cp2|L;&kfymuft|v3Zp(?;t zNOxv|Vrig+N-s8!j{w!Chj;X{Pu<->Fxo)%)H#&ysdYr3GqI^Wwd6N2RFcl81F25R g&eOwVzkD{8!(SeM1^g9XJew-@J1_nim%jM;->DfaF#rGn delta 53 zcmZ2_S^e@d^@bM47N!>F7M3lnovQ5ug{(l#2E^<@%&}ddkdswvd#5U^sp)pnZq5mD E0Poxo{Qv*} diff --git a/crates/proto/src/gen/tendermint.abci.rs b/crates/proto/src/gen/tendermint.abci.rs index f2035dd630..2cb2c6e801 100644 --- a/crates/proto/src/gen/tendermint.abci.rs +++ b/crates/proto/src/gen/tendermint.abci.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Request { #[prost( @@ -9,6 +9,7 @@ pub struct Request { } /// Nested message and enum types in `Request`. 
pub mod request { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Value { #[prost(message, tag = "1")] @@ -49,12 +50,10 @@ impl ::prost::Name for Request { const NAME: &'static str = "Request"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.Request".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.Request".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestEcho { #[prost(string, tag = "1")] @@ -64,24 +63,20 @@ impl ::prost::Name for RequestEcho { const NAME: &'static str = "RequestEcho"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestEcho".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestEcho".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestFlush {} impl ::prost::Name for RequestFlush { const NAME: &'static str = "RequestFlush"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestFlush".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestFlush".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestInfo { #[prost(string, tag = "1")] @@ -97,12 +92,10 @@ impl ::prost::Name for RequestInfo { const NAME: &'static str = "RequestInfo"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestInfo".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestInitChain { #[prost(message, optional, tag = "1")] @@ -122,12 +115,10 @@ impl ::prost::Name for RequestInitChain { const NAME: &'static str = "RequestInitChain"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestInitChain".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestInitChain".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestQuery { #[prost(bytes = "vec", tag = "1")] @@ -143,12 +134,10 @@ impl ::prost::Name for RequestQuery { const NAME: &'static str = "RequestQuery"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestQuery".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestQuery".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestBeginBlock { #[prost(bytes = "vec", tag = "1")] @@ -164,12 +153,10 @@ impl ::prost::Name for RequestBeginBlock { 
const NAME: &'static str = "RequestBeginBlock"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestBeginBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestBeginBlock".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestCheckTx { #[prost(bytes = "vec", tag = "1")] @@ -181,12 +168,10 @@ impl ::prost::Name for RequestCheckTx { const NAME: &'static str = "RequestCheckTx"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestCheckTx".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestCheckTx".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestDeliverTx { #[prost(bytes = "vec", tag = "1")] @@ -196,13 +181,11 @@ impl ::prost::Name for RequestDeliverTx { const NAME: &'static str = "RequestDeliverTx"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestDeliverTx".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestDeliverTx".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestEndBlock { #[prost(int64, tag = "1")] pub height: i64, @@ -211,38 +194,32 @@ impl ::prost::Name for RequestEndBlock { const NAME: &'static str = "RequestEndBlock"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestEndBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestEndBlock".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestCommit {} impl ::prost::Name for RequestCommit { const NAME: &'static str = "RequestCommit"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestCommit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestCommit".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// lists available snapshots -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestListSnapshots {} impl ::prost::Name for RequestListSnapshots { const NAME: &'static str = "RequestListSnapshots"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestListSnapshots".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestListSnapshots".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// offers a snapshot to the application +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestOfferSnapshot { /// snapshot offered by peers @@ -256,14 +233,12 @@ 
impl ::prost::Name for RequestOfferSnapshot { const NAME: &'static str = "RequestOfferSnapshot"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestOfferSnapshot".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestOfferSnapshot".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// loads a snapshot chunk -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestLoadSnapshotChunk { #[prost(uint64, tag = "1")] pub height: u64, @@ -276,13 +251,11 @@ impl ::prost::Name for RequestLoadSnapshotChunk { const NAME: &'static str = "RequestLoadSnapshotChunk"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestLoadSnapshotChunk".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestLoadSnapshotChunk".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// Applies a snapshot chunk +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestApplySnapshotChunk { #[prost(uint32, tag = "1")] @@ -296,12 +269,10 @@ impl ::prost::Name for RequestApplySnapshotChunk { const NAME: &'static str = "RequestApplySnapshotChunk"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestApplySnapshotChunk".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestApplySnapshotChunk".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestPrepareProposal { /// the modified transactions cannot exceed this size. @@ -329,12 +300,10 @@ impl ::prost::Name for RequestPrepareProposal { const NAME: &'static str = "RequestPrepareProposal"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestPrepareProposal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestPrepareProposal".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct RequestProcessProposal { #[prost(bytes = "vec", repeated, tag = "1")] @@ -360,12 +329,10 @@ impl ::prost::Name for RequestProcessProposal { const NAME: &'static str = "RequestProcessProposal"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.RequestProcessProposal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.RequestProcessProposal".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Response { #[prost( @@ -376,6 +343,7 @@ pub struct Response { } /// Nested message and enum types in `Response`. 
pub mod response { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Value { #[prost(message, tag = "1")] @@ -418,13 +386,11 @@ impl ::prost::Name for Response { const NAME: &'static str = "Response"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.Response".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.Response".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// nondeterministic +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseException { #[prost(string, tag = "1")] @@ -434,12 +400,10 @@ impl ::prost::Name for ResponseException { const NAME: &'static str = "ResponseException"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseException".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseException".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseEcho { #[prost(string, tag = "1")] @@ -449,24 +413,20 @@ impl ::prost::Name for ResponseEcho { const NAME: &'static str = "ResponseEcho"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseEcho".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseEcho".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseFlush {} impl ::prost::Name for ResponseFlush { const NAME: &'static str = "ResponseFlush"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseFlush".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseFlush".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseInfo { #[prost(string, tag = "1")] @@ -484,12 +444,10 @@ impl ::prost::Name for ResponseInfo { const NAME: &'static str = "ResponseInfo"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseInfo".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseInitChain { #[prost(message, optional, tag = "1")] @@ -503,12 +461,10 @@ impl ::prost::Name for ResponseInitChain { const NAME: &'static str = "ResponseInitChain"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseInitChain".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseInitChain".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseQuery { #[prost(uint32, tag = "1")] @@ 
-538,12 +494,10 @@ impl ::prost::Name for ResponseQuery { const NAME: &'static str = "ResponseQuery"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseQuery".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseQuery".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseBeginBlock { #[prost(message, repeated, tag = "1")] @@ -553,12 +507,10 @@ impl ::prost::Name for ResponseBeginBlock { const NAME: &'static str = "ResponseBeginBlock"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseBeginBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseBeginBlock".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseCheckTx { #[prost(uint32, tag = "1")] @@ -592,12 +544,10 @@ impl ::prost::Name for ResponseCheckTx { const NAME: &'static str = "ResponseCheckTx"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseCheckTx".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseCheckTx".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseDeliverTx { #[prost(uint32, tag = "1")] @@ -624,12 +574,10 @@ impl ::prost::Name for ResponseDeliverTx { const NAME: &'static str = "ResponseDeliverTx"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseDeliverTx".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseDeliverTx".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseEndBlock { #[prost(message, repeated, tag = "1")] @@ -643,12 +591,10 @@ impl ::prost::Name for ResponseEndBlock { const NAME: &'static str = "ResponseEndBlock"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseEndBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseEndBlock".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseCommit { /// reserve 1 @@ -661,12 +607,10 @@ impl ::prost::Name for ResponseCommit { const NAME: &'static str = "ResponseCommit"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseCommit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseCommit".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseListSnapshots { #[prost(message, repeated, tag = "1")] @@ -676,13 +620,11 @@ impl ::prost::Name for ResponseListSnapshots { const NAME: &'static str = 
"ResponseListSnapshots"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseListSnapshots".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseListSnapshots".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseOfferSnapshot { #[prost(enumeration = "response_offer_snapshot::Result", tag = "1")] pub result: i32, @@ -722,12 +664,12 @@ pub mod response_offer_snapshot { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unknown => "UNKNOWN", - Self::Accept => "ACCEPT", - Self::Abort => "ABORT", - Self::Reject => "REJECT", - Self::RejectFormat => "REJECT_FORMAT", - Self::RejectSender => "REJECT_SENDER", + Result::Unknown => "UNKNOWN", + Result::Accept => "ACCEPT", + Result::Abort => "ABORT", + Result::Reject => "REJECT", + Result::RejectFormat => "REJECT_FORMAT", + Result::RejectSender => "REJECT_SENDER", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -748,12 +690,10 @@ impl ::prost::Name for ResponseOfferSnapshot { const NAME: &'static str = "ResponseOfferSnapshot"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseOfferSnapshot".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseOfferSnapshot".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseLoadSnapshotChunk { #[prost(bytes = "vec", tag = "1")] @@ -763,12 +703,10 @@ impl ::prost::Name for ResponseLoadSnapshotChunk { const NAME: &'static str = "ResponseLoadSnapshotChunk"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseLoadSnapshotChunk".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseLoadSnapshotChunk".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseApplySnapshotChunk { #[prost(enumeration = "response_apply_snapshot_chunk::Result", tag = "1")] @@ -815,12 +753,12 @@ pub mod response_apply_snapshot_chunk { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unknown => "UNKNOWN", - Self::Accept => "ACCEPT", - Self::Abort => "ABORT", - Self::Retry => "RETRY", - Self::RetrySnapshot => "RETRY_SNAPSHOT", - Self::RejectSnapshot => "REJECT_SNAPSHOT", + Result::Unknown => "UNKNOWN", + Result::Accept => "ACCEPT", + Result::Abort => "ABORT", + Result::Retry => "RETRY", + Result::RetrySnapshot => "RETRY_SNAPSHOT", + Result::RejectSnapshot => "REJECT_SNAPSHOT", } } /// Creates an enum from field names used in the ProtoBuf definition. 
@@ -841,12 +779,10 @@ impl ::prost::Name for ResponseApplySnapshotChunk { const NAME: &'static str = "ResponseApplySnapshotChunk"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseApplySnapshotChunk".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseApplySnapshotChunk".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponsePrepareProposal { #[prost(bytes = "vec", repeated, tag = "1")] @@ -856,13 +792,11 @@ impl ::prost::Name for ResponsePrepareProposal { const NAME: &'static str = "ResponsePrepareProposal"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponsePrepareProposal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponsePrepareProposal".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ResponseProcessProposal { #[prost(enumeration = "response_process_proposal::ProposalStatus", tag = "1")] pub status: i32, @@ -893,9 +827,9 @@ pub mod response_process_proposal { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unknown => "UNKNOWN", - Self::Accept => "ACCEPT", - Self::Reject => "REJECT", + ProposalStatus::Unknown => "UNKNOWN", + ProposalStatus::Accept => "ACCEPT", + ProposalStatus::Reject => "REJECT", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -913,12 +847,10 @@ impl ::prost::Name for ResponseProcessProposal { const NAME: &'static str = "ResponseProcessProposal"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ResponseProcessProposal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ResponseProcessProposal".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommitInfo { #[prost(int32, tag = "1")] @@ -930,12 +862,10 @@ impl ::prost::Name for CommitInfo { const NAME: &'static str = "CommitInfo"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.CommitInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.CommitInfo".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExtendedCommitInfo { /// The round at which the block proposer decided in the previous height. 
@@ -950,15 +880,13 @@ impl ::prost::Name for ExtendedCommitInfo { const NAME: &'static str = "ExtendedCommitInfo"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ExtendedCommitInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ExtendedCommitInfo".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// Event allows application developers to attach additional information to /// ResponseBeginBlock, ResponseEndBlock, ResponseCheckTx and ResponseDeliverTx. /// Later, transactions may be queried using these events. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Event { #[prost(string, tag = "1")] @@ -970,13 +898,11 @@ impl ::prost::Name for Event { const NAME: &'static str = "Event"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.Event".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.Event".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// EventAttribute is a single key-value pair, associated with an event. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EventAttribute { #[prost(string, tag = "1")] @@ -991,15 +917,13 @@ impl ::prost::Name for EventAttribute { const NAME: &'static str = "EventAttribute"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.EventAttribute".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.EventAttribute".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// TxResult contains results of executing the transaction. /// /// One usage is indexing transaction results. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxResult { #[prost(int64, tag = "1")] @@ -1015,13 +939,11 @@ impl ::prost::Name for TxResult { const NAME: &'static str = "TxResult"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.TxResult".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.TxResult".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// Validator +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Validator { /// The first 20 bytes of SHA256(public key) @@ -1037,13 +959,11 @@ impl ::prost::Name for Validator { const NAME: &'static str = "Validator"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.Validator".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.Validator".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// ValidatorUpdate +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorUpdate { #[prost(message, optional, tag = "1")] @@ -1055,13 +975,11 @@ impl ::prost::Name for ValidatorUpdate { const NAME: &'static str = "ValidatorUpdate"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ValidatorUpdate".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ValidatorUpdate".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } /// VoteInfo +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct VoteInfo { #[prost(message, optional, tag = "1")] @@ -1073,12 +991,10 @@ impl ::prost::Name for VoteInfo { const NAME: &'static str = "VoteInfo"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.VoteInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.VoteInfo".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ExtendedVoteInfo { #[prost(message, optional, tag = "1")] @@ -1093,12 +1009,10 @@ impl ::prost::Name for ExtendedVoteInfo { const NAME: &'static str = "ExtendedVoteInfo"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.ExtendedVoteInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.ExtendedVoteInfo".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Misbehavior { #[prost(enumeration = "MisbehaviorType", tag = "1")] @@ -1122,12 +1036,10 @@ impl ::prost::Name for Misbehavior { const NAME: &'static str = "Misbehavior"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.Misbehavior".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.Misbehavior".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Snapshot { /// The height at 
which the snapshot was taken @@ -1150,10 +1062,7 @@ impl ::prost::Name for Snapshot { const NAME: &'static str = "Snapshot"; const PACKAGE: &'static str = "tendermint.abci"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.abci.Snapshot".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.abci.Snapshot".into() + ::prost::alloc::format!("tendermint.abci.{}", Self::NAME) } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] @@ -1169,8 +1078,8 @@ impl CheckTxType { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::New => "NEW", - Self::Recheck => "RECHECK", + CheckTxType::New => "NEW", + CheckTxType::Recheck => "RECHECK", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -1196,9 +1105,9 @@ impl MisbehaviorType { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unknown => "UNKNOWN", - Self::DuplicateVote => "DUPLICATE_VOTE", - Self::LightClientAttack => "LIGHT_CLIENT_ATTACK", + MisbehaviorType::Unknown => "UNKNOWN", + MisbehaviorType::DuplicateVote => "DUPLICATE_VOTE", + MisbehaviorType::LightClientAttack => "LIGHT_CLIENT_ATTACK", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -1214,13 +1123,7 @@ impl MisbehaviorType { /// Generated client implementations. #[cfg(feature = "rpc")] pub mod abci_application_client { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; use tonic::codegen::http::Uri; #[derive(Debug, Clone)] @@ -1242,8 +1145,8 @@ pub mod abci_application_client { where T: tonic::client::GrpcService, T::Error: Into, - T::ResponseBody: Body + std::marker::Send + 'static, - ::Error: Into + std::marker::Send, + T::ResponseBody: Body + Send + 'static, + ::Error: Into + Send, { pub fn new(inner: T) -> Self { let inner = tonic::client::Grpc::new(inner); @@ -1268,7 +1171,7 @@ pub mod abci_application_client { >, , - >>::Error: Into + std::marker::Send + std::marker::Sync, + >>::Error: Into + Send + Sync, { AbciApplicationClient::new(InterceptedService::new(inner, interceptor)) } @@ -1311,7 +1214,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1332,7 +1236,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1353,7 +1258,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1377,7 +1283,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1401,7 +1308,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1422,7 +1330,8 @@ pub mod abci_application_client { .ready() .await 
.map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1443,7 +1352,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1467,7 +1377,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1491,7 +1402,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1517,7 +1429,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1541,7 +1454,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1567,7 +1481,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1593,7 +1508,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1622,7 +1538,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1651,7 +1568,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1677,7 +1595,8 @@ pub mod abci_application_client { .ready() .await .map_err(|e| { - tonic::Status::unknown( + tonic::Status::new( + tonic::Code::Unknown, format!("Service was not ready: {}", e.into()), ) })?; @@ -1697,17 +1616,11 @@ pub mod abci_application_client { /// Generated server implementations. #[cfg(feature = "rpc")] pub mod abci_application_server { - #![allow( - unused_variables, - dead_code, - missing_docs, - clippy::wildcard_imports, - clippy::let_unit_value, - )] + #![allow(unused_variables, dead_code, missing_docs, clippy::let_unit_value)] use tonic::codegen::*; /// Generated trait containing gRPC methods that should be implemented for use with AbciApplicationServer. 
#[async_trait] - pub trait AbciApplication: std::marker::Send + std::marker::Sync + 'static { + pub trait AbciApplication: Send + Sync + 'static { async fn echo( &self, request: tonic::Request, @@ -1804,18 +1717,20 @@ pub mod abci_application_server { >; } #[derive(Debug)] - pub struct AbciApplicationServer { - inner: Arc, + pub struct AbciApplicationServer { + inner: _Inner, accept_compression_encodings: EnabledCompressionEncodings, send_compression_encodings: EnabledCompressionEncodings, max_decoding_message_size: Option, max_encoding_message_size: Option, } - impl AbciApplicationServer { + struct _Inner(Arc); + impl AbciApplicationServer { pub fn new(inner: T) -> Self { Self::from_arc(Arc::new(inner)) } pub fn from_arc(inner: Arc) -> Self { + let inner = _Inner(inner); Self { inner, accept_compression_encodings: Default::default(), @@ -1865,8 +1780,8 @@ pub mod abci_application_server { impl tonic::codegen::Service> for AbciApplicationServer where T: AbciApplication, - B: Body + std::marker::Send + 'static, - B::Error: Into + std::marker::Send + 'static, + B: Body + Send + 'static, + B::Error: Into + Send + 'static, { type Response = http::Response; type Error = std::convert::Infallible; @@ -1878,6 +1793,7 @@ pub mod abci_application_server { Poll::Ready(Ok(())) } fn call(&mut self, req: http::Request) -> Self::Future { + let inner = self.inner.clone(); match req.uri().path() { "/tendermint.abci.ABCIApplication/Echo" => { #[allow(non_camel_case_types)] @@ -1907,6 +1823,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = EchoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1951,6 +1868,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = FlushSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -1995,6 +1913,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = InfoSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2040,6 +1959,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = DeliverTxSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2085,6 +2005,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CheckTxSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2129,6 +2050,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = QuerySvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2174,6 +2096,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let 
inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = CommitSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2219,6 +2142,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = InitChainSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2264,6 +2188,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = BeginBlockSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2309,6 +2234,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = EndBlockSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2355,6 +2281,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ListSnapshotsSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2401,6 +2328,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = OfferSnapshotSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2447,6 +2375,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = LoadSnapshotChunkSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2496,6 +2425,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ApplySnapshotChunkSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2542,6 +2472,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = PrepareProposalSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2588,6 +2519,7 @@ pub mod abci_application_server { let max_encoding_message_size = self.max_encoding_message_size; let inner = self.inner.clone(); let fut = async move { + let inner = inner.0; let method = ProcessProposalSvc(inner); let codec = tonic::codec::ProstCodec::default(); let mut grpc = tonic::server::Grpc::new(codec) @@ -2606,25 +2538,20 @@ pub mod abci_application_server { } _ => { Box::pin(async move { - let mut response = http::Response::new(empty_body()); - let headers = response.headers_mut(); - headers - .insert( - tonic::Status::GRPC_STATUS, - (tonic::Code::Unimplemented as i32).into(), - ); - headers - .insert( - http::header::CONTENT_TYPE, - 
tonic::metadata::GRPC_CONTENT_TYPE, - ); - Ok(response) + Ok( + http::Response::builder() + .status(200) + .header("grpc-status", "12") + .header("content-type", "application/grpc") + .body(empty_body()) + .unwrap(), + ) }) } } } } - impl Clone for AbciApplicationServer { + impl Clone for AbciApplicationServer { fn clone(&self) -> Self { let inner = self.inner.clone(); Self { @@ -2636,9 +2563,17 @@ pub mod abci_application_server { } } } - /// Generated gRPC service name - pub const SERVICE_NAME: &str = "tendermint.abci.ABCIApplication"; - impl tonic::server::NamedService for AbciApplicationServer { - const NAME: &'static str = SERVICE_NAME; + impl Clone for _Inner { + fn clone(&self) -> Self { + Self(Arc::clone(&self.0)) + } + } + impl std::fmt::Debug for _Inner { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:?}", self.0) + } + } + impl tonic::server::NamedService for AbciApplicationServer { + const NAME: &'static str = "tendermint.abci.ABCIApplication"; } } diff --git a/crates/proto/src/gen/tendermint.crypto.rs b/crates/proto/src/gen/tendermint.crypto.rs index 916eaf75f5..d0c624cc9b 100644 --- a/crates/proto/src/gen/tendermint.crypto.rs +++ b/crates/proto/src/gen/tendermint.crypto.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Proof { #[prost(int64, tag = "1")] @@ -14,12 +14,10 @@ impl ::prost::Name for Proof { const NAME: &'static str = "Proof"; const PACKAGE: &'static str = "tendermint.crypto"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.crypto.Proof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.crypto.Proof".into() + ::prost::alloc::format!("tendermint.crypto.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValueOp { /// Encoded in ProofOp.Key. 
@@ -33,12 +31,10 @@ impl ::prost::Name for ValueOp { const NAME: &'static str = "ValueOp"; const PACKAGE: &'static str = "tendermint.crypto"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.crypto.ValueOp".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.crypto.ValueOp".into() + ::prost::alloc::format!("tendermint.crypto.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DominoOp { #[prost(string, tag = "1")] @@ -52,15 +48,13 @@ impl ::prost::Name for DominoOp { const NAME: &'static str = "DominoOp"; const PACKAGE: &'static str = "tendermint.crypto"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.crypto.DominoOp".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.crypto.DominoOp".into() + ::prost::alloc::format!("tendermint.crypto.{}", Self::NAME) } } /// ProofOp defines an operation used for calculating Merkle root /// The data could be arbitrary format, providing nessecary data /// for example neighbouring node hash +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProofOp { #[prost(string, tag = "1")] @@ -74,13 +68,11 @@ impl ::prost::Name for ProofOp { const NAME: &'static str = "ProofOp"; const PACKAGE: &'static str = "tendermint.crypto"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.crypto.ProofOp".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.crypto.ProofOp".into() + ::prost::alloc::format!("tendermint.crypto.{}", Self::NAME) } } /// ProofOps is Merkle proof defined by the list of ProofOps +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ProofOps { #[prost(message, repeated, tag = "1")] @@ -90,13 +82,11 @@ impl ::prost::Name for ProofOps { const NAME: &'static str = "ProofOps"; const PACKAGE: &'static str = "tendermint.crypto"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.crypto.ProofOps".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.crypto.ProofOps".into() + ::prost::alloc::format!("tendermint.crypto.{}", Self::NAME) } } /// PublicKey defines the keys available for use with Validators +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PublicKey { #[prost(oneof = "public_key::Sum", tags = "1, 2")] @@ -104,6 +94,7 @@ pub struct PublicKey { } /// Nested message and enum types in `PublicKey`. pub mod public_key { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Sum { #[prost(bytes, tag = "1")] @@ -116,9 +107,6 @@ impl ::prost::Name for PublicKey { const NAME: &'static str = "PublicKey"; const PACKAGE: &'static str = "tendermint.crypto"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.crypto.PublicKey".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.crypto.PublicKey".into() + ::prost::alloc::format!("tendermint.crypto.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/tendermint.p2p.rs b/crates/proto/src/gen/tendermint.p2p.rs index f948e62121..61828675bb 100644 --- a/crates/proto/src/gen/tendermint.p2p.rs +++ b/crates/proto/src/gen/tendermint.p2p.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct NetAddress { #[prost(string, tag = "1")] @@ -12,13 +12,11 @@ impl ::prost::Name for NetAddress { const NAME: &'static str = "NetAddress"; const PACKAGE: &'static str = "tendermint.p2p"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.p2p.NetAddress".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.p2p.NetAddress".into() + ::prost::alloc::format!("tendermint.p2p.{}", Self::NAME) } } -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct ProtocolVersion { #[prost(uint64, tag = "1")] pub p2p: u64, @@ -31,12 +29,10 @@ impl ::prost::Name for ProtocolVersion { const NAME: &'static str = "ProtocolVersion"; const PACKAGE: &'static str = "tendermint.p2p"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.p2p.ProtocolVersion".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.p2p.ProtocolVersion".into() + ::prost::alloc::format!("tendermint.p2p.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DefaultNodeInfo { #[prost(message, optional, tag = "1")] @@ -60,12 +56,10 @@ impl ::prost::Name for DefaultNodeInfo { const NAME: &'static str = "DefaultNodeInfo"; const PACKAGE: &'static str = "tendermint.p2p"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.p2p.DefaultNodeInfo".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.p2p.DefaultNodeInfo".into() + ::prost::alloc::format!("tendermint.p2p.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DefaultNodeInfoOther { #[prost(string, tag = "1")] @@ -77,9 +71,6 @@ impl ::prost::Name for DefaultNodeInfoOther { const NAME: &'static str = "DefaultNodeInfoOther"; const PACKAGE: &'static str = "tendermint.p2p"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.p2p.DefaultNodeInfoOther".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.p2p.DefaultNodeInfoOther".into() + ::prost::alloc::format!("tendermint.p2p.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/tendermint.types.rs b/crates/proto/src/gen/tendermint.types.rs index 0b2130371a..dd05ee65c4 100644 --- a/crates/proto/src/gen/tendermint.types.rs +++ b/crates/proto/src/gen/tendermint.types.rs @@ -1,4 +1,4 @@ -// This file is @generated by prost-build. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorSet { #[prost(message, repeated, tag = "1")] @@ -12,12 +12,10 @@ impl ::prost::Name for ValidatorSet { const NAME: &'static str = "ValidatorSet"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.ValidatorSet".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.ValidatorSet".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Validator { #[prost(bytes = "vec", tag = "1")] @@ -33,12 +31,10 @@ impl ::prost::Name for Validator { const NAME: &'static str = "Validator"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Validator".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Validator".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SimpleValidator { #[prost(message, optional, tag = "1")] @@ -50,13 +46,11 @@ impl ::prost::Name for SimpleValidator { const NAME: &'static str = "SimpleValidator"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.SimpleValidator".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.SimpleValidator".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// PartsetHeader +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PartSetHeader { #[prost(uint32, tag = "1")] @@ -68,12 +62,10 @@ impl ::prost::Name for PartSetHeader { const NAME: &'static str = "PartSetHeader"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.PartSetHeader".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.PartSetHeader".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Part { #[prost(uint32, tag = "1")] @@ -87,13 +79,11 @@ impl ::prost::Name for Part { const NAME: &'static str = "Part"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Part".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Part".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// BlockID +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BlockId { #[prost(bytes = "vec", tag = "1")] @@ -105,13 +95,11 @@ impl ::prost::Name for BlockId { const NAME: &'static str = "BlockID"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.BlockID".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.BlockID".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// Header defines the structure of a block header. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Header { /// basic block info @@ -164,13 +152,11 @@ impl ::prost::Name for Header { const NAME: &'static str = "Header"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Header".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Header".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// Data contains the set of transactions included in the block +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Data { /// Txs that will be applied by state @ block.Height+1. @@ -183,14 +169,12 @@ impl ::prost::Name for Data { const NAME: &'static str = "Data"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Data".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Data".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// Vote represents a prevote, precommit, or commit vote from validators for /// consensus. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Vote { #[prost(enumeration = "SignedMsgType", tag = "1")] @@ -215,13 +199,11 @@ impl ::prost::Name for Vote { const NAME: &'static str = "Vote"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Vote".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Vote".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// Commit contains the evidence that a block was committed by a set of validators. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Commit { #[prost(int64, tag = "1")] @@ -237,13 +219,11 @@ impl ::prost::Name for Commit { const NAME: &'static str = "Commit"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Commit".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Commit".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// CommitSig is a part of the Vote included in a Commit. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CommitSig { #[prost(enumeration = "BlockIdFlag", tag = "1")] @@ -259,12 +239,10 @@ impl ::prost::Name for CommitSig { const NAME: &'static str = "CommitSig"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.CommitSig".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.CommitSig".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Proposal { #[prost(enumeration = "SignedMsgType", tag = "1")] @@ -286,12 +264,10 @@ impl ::prost::Name for Proposal { const NAME: &'static str = "Proposal"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Proposal".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Proposal".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SignedHeader { #[prost(message, optional, tag = "1")] @@ -303,12 +279,10 @@ impl ::prost::Name for SignedHeader { const NAME: &'static str = "SignedHeader"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.SignedHeader".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.SignedHeader".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LightBlock { #[prost(message, optional, tag = "1")] @@ -320,12 +294,10 @@ impl ::prost::Name for LightBlock { const NAME: &'static str = "LightBlock"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.LightBlock".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.LightBlock".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BlockMeta { #[prost(message, optional, tag = "1")] @@ -341,13 +313,11 @@ impl ::prost::Name for BlockMeta { const NAME: &'static str = "BlockMeta"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.BlockMeta".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.BlockMeta".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// TxProof represents a Merkle proof of the presence of a transaction in the Merkle tree. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TxProof { #[prost(bytes = "vec", tag = "1")] @@ -361,10 +331,7 @@ impl ::prost::Name for TxProof { const NAME: &'static str = "TxProof"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.TxProof".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.TxProof".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// BlockIdFlag indicates which BlcokID the signature is for @@ -383,10 +350,10 @@ impl BlockIdFlag { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unknown => "BLOCK_ID_FLAG_UNKNOWN", - Self::Absent => "BLOCK_ID_FLAG_ABSENT", - Self::Commit => "BLOCK_ID_FLAG_COMMIT", - Self::Nil => "BLOCK_ID_FLAG_NIL", + BlockIdFlag::Unknown => "BLOCK_ID_FLAG_UNKNOWN", + BlockIdFlag::Absent => "BLOCK_ID_FLAG_ABSENT", + BlockIdFlag::Commit => "BLOCK_ID_FLAG_COMMIT", + BlockIdFlag::Nil => "BLOCK_ID_FLAG_NIL", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -418,10 +385,10 @@ impl SignedMsgType { /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { - Self::Unknown => "SIGNED_MSG_TYPE_UNKNOWN", - Self::Prevote => "SIGNED_MSG_TYPE_PREVOTE", - Self::Precommit => "SIGNED_MSG_TYPE_PRECOMMIT", - Self::Proposal => "SIGNED_MSG_TYPE_PROPOSAL", + SignedMsgType::Unknown => "SIGNED_MSG_TYPE_UNKNOWN", + SignedMsgType::Prevote => "SIGNED_MSG_TYPE_PREVOTE", + SignedMsgType::Precommit => "SIGNED_MSG_TYPE_PRECOMMIT", + SignedMsgType::Proposal => "SIGNED_MSG_TYPE_PROPOSAL", } } /// Creates an enum from field names used in the ProtoBuf definition. @@ -435,6 +402,7 @@ impl SignedMsgType { } } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Evidence { #[prost(oneof = "evidence::Sum", tags = "1, 2")] @@ -442,6 +410,7 @@ pub struct Evidence { } /// Nested message and enum types in `Evidence`. pub mod evidence { + #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Sum { #[prost(message, tag = "1")] @@ -454,13 +423,11 @@ impl ::prost::Name for Evidence { const NAME: &'static str = "Evidence"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Evidence".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Evidence".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// DuplicateVoteEvidence contains evidence of a validator signed two conflicting votes. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DuplicateVoteEvidence { #[prost(message, optional, tag = "1")] @@ -478,13 +445,11 @@ impl ::prost::Name for DuplicateVoteEvidence { const NAME: &'static str = "DuplicateVoteEvidence"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.DuplicateVoteEvidence".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.DuplicateVoteEvidence".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// LightClientAttackEvidence contains evidence of a set of validators attempting to mislead a light client. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LightClientAttackEvidence { #[prost(message, optional, tag = "1")] @@ -502,12 +467,10 @@ impl ::prost::Name for LightClientAttackEvidence { const NAME: &'static str = "LightClientAttackEvidence"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.LightClientAttackEvidence".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.LightClientAttackEvidence".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct EvidenceList { #[prost(message, repeated, tag = "1")] @@ -517,12 +480,10 @@ impl ::prost::Name for EvidenceList { const NAME: &'static str = "EvidenceList"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.EvidenceList".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.EvidenceList".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Block { #[prost(message, optional, tag = "1")] @@ -538,14 +499,12 @@ impl ::prost::Name for Block { const NAME: &'static str = "Block"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.Block".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.Block".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// ConsensusParams contains consensus critical parameters that determine the /// validity of blocks. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ConsensusParams { #[prost(message, optional, tag = "1")] @@ -561,14 +520,12 @@ impl ::prost::Name for ConsensusParams { const NAME: &'static str = "ConsensusParams"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.ConsensusParams".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.ConsensusParams".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// BlockParams contains limits on the block size. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct BlockParams { /// Max block size, in bytes. /// Note: must be greater than 0 @@ -583,14 +540,12 @@ impl ::prost::Name for BlockParams { const NAME: &'static str = "BlockParams"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.BlockParams".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.BlockParams".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// EvidenceParams determine how we handle evidence of malfeasance. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct EvidenceParams { /// Max age of evidence, in blocks. 
/// @@ -615,14 +570,12 @@ impl ::prost::Name for EvidenceParams { const NAME: &'static str = "EvidenceParams"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.EvidenceParams".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.EvidenceParams".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// ValidatorParams restrict the public key types validators can use. /// NOTE: uses ABCI pubkey naming, not Amino names. +#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ValidatorParams { #[prost(string, repeated, tag = "1")] @@ -632,14 +585,12 @@ impl ::prost::Name for ValidatorParams { const NAME: &'static str = "ValidatorParams"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.ValidatorParams".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.ValidatorParams".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// VersionParams contains the ABCI application version. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct VersionParams { #[prost(uint64, tag = "1")] pub app: u64, @@ -648,16 +599,14 @@ impl ::prost::Name for VersionParams { const NAME: &'static str = "VersionParams"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.VersionParams".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.VersionParams".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } /// HashedParams is a subset of ConsensusParams. /// /// It is hashed into the Header.ConsensusHash. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct HashedParams { #[prost(int64, tag = "1")] pub block_max_bytes: i64, @@ -668,9 +617,6 @@ impl ::prost::Name for HashedParams { const NAME: &'static str = "HashedParams"; const PACKAGE: &'static str = "tendermint.types"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.types.HashedParams".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.types.HashedParams".into() + ::prost::alloc::format!("tendermint.types.{}", Self::NAME) } } diff --git a/crates/proto/src/gen/tendermint.version.rs b/crates/proto/src/gen/tendermint.version.rs index ef045e12cb..e2028c7901 100644 --- a/crates/proto/src/gen/tendermint.version.rs +++ b/crates/proto/src/gen/tendermint.version.rs @@ -1,7 +1,7 @@ -// This file is @generated by prost-build. /// App includes the protocol and software version for the application. /// This information is included in ResponseInfo. The App.Protocol can be /// updated in ResponseEndBlock. 
+#[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct App { #[prost(uint64, tag = "1")] @@ -13,16 +13,14 @@ impl ::prost::Name for App { const NAME: &'static str = "App"; const PACKAGE: &'static str = "tendermint.version"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.version.App".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.version.App".into() + ::prost::alloc::format!("tendermint.version.{}", Self::NAME) } } /// Consensus captures the consensus rules for processing a block in the blockchain, /// including all blockchain data structures and the rules of the application's /// state transition machine. -#[derive(Clone, Copy, PartialEq, ::prost::Message)] +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] pub struct Consensus { #[prost(uint64, tag = "1")] pub block: u64, @@ -33,9 +31,6 @@ impl ::prost::Name for Consensus { const NAME: &'static str = "Consensus"; const PACKAGE: &'static str = "tendermint.version"; fn full_name() -> ::prost::alloc::string::String { - "tendermint.version.Consensus".into() - } - fn type_url() -> ::prost::alloc::string::String { - "/tendermint.version.Consensus".into() + ::prost::alloc::format!("tendermint.version.{}", Self::NAME) } } diff --git a/crates/proto/src/lib.rs b/crates/proto/src/lib.rs index 24efda9fd4..d48882a2c2 100644 --- a/crates/proto/src/lib.rs +++ b/crates/proto/src/lib.rs @@ -225,6 +225,13 @@ pub mod penumbra { } } + pub mod cnidarium { + pub mod v1 { + include!("gen/penumbra.cnidarium.v1.rs"); + include!("gen/penumbra.cnidarium.v1.serde.rs"); + } + } + pub mod util { pub mod tendermint_proxy { pub mod v1 { diff --git a/crates/proto/src/protobuf/tendermint_compat.rs b/crates/proto/src/protobuf/tendermint_compat.rs index 47111a3fff..661b3012ef 100644 --- a/crates/proto/src/protobuf/tendermint_compat.rs +++ b/crates/proto/src/protobuf/tendermint_compat.rs @@ -60,10 +60,16 @@ impl From for penumbra_pb::TxResult { } impl From for penumbra_pb::Tag { - fn from(event_attr: tendermint::abci::EventAttribute) -> Self { + fn from( + tendermint::abci::EventAttribute { + key, + value, + index: _, + }: tendermint::abci::EventAttribute, + ) -> Self { Self { - key: event_attr.key_bytes().into(), - value: event_attr.value_bytes().into(), + key: key.into_bytes(), + value: value.into_bytes(), // TODO(kate): this was set to false previously, but it should probably use the // index field from the tendermint object. for now, carry out a refactor and avoid // changing behavior while doing so. @@ -72,25 +78,6 @@ impl From for penumbra_pb::Tag { } } -// impl From for penumbra_pb::Tag { -// fn from( -// tendermint::abci::event::v0_37::EventAttribute { -// key, -// value, -// index: _, -// }: tendermint::abci::EventAttribute, -// ) -> Self { -// Self { -// key: key.into_bytes(), -// value: value.into_bytes(), -// // TODO(kate): this was set to false previously, but it should probably use the -// // index field from the tendermint object. for now, carry out a refactor and avoid -// // changing behavior while doing so. -// index: false, -// } -// } -// } - // === broadcast_tx_async === impl From @@ -102,7 +89,6 @@ impl From data, log, hash, - .. }: tendermint_rpc::endpoint::broadcast::tx_async::Response, ) -> Self { Self { @@ -125,7 +111,6 @@ impl From data, log, hash, - .. 
}: tendermint_rpc::endpoint::broadcast::tx_sync::Response, ) -> Self { Self { diff --git a/crates/test/mock-consensus/src/block.rs b/crates/test/mock-consensus/src/block.rs index ba390b795f..7eb45b10a6 100644 --- a/crates/test/mock-consensus/src/block.rs +++ b/crates/test/mock-consensus/src/block.rs @@ -288,7 +288,7 @@ where "made block" ); // pass the current value of last_commit with this header - let block = Block::new(header.clone(), data, evidence, last_commit); + let block = Block::new(header.clone(), data, evidence, last_commit)?; // Now that the block is finalized, we can transition to the next block. // Generate a commit for the header we just made, that will be diff --git a/crates/util/auto-https/Cargo.toml b/crates/util/auto-https/Cargo.toml index 3250622b9b..6b445cf3f9 100644 --- a/crates/util/auto-https/Cargo.toml +++ b/crates/util/auto-https/Cargo.toml @@ -12,7 +12,7 @@ publish = false [dependencies] anyhow = {workspace = true} futures = {workspace = true} -rustls = "0.23.20" +rustls = "0.21" axum-server = {workspace = true, features = []} -rustls-acme = { version = "0.12.1", features = ["axum"] } +rustls-acme = { version = "0.7", features = ["axum"] } tracing = {workspace = true} diff --git a/crates/util/auto-https/src/lib.rs b/crates/util/auto-https/src/lib.rs index 3bce7968ec..50e3133558 100644 --- a/crates/util/auto-https/src/lib.rs +++ b/crates/util/auto-https/src/lib.rs @@ -45,6 +45,7 @@ pub fn axum_acceptor( // Define our server configuration, using the ACME certificate resolver. let mut rustls_config = ServerConfig::builder() + .with_safe_defaults() .with_no_client_auth() .with_cert_resolver(state.resolver()); rustls_config.alpn_protocols = self::alpn_protocols(); diff --git a/crates/util/tower-trace/src/lib.rs b/crates/util/tower-trace/src/lib.rs index 0992a0f28b..5e60baf249 100644 --- a/crates/util/tower-trace/src/lib.rs +++ b/crates/util/tower-trace/src/lib.rs @@ -11,7 +11,7 @@ use std::net::SocketAddr; // request level, but the hook available to do that gives us an http::Request // rather than a tonic::Request, so the tonic::Request::remote_addr method isn't // available. 
-pub fn remote_addr(req: &http::Request) -> Option { +pub fn remote_addr(req: &http::Request<()>) -> Option { use tonic::transport::server::TcpConnectInfo; // NOTE: needs to also check TlsConnectInfo if we use TLS req.extensions() diff --git a/crates/view/Cargo.toml b/crates/view/Cargo.toml index 3e6e067a42..aebc7e6fa8 100644 --- a/crates/view/Cargo.toml +++ b/crates/view/Cargo.toml @@ -23,7 +23,6 @@ async-stream = {workspace = true} async-trait = {workspace = true} bytes = {workspace = true, features = ["serde"]} camino = {workspace = true} -cnidarium = {workspace = true, features = ["rpc"]} decaf377 = {workspace = true, features = ["r1cs"], default-features = true} digest = "0.9" ed25519-consensus = {workspace = true} @@ -65,7 +64,7 @@ tap = {workspace = true} tendermint = {workspace = true} tokio = {workspace = true, features = ["full"]} tokio-stream = {workspace = true, features = ["sync"]} -tonic = {workspace = true, features = ["tls", "tls-webpki-roots"]} +tonic = {workspace = true} tracing = {workspace = true} tracing-subscriber = {workspace = true} url = {workspace = true} diff --git a/crates/view/src/service.rs b/crates/view/src/service.rs index 465dc8c028..8e3c8ce26c 100644 --- a/crates/view/src/service.rs +++ b/crates/view/src/service.rs @@ -17,8 +17,6 @@ use rand_core::OsRng; use tap::{Tap, TapFallible}; use tokio::sync::{watch, RwLock}; use tokio_stream::wrappers::WatchStream; -use tonic::transport::channel::ClientTlsConfig; -use tonic::transport::channel::Endpoint; use tonic::{async_trait, transport::Channel, Request, Response, Status}; use tracing::{instrument, Instrument}; use url::Url; @@ -125,7 +123,13 @@ impl ViewServer { /// will be backed by the same scanning task, rather than each spawning its own. pub async fn new(storage: Storage, node: Url) -> anyhow::Result { let span = tracing::error_span!(parent: None, "view"); - let channel = Self::get_pd_channel(node.clone()).await?; + let channel = Channel::from_shared(node.to_string()) + .with_context(|| "could not parse node URI")? + .connect() + .instrument(span.clone()) + .await + .with_context(|| "could not connect to grpc server") + .tap_err(|error| tracing::error!(?error, "could not connect to grpc server"))?; let (worker, state_commitment_tree, error_slot, sync_height_rx) = Worker::new(storage.clone(), channel) @@ -146,24 +150,6 @@ impl ViewServer { }) } - /// Obtain a Tonic [Channel] to a remote `pd` endpoint. - /// - /// Provided as a convenience method for bootstrapping a connection. - /// Handles configuring TLS if the URL is HTTPS. Also adds a tracing span - /// to the working [Channel]. - pub async fn get_pd_channel(node: Url) -> anyhow::Result { - let endpoint = get_pd_endpoint(node).await?; - let span = tracing::error_span!(parent: None, "view"); - let c: Channel = endpoint - .connect() - .instrument(span.clone()) - .await - .with_context(|| "could not connect to grpc server") - .tap_err(|error| tracing::error!(?error, "could not connect to grpc server"))?; - - Ok(c) - } - /// Checks if the view server worker has encountered an error. /// /// This function returns a gRPC [`tonic::Status`] containing the view server worker error if @@ -1857,16 +1843,3 @@ impl ViewService for ViewServer { unimplemented!("unbonding_tokens_by_address_index currently only implemented on web") } } - -/// Convert a pd node URL to a Tonic `Endpoint`. -/// -/// Required in order to configure TLS for HTTPS endpoints. 
-async fn get_pd_endpoint(node: Url) -> anyhow::Result { - let endpoint = match node.scheme() { - "http" => Channel::from_shared(node.to_string())?, - "https" => Channel::from_shared(node.to_string())? - .tls_config(ClientTlsConfig::new().with_webpki_roots())?, - other => anyhow::bail!("unknown url scheme {other}"), - }; - Ok(endpoint) -} diff --git a/crates/view/src/storage/sct.rs b/crates/view/src/storage/sct.rs index a3e9bdce86..dffaa134fb 100644 --- a/crates/view/src/storage/sct.rs +++ b/crates/view/src/storage/sct.rs @@ -17,13 +17,12 @@ pub struct TreeStore<'a, 'c: 'a>(pub &'a mut Transaction<'c>); impl Read for TreeStore<'_, '_> { type Error = anyhow::Error; - type HashesIter<'a> - = Box> + 'a> + type HashesIter<'a> = Box> + 'a> where Self: 'a; - type CommitmentsIter<'a> - = Box> + 'a> + type CommitmentsIter<'a> = Box> + + 'a> where Self: 'a; diff --git a/crates/view/src/worker.rs b/crates/view/src/worker.rs index 5bd6e41d4e..ddc92bc8a8 100644 --- a/crates/view/src/worker.rs +++ b/crates/view/src/worker.rs @@ -485,15 +485,14 @@ async fn sct_divergence_check( height: u64, actual_root: penumbra_tct::Root, ) -> anyhow::Result<()> { - use cnidarium::proto::v1::query_service_client::QueryServiceClient; - use penumbra_proto::DomainType; + use penumbra_proto::{cnidarium::v1::query_service_client::QueryServiceClient, DomainType}; use penumbra_sct::state_key as sct_state_key; let mut client = QueryServiceClient::new(channel); tracing::info!(?height, "fetching anchor @ height"); let value = client - .key_value(cnidarium::proto::v1::KeyValueRequest { + .key_value(penumbra_proto::cnidarium::v1::KeyValueRequest { key: sct_state_key::tree::anchor_by_height(height), proof: false, ..Default::default() diff --git a/flake.lock b/flake.lock index bf9a8a311b..96be4adf84 100644 --- a/flake.lock +++ b/flake.lock @@ -1,12 +1,17 @@ { "nodes": { "crane": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, "locked": { - "lastModified": 1734808813, - "narHash": "sha256-3aH/0Y6ajIlfy7j52FGZ+s4icVX0oHhqBzRdlOeztqg=", + "lastModified": 1724537630, + "narHash": "sha256-gpqINM71zp3kw5XYwUXa84ZtPnCmLLnByuFoYesT1bY=", "owner": "ipetkov", "repo": "crane", - "rev": "72e2d02dbac80c8c86bf6bf3e785536acf8ee926", + "rev": "3e08f4b1fc9aaede5dd511d8f5f4ef27501e49b0", "type": "github" }, "original": { @@ -20,11 +25,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1731533236, - "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", + "lastModified": 1710146030, + "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", "owner": "numtide", "repo": "flake-utils", - "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", + "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", "type": "github" }, "original": { @@ -35,11 +40,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1734991663, - "narHash": "sha256-8T660guvdaOD+2/Cj970bWlQwAyZLKrrbkhYOFcY1YE=", + "lastModified": 1733808091, + "narHash": "sha256-KWwINTQelKOoQgrXftxoqxmKFZb9pLVfnRvK270nkVk=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "6c90912761c43e22b6fb000025ab96dd31c971ff", + "rev": "a0f3e10d94359665dba45b71b4227b0aeb851f8e", "type": "github" }, "original": { @@ -64,11 +69,11 @@ ] }, "locked": { - "lastModified": 1735093658, - "narHash": "sha256-eIUYGDtairggo7+JXSwN7b6Zr03BJ7tsZL/U0NkDr0s=", + "lastModified": 1724898214, + "narHash": "sha256-4yMO9+Lsr3zqTf4clAGGag/bfNTmc/ITOXbJQcOEok4=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "ca249a1d98eff27e92665ac462b9d47f58141925", + "rev": "0bc2c784e3a6ce30a2ab1b9f47325ccbed13039f", "type": 
"github" }, "original": { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 7de4ccb549..23e8b42119 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,7 +1,7 @@ [toolchain] # We set a specific version of rust so that CI workflows use the same # version development environments do. -channel = "1.83" +channel = "1.80" components = [ "rustfmt", "rust-analyzer" ] # Include wasm toolchain, for CI tests to check wasm32 build targets still work, # to avoid downstream breakage in `penumbra-wasm` crate, in the web repo. diff --git a/tools/proto-compiler/Cargo.lock b/tools/proto-compiler/Cargo.lock index f60a62414d..02560aace3 100644 --- a/tools/proto-compiler/Cargo.lock +++ b/tools/proto-compiler/Cargo.lock @@ -63,7 +63,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.38", ] [[package]] @@ -74,15 +74,9 @@ checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.38", ] -[[package]] -name = "atomic-waker" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" - [[package]] name = "autocfg" version = "1.1.0" @@ -91,17 +85,18 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "axum" -version = "0.7.5" +version = "0.6.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" +checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", "axum-core", + "bitflags 1.3.2", "bytes", "futures-util", "http", "http-body", - "http-body-util", + "hyper", "itoa", "matchit", "memchr", @@ -118,20 +113,17 @@ dependencies = [ [[package]] name = "axum-core" -version = "0.4.5" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", "bytes", "futures-util", "http", "http-body", - "http-body-util", "mime", - "pin-project-lite", "rustversion", - "sync_wrapper", "tower-layer", "tower-service", ] @@ -157,12 +149,6 @@ version = "0.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" -[[package]] -name = "base64" -version = "0.22.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" - [[package]] name = "bitflags" version = "1.3.2" @@ -208,9 +194,9 @@ dependencies = [ [[package]] name = "bytes" -version = "1.9.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] @@ -245,17 +231,6 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" -[[package]] -name = "cosmos-sdk-proto" -version = "0.26.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"462e1f6a8e005acc8835d32d60cbd7973ed65ea2a8d8473830e675f050956427" -dependencies = [ - "prost", - "tendermint-proto", - "tonic", -] - [[package]] name = "cpufeatures" version = "0.2.9" @@ -314,7 +289,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -412,17 +387,17 @@ checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" [[package]] name = "h2" -version = "0.4.7" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ - "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", + "futures-util", "http", - "indexmap 2.0.2", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", @@ -447,12 +422,6 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "heck" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" - [[package]] name = "hex" version = "0.4.3" @@ -460,36 +429,33 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] -name = "http" -version = "1.2.0" +name = "home" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" dependencies = [ - "bytes", - "fnv", - "itoa", + "windows-sys", ] [[package]] -name = "http-body" -version = "1.0.1" +name = "http" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes", - "http", + "fnv", + "itoa", ] [[package]] -name = "http-body-util" -version = "0.1.2" +name = "http-body" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ "bytes", - "futures-util", "http", - "http-body", "pin-project-lite", ] @@ -507,12 +473,13 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "1.5.1" +version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" +checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ "bytes", "futures-channel", + "futures-core", "futures-util", "h2", "http", @@ -521,55 +488,35 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "smallvec", + "socket2 0.4.9", "tokio", + "tower-service", + "tracing", "want", ] [[package]] name = "hyper-timeout" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" -dependencies = [ - "hyper", - "hyper-util", - "pin-project-lite", - "tokio", - "tower-service", -] - -[[package]] -name = "hyper-util" -version = "0.1.10" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http", - "http-body", "hyper", "pin-project-lite", - "socket2", "tokio", - "tower-service", - "tracing", + "tokio-io-timeout", ] [[package]] name = "ibc-proto" -version = "0.51.1" +version = "0.40.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b70f517162e74e2d35875b8b94bf4d1e45f2c69ef3de452dc855944455d33ca" +checksum = "4222cfac37f21da28292db0f2673fdb8455284895891ff09979680243efb9a20" dependencies = [ - "base64 0.22.1", + "base64", "bytes", - "cosmos-sdk-proto", "flex-error", "ics23", - "informalsystems-pbjson", "prost", "subtle-encoding", "tendermint-proto", @@ -578,9 +525,9 @@ dependencies = [ [[package]] name = "ics23" -version = "0.12.0" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73b17f1a5bd7d12ad30a21445cfa5f52fd7651cb3243ba866f9916b1ec112f12" +checksum = "18798160736c1e368938ba6967dbcb3c7afb3256b442a5506ba5222eebb68a5a" dependencies = [ "anyhow", "blake2", @@ -621,7 +568,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aa4a0980c8379295100d70854354e78df2ee1c6ca0f96ffe89afeb3140e3a3d" dependencies = [ - "base64 0.21.4", + "base64", "serde", ] @@ -634,15 +581,6 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" -dependencies = [ - "either", -] - [[package]] name = "itoa" version = "1.0.9" @@ -660,9 +598,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.168" +version = "0.2.149" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" +checksum = "a08173bc88b7955d1b3145aa561539096c421ac8debde8cbc3612ec635fee29b" [[package]] name = "linux-raw-sys" @@ -705,13 +643,13 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.3" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "wasi", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] @@ -720,6 +658,17 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "num-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + [[package]] name = "num-traits" version = "0.2.17" @@ -752,31 +701,31 @@ checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" [[package]] name = "pbjson" -version = "0.7.0" +version = "0.6.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7e6349fa080353f4a597daffd05cb81572a9c031a6d4fff7e504947496fcc68" +checksum = "1030c719b0ec2a2d25a5df729d6cff1acf3cc230bf766f4f97833591f7577b90" dependencies = [ - "base64 0.21.4", + "base64", "serde", ] [[package]] name = "pbjson-build" -version = "0.7.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eea3058763d6e656105d1403cb04e0a41b7bbac6362d413e7c33be0c32279c9" +checksum = "2580e33f2292d34be285c5bc3dba5259542b083cfad6037b6d70345f24dcb735" dependencies = [ - "heck 0.5.0", - "itertools 0.13.0", + "heck", + "itertools", "prost", "prost-types", ] [[package]] name = "pbjson-types" -version = "0.7.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e54e5e7bfb1652f95bc361d76f3c780d8e526b134b85417e774166ee941f0887" +checksum = "18f596653ba4ac51bdecbb4ef6773bc7f56042dc13927910de1684ad3d32aa12" dependencies = [ "bytes", "chrono", @@ -837,7 +786,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.38", ] [[package]] @@ -871,7 +820,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2", - "syn", + "syn 2.0.38", ] [[package]] @@ -885,9 +834,9 @@ dependencies = [ [[package]] name = "prost" -version = "0.13.4" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" +checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", "prost-derive", @@ -895,12 +844,13 @@ dependencies = [ [[package]] name = "prost-build" -version = "0.13.4" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" dependencies = [ - "heck 0.4.1", - "itertools 0.11.0", + "bytes", + "heck", + "itertools", "log", "multimap", "once_cell", @@ -911,48 +861,49 @@ dependencies = [ "pulldown-cmark", "pulldown-cmark-to-cmark", "regex", - "syn", + "syn 2.0.38", "tempfile", + "which", ] [[package]] name = "prost-derive" -version = "0.13.4" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" +checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", - "itertools 0.11.0", + "itertools", "proc-macro2", "quote", - "syn", + "syn 2.0.38", ] [[package]] name = "prost-types" -version = "0.13.4" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ "prost", ] [[package]] name = "pulldown-cmark" -version = "0.12.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f86ba2052aebccc42cbbb3ed234b8b13ce76f75c3551a303cb2bcffcff12bb14" +checksum = "77a1a2f1f0a7ecff9c31abbe177637be0e97a0aef46cf8738ece09327985d998" dependencies = [ - "bitflags 2.4.1", + "bitflags 1.3.2", "memchr", "unicase", ] [[package]] name = "pulldown-cmark-to-cmark" -version 
= "19.0.1" +version = "10.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e84a87de49d1b6c63f0998da7ade299905387ae1feae350efc98e0632637f589" +checksum = "0194e6e1966c23cc5fd988714f85b18d548d773e81965413555d96569931833d" dependencies = [ "pulldown-cmark", ] @@ -1059,7 +1010,7 @@ dependencies = [ "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] @@ -1094,7 +1045,7 @@ checksum = "1e48d1f918009ce3145511378cf68d613e3b3d9137d67272562080d68a2b32d5" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.38", ] [[package]] @@ -1128,19 +1079,23 @@ dependencies = [ ] [[package]] -name = "smallvec" -version = "1.13.2" +name = "socket2" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +dependencies = [ + "libc", + "winapi", +] [[package]] name = "socket2" -version = "0.5.8" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys", ] [[package]] @@ -1158,6 +1113,17 @@ dependencies = [ "zeroize", ] +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "syn" version = "2.0.38" @@ -1171,9 +1137,9 @@ dependencies = [ [[package]] name = "sync_wrapper" -version = "1.0.2" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "tempfile" @@ -1185,18 +1151,21 @@ dependencies = [ "fastrand", "redox_syscall", "rustix", - "windows-sys 0.48.0", + "windows-sys", ] [[package]] name = "tendermint-proto" -version = "0.40.0" +version = "0.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c81ba1b023ec00763c3bc4f4376c67c0047f185cccf95c416c7a2f16272c4cbb" +checksum = "2cc728a4f9e891d71adf66af6ecaece146f9c7a11312288a3107b3e1d6979aaf" dependencies = [ "bytes", "flex-error", + "num-derive", + "num-traits", "prost", + "prost-types", "serde", "serde_bytes", "subtle-encoding", @@ -1232,36 +1201,34 @@ dependencies = [ [[package]] name = "tokio" -version = "1.42.0" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" +checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" dependencies = [ "backtrace", "bytes", "libc", "mio", "pin-project-lite", - "socket2", - "tokio-macros", - "windows-sys 0.52.0", + "socket2 0.5.4", + "windows-sys", ] [[package]] -name = "tokio-macros" -version = "2.4.0" +name = "tokio-io-timeout" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" dependencies = [ - "proc-macro2", - "quote", - "syn", + "pin-project-lite", + 
"tokio", ] [[package]] name = "tokio-stream" -version = "0.1.17" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" dependencies = [ "futures-core", "pin-project-lite", @@ -1284,26 +1251,23 @@ dependencies = [ [[package]] name = "tonic" -version = "0.12.3" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" dependencies = [ "async-stream", "async-trait", "axum", - "base64 0.22.1", + "base64", "bytes", "h2", "http", "http-body", - "http-body-util", "hyper", "hyper-timeout", - "hyper-util", "percent-encoding", "pin-project", "prost", - "socket2", "tokio", "tokio-stream", "tower", @@ -1314,16 +1278,15 @@ dependencies = [ [[package]] name = "tonic-build" -version = "0.12.3" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" +checksum = "9d021fc044c18582b9a2408cd0dd05b1596e3ecdb5c4df822bb0183545683889" dependencies = [ "prettyplease", "proc-macro2", "prost-build", - "prost-types", "quote", - "syn", + "syn 2.0.38", ] [[package]] @@ -1377,7 +1340,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.38", ] [[package]] @@ -1438,52 +1401,61 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "windows-sys" -version = "0.48.0" +name = "which" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ - "windows-targets 0.48.5", + "either", + "home", + "once_cell", + "rustix", ] [[package]] -name = "windows-sys" -version = "0.52.0" +name = "winapi" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ - "windows-targets 0.52.6", + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", ] [[package]] -name = "windows-targets" -version = "0.48.5" +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - 
"windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", + "windows-targets", ] [[package]] name = "windows-targets" -version = "0.52.6" +version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm 0.52.6", - "windows_aarch64_msvc 0.52.6", - "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", - "windows_i686_msvc 0.52.6", - "windows_x86_64_gnu 0.52.6", - "windows_x86_64_gnullvm 0.52.6", - "windows_x86_64_msvc 0.52.6", + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", ] [[package]] @@ -1492,90 +1464,42 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" - [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" - [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" -[[package]] -name = "windows_i686_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" - -[[package]] -name = "windows_i686_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" - [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" -[[package]] -name = "windows_i686_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" - [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" - [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" - [[package]] name = 
"windows_x86_64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" - [[package]] name = "zeroize" version = "1.6.0" diff --git a/tools/proto-compiler/Cargo.toml b/tools/proto-compiler/Cargo.toml index de3295b4a0..43dc1cab78 100644 --- a/tools/proto-compiler/Cargo.toml +++ b/tools/proto-compiler/Cargo.toml @@ -6,13 +6,13 @@ publish = false [dependencies] anyhow = "1" -ibc-proto = { version = "0.51.1" } -ics23 = "0.12.0" -pbjson = "0.7.0" -pbjson-build = "0.7.0" -pbjson-types = "0.7.0" -prost = "0.13.3" -prost-build = "0.13.3" -prost-types = "0.13.3" +ibc-proto = { version = "0.40.0" } +ics23 = "0.11.3" +pbjson = "0.6" +pbjson-build = "0.6" +pbjson-types = "0.6" +prost = "0.12.3" +prost-build = "0.12.3" +prost-types = "0.12" tempfile = "3" -tonic-build = { version = "0.12.3", features = ["cleanup-markdown"] } +tonic-build = { version = "0.10.0", features = ["cleanup-markdown"] } diff --git a/tools/proto-compiler/src/main.rs b/tools/proto-compiler/src/main.rs index 54f3d49547..8915c3d811 100644 --- a/tools/proto-compiler/src/main.rs +++ b/tools/proto-compiler/src/main.rs @@ -4,6 +4,13 @@ fn main() -> anyhow::Result<()> { let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")); println!("root: {}", root.display()); + // We build the proto files for the main penumbra_proto crate + // and for the cnidarium crate separately, because the + // cnidarium crate is supposed to be independent of the + // rest of the Penumbra codebase and its proto structures. + // Unfortunately, this means duplicating a lot of logic, because + // we can't share the prost_build::Config between the two. + let target_dir = root .join("..") .join("..") @@ -11,8 +18,16 @@ fn main() -> anyhow::Result<()> { .join("proto") .join("src") .join("gen"); + let cnidarium_target_dir = root + .join("..") + .join("..") + .join("crates") + .join("cnidarium") + .join("src") + .join("gen"); println!("target_dir: {}", target_dir.display()); + println!("cnidarium_target_dir: {}", cnidarium_target_dir.display()); // https://github.com/penumbra-zone/penumbra/issues/3038#issuecomment-1722534133 // Using the "no_lfs" suffix prevents matching a catch-all LFS rule. @@ -20,6 +35,7 @@ fn main() -> anyhow::Result<()> { // prost_build::Config isn't Clone, so we need to make two. let mut config = prost_build::Config::new(); + let mut cnidarium_config = prost_build::Config::new(); config.compile_well_known_types(); // As recommended in pbjson_types docs. 
@@ -32,13 +48,34 @@ fn main() -> anyhow::Result<()> { config.extern_path(".ics23", "::ics23"); config.extern_path(".cosmos.ics23", "::ics23"); + cnidarium_config.compile_well_known_types(); + cnidarium_config.extern_path(".google.protobuf", "::pbjson_types"); + cnidarium_config.extern_path(".ibc", "::ibc_proto::ibc"); + cnidarium_config.extern_path(".ics23", "::ics23"); + cnidarium_config.extern_path(".cosmos.ics23", "::ics23"); + config .out_dir(&target_dir) .file_descriptor_set_path(&target_dir.join(descriptor_file_name)) .enable_type_names(); + cnidarium_config + .out_dir(&cnidarium_target_dir) + .file_descriptor_set_path(&cnidarium_target_dir.join(descriptor_file_name)) + .enable_type_names(); let rpc_doc_attr = r#"#[cfg(feature = "rpc")]"#; + tonic_build::configure() + .out_dir(&cnidarium_target_dir) + .emit_rerun_if_changed(false) + .server_mod_attribute(".", rpc_doc_attr) + .client_mod_attribute(".", rpc_doc_attr) + .compile_with_config( + cnidarium_config, + &["../../proto/penumbra/penumbra/cnidarium/v1/cnidarium.proto"], + &["../../proto/penumbra/", "../../proto/rust-vendored/"], + )?; + tonic_build::configure() .out_dir(&target_dir) .emit_rerun_if_changed(false) @@ -47,7 +84,7 @@ fn main() -> anyhow::Result<()> { // We need to feature-gate the RPCs. .server_mod_attribute(".", rpc_doc_attr) .client_mod_attribute(".", rpc_doc_attr) - .compile_protos_with_config( + .compile_with_config( config, &[ "../../proto/penumbra/penumbra/core/app/v1/app.proto", @@ -74,6 +111,8 @@ fn main() -> anyhow::Result<()> { "../../proto/penumbra/penumbra/crypto/tct/v1/tct.proto", "../../proto/penumbra/penumbra/custody/v1/custody.proto", "../../proto/penumbra/penumbra/custody/threshold/v1/threshold.proto", + // Also included in the cnidarium crate directly. + "../../proto/penumbra/penumbra/cnidarium/v1/cnidarium.proto", "../../proto/penumbra/penumbra/tools/summoning/v1/summoning.proto", "../../proto/penumbra/penumbra/util/tendermint_proxy/v1/tendermint_proxy.proto", "../../proto/penumbra/penumbra/view/v1/view.proto", @@ -102,6 +141,13 @@ fn main() -> anyhow::Result<()> { // Finally, build pbjson Serialize, Deserialize impls: let descriptor_set = std::fs::read(target_dir.join(descriptor_file_name))?; + let cnidarium_descriptor_set = std::fs::read(cnidarium_target_dir.join(descriptor_file_name))?; + + pbjson_build::Builder::new() + .register_descriptors(&cnidarium_descriptor_set)? + .ignore_unknown_fields() + .out_dir(&cnidarium_target_dir) + .build(&[".penumbra"])?; pbjson_build::Builder::new() .register_descriptors(&descriptor_set)? 
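Editor's note (illustrative, not part of the patch): taken together, the restored hunks run code generation twice, once into the standalone `cnidarium` crate's `gen` directory and once into `penumbra-proto`, and then feed each emitted descriptor set to `pbjson_build` for the Serialize/Deserialize impls. A condensed sketch of one such pass, under the same assumptions as above (tonic-build 0.10, pbjson-build 0.6); the wrapper function and its parameters are illustrative, since the real script inlines these steps for each target:

```rust
use std::path::Path;

// Illustrative wrapper around one codegen pass; the real build script spells
// these steps out per target rather than factoring them.
fn compile_one_pass(
    config: prost_build::Config, // assumed to already carry out_dir + file_descriptor_set_path
    out_dir: &Path,
    descriptor_path: &Path,
    protos: &[&str],
) -> anyhow::Result<()> {
    let rpc_attr = r#"#[cfg(feature = "rpc")]"#;
    tonic_build::configure()
        .out_dir(out_dir)
        .emit_rerun_if_changed(false)
        // Generated gRPC clients/servers are feature-gated behind `rpc`.
        .server_mod_attribute(".", rpc_attr)
        .client_mod_attribute(".", rpc_attr)
        .compile_with_config(
            config,
            protos,
            &["../../proto/penumbra/", "../../proto/rust-vendored/"],
        )?;

    // The descriptor set written during compilation drives pbjson's
    // Serialize/Deserialize codegen.
    let descriptor_set = std::fs::read(descriptor_path)?;
    pbjson_build::Builder::new()
        .register_descriptors(&descriptor_set)?
        .ignore_unknown_fields()
        .out_dir(out_dir)
        .build(&[".penumbra"])?;
    Ok(())
}
```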
diff --git a/tools/summonerd/src/main.rs b/tools/summonerd/src/main.rs index 8d819ffa1d..c051af56af 100644 --- a/tools/summonerd/src/main.rs +++ b/tools/summonerd/src/main.rs @@ -8,7 +8,6 @@ mod server; mod storage; mod web; -use anyhow::Context as _; use anyhow::Result; use ark_groth16::{ProvingKey, VerifyingKey}; use ark_serialize::CanonicalSerialize; @@ -34,7 +33,7 @@ use std::io::IsTerminal as _; use std::io::Read; use std::net::SocketAddr; use storage::Storage; -use tonic::service::Routes; +use tonic::transport::Server; use tracing::Instrument; use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; @@ -207,13 +206,11 @@ impl Opt { }; let service = CoordinatorService::new(knower, storage.clone(), queue.clone(), marker); - - let routes = Routes::new( + let grpc_server = Server::builder().add_service( CeremonyCoordinatorServiceServer::new(service) .max_encoding_message_size(max_message_size(marker)) .max_decoding_message_size(max_message_size(marker)), - ) - .prepare(); + ); let web_app = web_app( fvk.payment_address(0u32.into()).0, @@ -223,13 +220,11 @@ impl Opt { storage, ); - let router = routes.into_axum_router().merge(web_app); + let router = grpc_server.into_router().merge(web_app); tracing::info!(?bind_addr, "starting grpc and web server"); - let listener = tokio::net::TcpListener::bind(&bind_addr) - .await - .with_context(|| format!("failed to bind TCP socket addr `{bind_addr}`"))?; - let server_handle = axum::serve(listener, router); + let server_handle = + axum::Server::bind(&bind_addr).serve(router.into_make_service()); // TODO: better error reporting // We error out if a service errors, rather than keep running diff --git a/tools/summonerd/src/web.rs b/tools/summonerd/src/web.rs index c2a142f9ed..a358e40fa7 100644 --- a/tools/summonerd/src/web.rs +++ b/tools/summonerd/src/web.rs @@ -30,7 +30,7 @@ async fn serve_summoning_jpg() -> impl IntoResponse { .status(StatusCode::OK) .header("Content-Type", "image/jpeg") .header("Cache-Control", "public, max-age=3600") // Cache for 1 hour - .body(axum::body::Body::from(jpg)) + .body(axum::body::Full::from(jpg)) .unwrap() } @@ -40,7 +40,7 @@ async fn serve_css() -> impl IntoResponse { .status(StatusCode::OK) .header("Content-Type", "text/css") .header("Cache-Control", "public, max-age=3600") // Cache for 1 hour - .body(axum::body::Body::from(css)) + .body(axum::body::Full::from(css)) .unwrap() } @@ -62,7 +62,7 @@ async fn serve_woff2(filename: &str) -> impl IntoResponse { .status(StatusCode::OK) .header("Content-Type", "font/woff2") .header("Cache-Control", "public, max-age=3600") // Cache for 1 hour - .body(axum::body::Body::from(data)) + .body(axum::body::Full::from(data)) .unwrap() }
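Editor's note (illustrative, not part of the patch): the summonerd hunks above drop back to the axum 0.6 serving model, where the tonic 0.10 services are converted into an axum router with `Server::builder().add_service(..).into_router()`, merged with the web routes, and bound using the hyper server that axum 0.6 re-exports (in axum 0.7 this became `axum::serve` plus a `tokio::net::TcpListener`, which is exactly what the revert removes). A minimal sketch of just the serving step, assuming axum 0.6, with a trivial placeholder router standing in for the merged gRPC + web router:

```rust
use std::net::SocketAddr;

// Minimal sketch: bind and serve an axum 0.6 Router the way the restored
// summonerd code does. The health-check route is a placeholder for the
// merged gRPC + web router built in the hunk above.
async fn run(bind_addr: SocketAddr) -> anyhow::Result<()> {
    let router = axum::Router::new()
        .route("/healthz", axum::routing::get(|| async { "ok" }));
    axum::Server::bind(&bind_addr)
        .serve(router.into_make_service())
        .await?;
    Ok(())
}
```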