From 5e4c892b322c8570831b42d8b8c40101df5c3a67 Mon Sep 17 00:00:00 2001 From: Daniil Date: Thu, 14 Apr 2022 15:28:20 +0300 Subject: [PATCH] [feature] #2003: Introduce Parity Scale Decoder tool (#2080) Signed-off-by: Daniil Polyakov --- Cargo.lock | 31 +- Cargo.toml | 3 +- data_model/src/events/data/filters.rs | 2 +- data_model/tests/data_model.rs | 3 +- tools/parity_scale_decoder/Cargo.toml | 24 ++ tools/parity_scale_decoder/README.md | 72 ++++ .../parity_scale_decoder/samples/account.bin | Bin 0 -> 80 bytes tools/parity_scale_decoder/samples/domain.bin | Bin 0 -> 91 bytes .../parity_scale_decoder/samples/trigger.bin | Bin 0 -> 78 bytes .../parity_scale_decoder/src/generate_map.rs | 386 ++++++++++++++++++ tools/parity_scale_decoder/src/main.rs | 330 +++++++++++++++ 11 files changed, 846 insertions(+), 5 deletions(-) create mode 100644 tools/parity_scale_decoder/Cargo.toml create mode 100644 tools/parity_scale_decoder/README.md create mode 100644 tools/parity_scale_decoder/samples/account.bin create mode 100644 tools/parity_scale_decoder/samples/domain.bin create mode 100644 tools/parity_scale_decoder/samples/trigger.bin create mode 100644 tools/parity_scale_decoder/src/generate_map.rs create mode 100644 tools/parity_scale_decoder/src/main.rs diff --git a/Cargo.lock b/Cargo.lock index 1d2ca570c94..6dc6867f5bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -531,6 +531,17 @@ dependencies = [ "tracing-error", ] +[[package]] +name = "colored" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd" +dependencies = [ + "atty", + "lazy_static", + "winapi", +] + [[package]] name = "console" version = "0.15.0" @@ -1089,9 +1100,9 @@ dependencies = [ [[package]] name = "eyre" -version = "0.6.7" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9289ed2c0440a6536e65119725cf91fc2c6b5e513bfd2e36e1134d7cca6ca12f" +checksum = 
"4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" dependencies = [ "indenter", "once_cell", @@ -2527,6 +2538,22 @@ dependencies = [ "syn", ] +[[package]] +name = "parity_scale_decoder" +version = "2.0.0-pre-rc.3" +dependencies = [ + "clap 3.1.8", + "colored", + "eyre", + "iroha_core", + "iroha_crypto", + "iroha_data_model", + "iroha_schema", + "iroha_schema_bin", + "iroha_version", + "parity-scale-codec", +] + [[package]] name = "parking_lot" version = "0.11.2" diff --git a/Cargo.toml b/Cargo.toml index 66b9a53d61e..d224ef5339c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,6 +13,8 @@ members = [ "core/test_network", "crypto", "tools/crypto_cli", + "tools/kura_inspector", + "tools/parity_scale_decoder", "data_model", "data_model/primitives", "futures", @@ -27,7 +29,6 @@ members = [ "schema/derive", "substrate", "telemetry", - "tools/kura_inspector", "version", "version/derive", ] diff --git a/data_model/src/events/data/filters.rs b/data_model/src/events/data/filters.rs index f3920adca05..b11b34650f7 100644 --- a/data_model/src/events/data/filters.rs +++ b/data_model/src/events/data/filters.rs @@ -689,7 +689,7 @@ pub mod prelude { peer::{PeerEventFilter, PeerFilter}, trigger::{TriggerEventFilter, TriggerFilter}, EntityFilter as DataEntityFilter, EventFilter as DataEventFilter, - FilterOpt::*, + FilterOpt::{self, *}, IdFilter, }; } diff --git a/data_model/tests/data_model.rs b/data_model/tests/data_model.rs index 1acfdb494ad..0812c69a741 100644 --- a/data_model/tests/data_model.rs +++ b/data_model/tests/data_model.rs @@ -143,6 +143,7 @@ mod register { } } +#[allow(unused_must_use)] #[test] fn find_rate_and_make_exchange_isi_should_succeed() { let kp = KeyPair { @@ -255,7 +256,7 @@ fn find_rate_and_make_exchange_isi_should_succeed() { .request(FindAssetQuantityById::new(asset_id_new( "btc", "crypto", "seller", "company", ))) - .expect_err("Failed to execute Iroha Query"); + .expect_err("Query must fail"); let buyer_eth_quantity = iroha_client 
.request(FindAssetQuantityById::new(asset_id_new( diff --git a/tools/parity_scale_decoder/Cargo.toml b/tools/parity_scale_decoder/Cargo.toml new file mode 100644 index 00000000000..a8f7a26a104 --- /dev/null +++ b/tools/parity_scale_decoder/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "parity_scale_decoder" +version = "2.0.0-pre-rc.3" +authors = ["Iroha 2 team "] +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[features] +no-color = ["colored/no-color"] + +[dependencies] +iroha_data_model = { version = "=2.0.0-pre-rc.3", path = "../../data_model", features = ["warp"]} +iroha_schema = { version = "=2.0.0-pre-rc.3", path = "../../schema"} +iroha_core = { version = "=2.0.0-pre-rc.3", path = "../../core", features = ["roles"] } +iroha_crypto = { version = "=2.0.0-pre-rc.3", path = "../../crypto", default-features = false } +iroha_version = { version = "=2.0.0-pre-rc.3", path = "../../version", default-features = false } +clap = { version = "3.1.8", features = ["derive", "cargo"] } +eyre = "0.6.8" +parity-scale-codec = { version = "2.3.1", default-features = false } +colored = "2.0.0" + +[dev-dependencies] +iroha_schema_bin = { version = "=2.0.0-pre-rc.3", path = "../../schema/bin"} diff --git a/tools/parity_scale_decoder/README.md b/tools/parity_scale_decoder/README.md new file mode 100644 index 00000000000..a1e2d2cfeb2 --- /dev/null +++ b/tools/parity_scale_decoder/README.md @@ -0,0 +1,72 @@ +# Parity Scale Decoder Tool + +## Description + +This tool will help you to decode **Iroha 2** types from binaries using [Parity Scale Codec](https://github.com/paritytech/parity-scale-codec) + +## Usage + +Building: + +```bash +cargo build --bin parity_scale_decoder +``` + +If your terminal does not support colors: + +```bash +cargo build --features no-color --bin parity_scale_decoder +``` + +From the main project directory: + +* List all supported types: + + ```bash + ./target/debug/parity_scale_decoder 
list-types
  ```

* Decode type from binary:

  ```bash
  ./target/debug/parity_scale_decoder decode <path/to/binary> --type <type>
  ```

  As an example you can use provided samples:

  ```bash
  ./target/debug/parity_scale_decoder decode tools/parity_scale_decoder/samples/account.bin --type Account
  ```

* Decode any type from binary:

  If you are not sure about type you can simply omit `--type` option:

  ```bash
  ./target/debug/parity_scale_decoder decode <path/to/binary>
  ```

* To see all available options run:

  ```bash
  ./target/debug/parity_scale_decoder --help
  ```

## Contributing

Check out [this document](https://github.com/hyperledger/iroha/blob/iroha2-dev/CONTRIBUTING.md)

## [Need help?](https://github.com/hyperledger/iroha/blob/iroha2-dev/CONTRIBUTING.md#contact)

## License

Iroha codebase is licensed under the Apache License,
Version 2.0 (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
diff --git a/tools/parity_scale_decoder/samples/account.bin b/tools/parity_scale_decoder/samples/account.bin new file mode 100644 index 0000000000000000000000000000000000000000..1f75361f6fec3f97808466312273ed834045b776 GIT binary patch literal 80 zcmWeh%*jkn)hN%;OGz!tNz6-OU|yqY7#5J3neFRkQEKGp ulb)MiT$OHUTo760;_aHBS;`XNSsd?`n3R-SRGy!lT^XO4lUbI^#0UVq${+Cn literal 0 HcmV?d00001 diff --git a/tools/parity_scale_decoder/samples/trigger.bin b/tools/parity_scale_decoder/samples/trigger.bin new file mode 100644 index 0000000000000000000000000000000000000000..fdfad52060db71252d942ccb5a76491ddc1b2ca3 GIT binary patch literal 78 zcmY$%&CDx_FUl`YWnf|AWng4rVBqCo761uol;`KAq!#5Q=B0=v=42)#3ou}aFfuYR GFaQ8@yB8Dy literal 0 HcmV?d00001 diff --git a/tools/parity_scale_decoder/src/generate_map.rs b/tools/parity_scale_decoder/src/generate_map.rs new file mode 100644 index 00000000000..a04e558fcd9 --- /dev/null +++ b/tools/parity_scale_decoder/src/generate_map.rs @@ -0,0 +1,386 @@ +//! Exports `generate_map()` function and contains implementation details for it + +use std::collections::BTreeSet; + +use iroha_core::*; +use iroha_crypto::*; +use iroha_data_model::{prelude::*, *}; +use iroha_schema::IntoSchema; +use iroha_version::*; + +use super::*; + +/// Trait to retrieve type name +/// +/// It is used with abusing [inherit impls](https://doc.rust-lang.org/reference/items/implementations.html#inherent-implementations) +/// to get `None` variant from types, which doesn't implement [`IntoSchema`] and `Some` which does +trait TypeName { + /// Get name of the type or `None` if type doesn't implement `IntoSchema` + fn type_name() -> Option; +} + +impl TypeName for T { + fn type_name() -> Option { + None + } +} + +/// Neotype which has `type_name()` method when `T` implements [`IntoSchema`] +struct WithTypeName(std::marker::PhantomData); + +impl WithTypeName { + /// Get type name using [`IntoSchema::type_name()`] + /// + /// Because this is implemented directly on `WithTypeName`, it has 
priority over + /// the [`TypeName`] trait impl. + /// + /// Note: this is a *totally different* function from that in + /// `TypeName`. This does not specialize the `TypeName` trait impl on `WithTypeName`. + fn type_name() -> Option { + Some(::type_name()) + } +} + +macro_rules! generate_map { + ($($t:ty),* $(,)?) => { + #[allow(trivial_casts)] + BTreeMap::from([ + $(( + WithTypeName::<$t>::type_name().unwrap_or(stringify!($t).to_owned()), + <$t as DumpDecoded>::dump_decoded as DumpDecodedPtr + )),* + ]) + }; +} + +/// Generate map with types and `dump_decoded()` ptr +#[allow(clippy::too_many_lines)] +pub fn generate_map() -> DumpDecodedMap { + generate_map! { + Account, + AccountEvent, + AccountEventFilter, + AccountFilter, + AccountId, + Action, + Add, + And, + Asset, + AssetDefinition, + AssetDefinitionEntry, + AssetDefinitionEvent, + AssetDefinitionEventFilter, + AssetDefinitionFilter, + AssetDefinitionId, + AssetEvent, + AssetEventFilter, + AssetFilter, + AssetId, + AssetValue, + AssetValueType, + BTreeMap, + BTreeMap, + BTreeMap, + BTreeMap, + BTreeMap>, + BTreeMap>, + BTreeMap>, + BTreeMap>, + BTreeSet, + BTreeSet, + BTreeSet>, + BTreeSet>, + BTreeSet, + BlockRejectionReason, + BurnBox, + Contains, + ContainsAll, + ContainsAny, + ContextValue, + DataEntityFilter, + DataEvent, + DataEventFilter, + Divide, + Domain, + DomainEvent, + DomainEventFilter, + DomainFilter, + DomainId, + Equal, + Executable, + ExecuteTriggerBox, + ExecuteTriggerEvent, + ExecuteTriggerEventFilter, + ExecutionTime, + Expression, + FailBox, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt>, + FilterOpt>, + FilterOpt>, + FilterOpt>, + FilterOpt>, + FilterOpt>, + FilterOpt>, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FilterOpt, + FindAccountById, + FindAccountKeyValueByIdAndKey, + FindAccountsByDomainId, + FindAccountsByName, + FindAllAccounts, + FindAllAssets, + FindAllAssetsDefinitions, + 
FindAllDomains, + FindAllParameters, + FindAllPeers, + FindAllRoles, + FindAssetById, + FindAssetDefinitionKeyValueByIdAndKey, + FindAssetKeyValueByIdAndKey, + FindAssetQuantityById, + FindAssetsByAccountId, + FindAssetsByAssetDefinitionId, + FindAssetsByDomainId, + FindAssetsByDomainIdAndAssetDefinitionId, + FindAssetsByName, + FindDomainById, + FindDomainKeyValueByIdAndKey, + FindPermissionTokensByAccountId, + FindRolesByAccountId, + FindTransactionByHash, + FindTransactionsByAccountId, + GenesisDomain, + GrantBox, + Greater, + Hash, + HashOf, + HashOf, + HashOf>, + HashOf>, + HashOf, + HashOf, + IdBox, + IdFilter, + IdFilter, + IdFilter, + IdFilter, + IdFilter, + IdFilter, + IdFilter, + IdentifiableBox, + IfExpression, + IfInstruction, + Instruction, + InstructionExecutionFail, + Less, + Metadata, + MetadataLimits, + MintBox, + Mod, + Multiply, + Name, + Not, + NotPermittedFail, + Option, + Option, + Option, + Option, + Option, + Option, + Option, + Option, + Option, + Or, + Pair, + Parameter, + Payload, + Peer, + PeerEvent, + PeerEventFilter, + PeerFilter, + PeerId, + PendingTransactions, + PermissionToken, + PipelineEntityKind, + PipelineEvent, + PipelineEventFilter, + PipelineStatus, + PublicKey, + QueryBox, + QueryRequest, + QueryResult, + RaiseTo, + RawVersioned, + RegisterBox, + RegistrableBox, + RejectedTransaction, + RejectionReason, + RemoveKeyValueBox, + Repeats, + RevokeBox, + Role, + RoleEvent, + RoleEventFilter, + RoleFilter, + RoleId, + SequenceBox, + SetKeyValueBox, + Signature, + SignatureCheckCondition, + SignatureOf, + SignatureOf, + SignatureOf, + SignatureOf, + SignatureOf, + SignaturesOf, + SignaturesOf, + SignaturesOf, + SignedQueryRequest, + String, + Subtract, + TimeEvent, + TimeEventFilter, + TimeInterval, + TimeSchedule, + Transaction, + TransactionRejectionReason, + TransactionValue, + TransferBox, + Trigger, + TriggerEvent, + TriggerEventFilter, + TriggerFilter, + TriggerId, + UnregisterBox, + UnsatisfiedSignatureConditionFail, + 
UnsupportedVersion, + ValidTransaction, + Value, + Vec>, + Vec, + Vec>, + Vec>, + Vec, + Vec, + Vec, + Vec, + Vec, + Vec, + Vec, + Vec, + VersionedPendingTransactions, + VersionedQueryResult, + VersionedRejectedTransaction, + VersionedSignedQueryRequest, + VersionedTransaction, + VersionedValidTransaction, + WasmExecutionFail, + Where, + [u8; 32], + account::NewAccount, + asset::Mintable, + block::BlockHeader, + block::CommittedBlock, + block::ValidBlock, + block::VersionedCommittedBlock, + block::VersionedValidBlock, + block::stream::BlockPublisherMessage, + block::stream::BlockSubscriberMessage, + block::stream::VersionedBlockPublisherMessage, + block::stream::VersionedBlockSubscriberMessage, + bool, + core::time::Duration, + domain::IpfsPath, + domain::NewDomain, + error::Error, + events::Event, + events::EventFilter, + events::EventPublisherMessage, + events::EventSubscriberMessage, + events::VersionedEventPublisherMessage, + events::VersionedEventSubscriberMessage, + events::pipeline::StatusKind, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo, + expression::EvaluatesTo>, + expression::EvaluatesTo, + expression::EvaluatesTo, + fixed::FixNum, + fixed::Fixed, + i64, + query::Payload, + smartcontracts::isi::error::FindError, + smartcontracts::isi::error::ParentHashNotFound, + smartcontracts::isi::query::Error, + smartcontracts::isi::query::UnsupportedVersionError, + sumeragi::network_topology::Topology, + sumeragi::view_change::BlockCreationTimeout, + sumeragi::view_change::CommitTimeout, + sumeragi::view_change::NoTransactionReceiptReceived, + sumeragi::view_change::Proof, + sumeragi::view_change::ProofChain, + sumeragi::view_change::ProofPayload, + sumeragi::view_change::Reason, + transaction::TransactionLimitError, + transaction::WasmSmartContract, + u128, + u32, + u64, 
+ u8, + } +} + +#[cfg(test)] +mod tests { + use std::collections::HashSet; + + use iroha_schema_bin::build_schemas; + + use super::*; + + #[test] + fn schemas_types_is_a_subset_of_map_types() { + // Exceptions which does not implement `Decode` so that they can't be decoded by this tool + let exceptions = HashSet::from([ + "Vec", + "iroha_core::genesis::GenesisTransaction", + "iroha_core::genesis::RawGenesisBlock", + "iroha_data_model::merkle::Leaf", + "iroha_data_model::merkle::MerkleTree", + "iroha_data_model::merkle::Node", + "iroha_data_model::merkle::Subtree", + "iroha_schema::Compact", + ]); + + let schemas_types = build_schemas() + .into_keys() + .filter(|type_name| !exceptions.contains(type_name.as_str())) + .collect::>(); + let map_types = generate_map().into_keys().collect::>(); + + assert!( + schemas_types.is_subset(&map_types), + "Difference: {:#?}", + schemas_types.difference(&map_types) + ); + } +} diff --git a/tools/parity_scale_decoder/src/main.rs b/tools/parity_scale_decoder/src/main.rs new file mode 100644 index 00000000000..6df7e237779 --- /dev/null +++ b/tools/parity_scale_decoder/src/main.rs @@ -0,0 +1,330 @@ +//! Parity Scale decoder tool for Iroha data types. For usage run with `--help` + +#![allow(clippy::print_stdout, clippy::use_debug, clippy::unnecessary_wraps)] + +use std::{collections::BTreeMap, fmt::Debug, fs, io, path::PathBuf}; + +use clap::Parser; +use colored::*; +use eyre::{eyre, Result}; +use parity_scale_codec::Decode; + +mod generate_map; +use generate_map::generate_map; + +/// Parity Scale decoder tool for Iroha data types +#[derive(Debug, Parser)] +#[clap(version, about, author)] +enum Args { + /// Show all available types + ListTypes, + /// Decode type from binary + Decode(DecodeArgs), +} + +#[derive(Debug, clap::Args)] +struct DecodeArgs { + /// Path to the binary with encoded Iroha structure + binary: PathBuf, + /// Type that is expected to be encoded in binary. 
+ /// If not specified then a guess will be attempted + #[clap(short, long = "type")] + type_id: Option, +} + +/// Function pointer to [`DumpDecoded::dump_decoded()`] +/// +/// Function pointer is used cause trait object can not be used +/// due to [`Sized`] bound in [`Decode`] trait +pub type DumpDecodedPtr = fn(&[u8], &mut dyn io::Write) -> Result<(), eyre::Error>; + +/// Map (Type Name -> `dump_decode()` ptr) +pub type DumpDecodedMap = BTreeMap; + +/// Types implementing this trait can be decoded from bytes +/// with *Parity Scale Codec* and dumped to something implementing [`Write`] +pub trait DumpDecoded: Debug + Decode { + /// Decode `Self` from `input` and dump to `w` + /// + /// # Errors + /// - If decoding from *Parity Scale Codec* fails + /// - If writing into `w` fails + fn dump_decoded(mut input: &[u8], w: &mut dyn io::Write) -> Result<(), eyre::Error> { + let obj = ::decode(&mut input)?; + #[allow(clippy::use_debug)] + writeln!(w, "{:#?}", obj)?; + Ok(()) + } +} + +impl DumpDecoded for T {} + +fn main() -> Result<()> { + let args = Args::parse(); + + let map = generate_map(); + let stdout = io::stdout(); + let mut writer = io::BufWriter::new(stdout.lock()); + + match args { + Args::Decode(decode_args) => { + let decoder = Decoder::new(decode_args, &map); + decoder.decode(&mut writer) + } + Args::ListTypes => list_types(&map, &mut writer), + } +} + +/// Type decoder +struct Decoder<'map> { + args: DecodeArgs, + map: &'map DumpDecodedMap, +} + +impl<'map> Decoder<'map> { + /// Create new `Decoder` with `args` and `map` + pub fn new(args: DecodeArgs, map: &'map DumpDecodedMap) -> Self { + Self { args, map } + } + + /// Decode type and print to `writer` + pub fn decode(&self, writer: &mut W) -> Result<()> { + let bytes = fs::read(self.args.binary.clone())?; + + if let Some(type_id) = &self.args.type_id { + return self.decode_by_type(type_id, &bytes, writer); + } + self.decode_by_guess(&bytes, writer) + } + + /// Decode concrete `type` from `bytes` and 
print to `writer` + fn decode_by_type( + &self, + type_id: &str, + bytes: &[u8], + writer: &mut W, + ) -> Result<()> { + self.map.get(type_id).map_or_else( + || Err(eyre!("Unknown type: `{type_id}`")), + |dump_decoded| dump_decoded(bytes, writer), + ) + } + + /// Try to decode every type from `bytes` and print to `writer` + /// + /// TODO: Can be parallelized when there will be too many types + fn decode_by_guess(&self, bytes: &[u8], writer: &mut W) -> Result<()> { + let count = self + .map + .iter() + .filter_map(|(type_name, dump_decoded)| { + let mut buf = Vec::new(); + dump_decoded(bytes, &mut buf) + .ok() + .and_then(|_| String::from_utf8(buf).ok()) + .and_then(|formatted| { + writeln!(writer, "{}:\n{}", type_name.italic().cyan(), formatted).ok() + }) + }) + .count(); + match count { + 0 => writeln!(writer, "No compatible types found"), + 1 => writeln!(writer, "{} compatible type found", "1".bold()), + n => writeln!(writer, "{} compatible types found", n.to_string().bold()), + } + .map_err(Into::into) + } +} + +/// Print all supported types from `map` to `writer` +fn list_types(map: &DumpDecodedMap, writer: &mut W) -> Result<()> { + for key in map.keys() { + writeln!(writer, "{key}")?; + } + if !map.is_empty() { + writeln!(writer)?; + } + + match map.len() { + 0 => writeln!(writer, "No type is supported"), + 1 => writeln!(writer, "{} type is supported", "1".bold()), + n => writeln!(writer, "{} types are supported", n.to_string().bold()), + } + .map_err(Into::into) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn decode_account_sample() { + decode_sample( + "account.bin", + String::from("iroha_data_model::account::Account"), + r###"Account { + id: Id { + name: "alice", + domain_id: Id { + name: "wonderland", + }, + }, + assets: {}, + signatories: {}, + permission_tokens: {}, + signature_check_condition: SignatureCheckCondition( + EvaluatesTo { + expression: ContainsAny( + ContainsAny { + collection: EvaluatesTo { + expression: ContextValue( + 
ContextValue { + value_name: "transaction_signatories", + }, + ), + _value_type: PhantomData, + }, + elements: EvaluatesTo { + expression: ContextValue( + ContextValue { + value_name: "account_signatories", + }, + ), + _value_type: PhantomData, + }, + }, + ), + _value_type: PhantomData, + }, + ), + metadata: Metadata { + map: { + "hat": Name( + "white", + ), + }, + }, + roles: {}, +} +"###, + ); + } + + #[test] + fn decode_domain_sample() { + decode_sample( + "domain.bin", + String::from("iroha_data_model::domain::Domain"), + r###"Domain { + id: Id { + name: "wonderland", + }, + accounts: {}, + asset_definitions: {}, + logo: Some( + IpfsPath( + "/ipfs/Qme7ss3ARVgxv6rXqVPiikMJ8u2NLgmgszg13pYrDKEoiu", + ), + ), + metadata: Metadata { + map: { + "Is_Jabberwocky_alive": Bool( + true, + ), + }, + }, +} +"###, + ); + } + + #[test] + fn decode_trigger_sample() { + decode_sample( + "trigger.bin", + String::from("iroha_data_model::trigger::Trigger"), + r###"Trigger { + id: Id { + name: "mint_rose", + }, + action: Action { + executable: Instructions( + [ + Mint( + MintBox { + object: EvaluatesTo { + expression: Raw( + U32( + 1, + ), + ), + _value_type: PhantomData, + }, + destination_id: EvaluatesTo { + expression: Raw( + Id( + AssetId( + Id { + definition_id: DefinitionId { + name: "rose", + domain_id: Id { + name: "wonderland", + }, + }, + account_id: Id { + name: "alice", + domain_id: Id { + name: "wonderland", + }, + }, + }, + ), + ), + ), + _value_type: PhantomData, + }, + }, + ), + ], + ), + repeats: Indefinitely, + technical_account: Id { + name: "alice", + domain_id: Id { + name: "wonderland", + }, + }, + filter: Data( + BySome( + ByAccount( + AcceptAll, + ), + ), + ), + }, + metadata: Metadata { + map: {}, + }, +} +"###, + ); + } + + #[allow(clippy::unwrap_used)] + fn decode_sample(sample_path: &str, type_id: String, expected_output: &str) { + let mut binary = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + binary.push("samples/"); + binary.push(sample_path); + let 
args = DecodeArgs { + binary, + type_id: Some(type_id), + }; + + let map = generate_map(); + let decoder = Decoder::new(args, &map); + let mut buf = Vec::new(); + decoder.decode(&mut buf).unwrap(); + + assert_eq!(String::from_utf8(buf).unwrap(), expected_output); + } +}